id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
53,494
def append_gentoo_mirrors(value):
    """Add *value* to (or create) the GENTOO_MIRRORS variable in make.conf.

    Delegates to append_var and returns whatever it returns.
    """
    return append_var('GENTOO_MIRRORS', value)
[ "def", "append_gentoo_mirrors", "(", "value", ")", ":", "return", "append_var", "(", "'GENTOO_MIRRORS'", ",", "value", ")" ]
add to or create a new gentoo_mirrors in the make .
train
false
53,495
def find_match(uuid):
    """Print DS_VM[uuid] when *uuid* does not occur in INV_VM.

    Counts how often uuid appears in the INV_VM sequence; if it was never
    seen, the matching DS_VM entry is printed.
    """
    occurrences = 0
    for candidate in INV_VM:
        if uuid == candidate:
            occurrences = occurrences + 1
    if occurrences < 1:
        # BUGFIX: the original used the Python-2-only `print x` statement;
        # the parenthesized form prints the same single value on 2 and 3.
        print(DS_VM[uuid])
[ "def", "find_match", "(", "uuid", ")", ":", "a", "=", "0", "for", "temp", "in", "INV_VM", ":", "if", "(", "uuid", "==", "temp", ")", ":", "a", "=", "(", "a", "+", "1", ")", "if", "(", "a", "<", "1", ")", ":", "print", "DS_VM", "[", "uuid", "]" ]
function takes vc .
train
false
53,497
def random_tree(n, seed=None):
    """Return a uniformly random tree on *n* labelled nodes.

    Draws a random Prüfer sequence of length n - 2 and decodes it.
    Raises NetworkXPointlessConcept for n == 0; the one-node tree is
    returned directly for n == 1.
    """
    if n == 0:
        raise nx.NetworkXPointlessConcept('the null graph is not a tree')
    if n == 1:
        # The single-node graph is the unique tree on one node.
        return nx.empty_graph(1)
    random.seed(seed)
    prufer = sample_with_replacement(range(n), n - 2)
    return nx.from_prufer_sequence(prufer)
[ "def", "random_tree", "(", "n", ",", "seed", "=", "None", ")", ":", "if", "(", "n", "==", "0", ")", ":", "raise", "nx", ".", "NetworkXPointlessConcept", "(", "'the null graph is not a tree'", ")", "if", "(", "n", "==", "1", ")", ":", "return", "nx", ".", "empty_graph", "(", "1", ")", "random", ".", "seed", "(", "seed", ")", "sequence", "=", "sample_with_replacement", "(", "range", "(", "n", ")", ",", "(", "n", "-", "2", ")", ")", "return", "nx", ".", "from_prufer_sequence", "(", "sequence", ")" ]
returns a uniformly random tree on n nodes .
train
false
53,498
@verbose
def rap_music(evoked, forward, noise_cov, n_dipoles=5, return_residual=False, picks=None, verbose=None):
    """RAP-MUSIC source localization on evoked data.

    Parameters: evoked (with .info/.data/.times), a forward solution, a
    noise covariance, the number of dipoles to search for, and an optional
    channel picks selection.  Returns the fitted dipoles, plus the residual
    evoked (data minus explained signal, projections re-applied) when
    return_residual is True.
    """
    info = evoked.info
    data = evoked.data
    times = evoked.times
    # Restrict the data to the channels usable with this forward/noise_cov.
    picks = _setup_picks(picks, info, forward, noise_cov)
    data = data[picks]
    (dipoles, explained_data) = _apply_rap_music(data, info, times, forward, noise_cov, n_dipoles, picks, return_residual)
    if return_residual:
        residual = evoked.copy()
        selection = [info['ch_names'][p] for p in picks]
        residual = pick_channels_evoked(residual, include=selection)
        # Residual = measured data minus the part explained by the dipoles.
        residual.data -= explained_data
        active_projs = [p for p in residual.info['projs'] if p['active']]
        for p in active_projs:
            # NOTE: mutates the proj dicts in place (they are shared with
            # residual.info['projs']) so add_proj/apply_proj re-activates them.
            p['active'] = False
        residual.add_proj(active_projs, remove_existing=True)
        residual.apply_proj()
        return (dipoles, residual)
    else:
        return dipoles
[ "@", "verbose", "def", "rap_music", "(", "evoked", ",", "forward", ",", "noise_cov", ",", "n_dipoles", "=", "5", ",", "return_residual", "=", "False", ",", "picks", "=", "None", ",", "verbose", "=", "None", ")", ":", "info", "=", "evoked", ".", "info", "data", "=", "evoked", ".", "data", "times", "=", "evoked", ".", "times", "picks", "=", "_setup_picks", "(", "picks", ",", "info", ",", "forward", ",", "noise_cov", ")", "data", "=", "data", "[", "picks", "]", "(", "dipoles", ",", "explained_data", ")", "=", "_apply_rap_music", "(", "data", ",", "info", ",", "times", ",", "forward", ",", "noise_cov", ",", "n_dipoles", ",", "picks", ",", "return_residual", ")", "if", "return_residual", ":", "residual", "=", "evoked", ".", "copy", "(", ")", "selection", "=", "[", "info", "[", "'ch_names'", "]", "[", "p", "]", "for", "p", "in", "picks", "]", "residual", "=", "pick_channels_evoked", "(", "residual", ",", "include", "=", "selection", ")", "residual", ".", "data", "-=", "explained_data", "active_projs", "=", "[", "p", "for", "p", "in", "residual", ".", "info", "[", "'projs'", "]", "if", "p", "[", "'active'", "]", "]", "for", "p", "in", "active_projs", ":", "p", "[", "'active'", "]", "=", "False", "residual", ".", "add_proj", "(", "active_projs", ",", "remove_existing", "=", "True", ")", "residual", ".", "apply_proj", "(", ")", "return", "(", "dipoles", ",", "residual", ")", "else", ":", "return", "dipoles" ]
rap-music source localization method .
train
false
53,499
def MakeCdfFromItems(items, label=None):
    """Build a Cdf from an unsorted sequence of (value, frequency) pairs."""
    counts = dict(items)
    return Cdf(counts, label=label)
[ "def", "MakeCdfFromItems", "(", "items", ",", "label", "=", "None", ")", ":", "return", "Cdf", "(", "dict", "(", "items", ")", ",", "label", "=", "label", ")" ]
makes a cdf from an unsorted sequence of pairs .
train
false
53,500
def make_median(q2):
    """Build the white square Scatter marker carrying the median hover text
    for a violin plot.
    """
    hover_text = [('median: ' + '{:0.2f}'.format(q2))]
    marker_style = dict(symbol='square', color='rgb(255,255,255)')
    return graph_objs.Scatter(x=[0], y=[q2], text=hover_text, mode='markers', marker=marker_style, hoverinfo='text')
[ "def", "make_median", "(", "q2", ")", ":", "return", "graph_objs", ".", "Scatter", "(", "x", "=", "[", "0", "]", ",", "y", "=", "[", "q2", "]", ",", "text", "=", "[", "(", "'median: '", "+", "'{:0.2f}'", ".", "format", "(", "q2", ")", ")", "]", ",", "mode", "=", "'markers'", ",", "marker", "=", "dict", "(", "symbol", "=", "'square'", ",", "color", "=", "'rgb(255,255,255)'", ")", ",", "hoverinfo", "=", "'text'", ")" ]
formats the median hovertext for a violin plot .
train
false
53,501
def _ScrubUpdateUser(op_args):
    """Scrub sensitive User fields (pwd_hash, salt) from op_args['user_dict']
    before it reaches the logs.
    """
    user_dict = op_args['user_dict']
    _ScrubForClass(User, user_dict)
[ "def", "_ScrubUpdateUser", "(", "op_args", ")", ":", "_ScrubForClass", "(", "User", ",", "op_args", "[", "'user_dict'", "]", ")" ]
scrub the pwd_hash and salt from the logs .
train
false
53,502
def get_keyid(keyname):
    """Return the id of the SSH key named *keyname*.

    Returns None when keyname is falsy; raises SaltCloudNotFound when the
    key (or its id) cannot be found.
    """
    if not keyname:
        return None
    keypairs = list_keypairs(call='function')
    # BUGFIX: the original indexed keypairs[keyname] directly, so an unknown
    # keyname raised a bare KeyError before the intended SaltCloudNotFound.
    keyid = keypairs.get(keyname, {}).get('id')
    if keyid:
        return keyid
    raise SaltCloudNotFound('The specified ssh key could not be found.')
[ "def", "get_keyid", "(", "keyname", ")", ":", "if", "(", "not", "keyname", ")", ":", "return", "None", "keypairs", "=", "list_keypairs", "(", "call", "=", "'function'", ")", "keyid", "=", "keypairs", "[", "keyname", "]", "[", "'id'", "]", "if", "keyid", ":", "return", "keyid", "raise", "SaltCloudNotFound", "(", "'The specified ssh key could not be found.'", ")" ]
return the id of the keyname .
train
true
53,503
def print_event_goal_details(goal_details):
    """Print every piece of info attached to an event-type goal, one line per
    field.

    CATEGORY/ACTION/LABEL conditions carry a match expression; any other
    condition type carries a numeric comparison instead.
    """
    print('------ Event Goal -------')
    print(('Use Event Value = %s' % goal_details.get('useEventValue')))
    for condition in goal_details.get('eventConditions', []):
        kind = condition.get('type')
        print(('Type = %s' % kind))
        if kind not in ('CATEGORY', 'ACTION', 'LABEL'):
            print(('Comparison Type = %s' % condition.get('comparisonType')))
            print(('Comparison Value = %s' % condition.get('comparisonValue')))
        else:
            print(('Match Type = %s' % condition.get('matchType')))
            print(('Expression = %s' % condition.get('expression')))
[ "def", "print_event_goal_details", "(", "goal_details", ")", ":", "print", "(", "'------ Event Goal -------'", ")", "print", "(", "(", "'Use Event Value = %s'", "%", "goal_details", ".", "get", "(", "'useEventValue'", ")", ")", ")", "for", "event_condition", "in", "goal_details", ".", "get", "(", "'eventConditions'", ",", "[", "]", ")", ":", "event_type", "=", "event_condition", ".", "get", "(", "'type'", ")", "print", "(", "(", "'Type = %s'", "%", "event_type", ")", ")", "if", "(", "event_type", "in", "(", "'CATEGORY'", ",", "'ACTION'", ",", "'LABEL'", ")", ")", ":", "print", "(", "(", "'Match Type = %s'", "%", "event_condition", ".", "get", "(", "'matchType'", ")", ")", ")", "print", "(", "(", "'Expression = %s'", "%", "event_condition", ".", "get", "(", "'expression'", ")", ")", ")", "else", ":", "print", "(", "(", "'Comparison Type = %s'", "%", "event_condition", ".", "get", "(", "'comparisonType'", ")", ")", ")", "print", "(", "(", "'Comparison Value = %s'", "%", "event_condition", ".", "get", "(", "'comparisonValue'", ")", ")", ")" ]
prints all the event goal type info .
train
false
53,504
def _next_rooted_tree(predecessor, p=None): if (p is None): p = (len(predecessor) - 1) while (predecessor[p] == 1): p -= 1 if (p == 0): return None q = (p - 1) while (predecessor[q] != (predecessor[p] - 1)): q -= 1 result = list(predecessor) for i in range(p, len(result)): result[i] = result[((i - p) + q)] return result
[ "def", "_next_rooted_tree", "(", "predecessor", ",", "p", "=", "None", ")", ":", "if", "(", "p", "is", "None", ")", ":", "p", "=", "(", "len", "(", "predecessor", ")", "-", "1", ")", "while", "(", "predecessor", "[", "p", "]", "==", "1", ")", ":", "p", "-=", "1", "if", "(", "p", "==", "0", ")", ":", "return", "None", "q", "=", "(", "p", "-", "1", ")", "while", "(", "predecessor", "[", "q", "]", "!=", "(", "predecessor", "[", "p", "]", "-", "1", ")", ")", ":", "q", "-=", "1", "result", "=", "list", "(", "predecessor", ")", "for", "i", "in", "range", "(", "p", ",", "len", "(", "result", ")", ")", ":", "result", "[", "i", "]", "=", "result", "[", "(", "(", "i", "-", "p", ")", "+", "q", ")", "]", "return", "result" ]
one iteration of the beyer-hedetniemi algorithm .
train
false
53,506
def test_batch_normalized_mlp_allocation():
    """Check BatchNormalizedMLP wires input dims and drops linear biases on
    allocation.
    """
    mlp = BatchNormalizedMLP([Tanh(), Tanh()], [5, 7, 9])
    mlp.allocate()
    expected_dims = (7, 9)
    for activation, dim in zip(mlp.activations, expected_dims):
        assert activation.children[0].input_dim == dim
    assert all((not linear.use_bias) for linear in mlp.linear_transformations)
[ "def", "test_batch_normalized_mlp_allocation", "(", ")", ":", "mlp", "=", "BatchNormalizedMLP", "(", "[", "Tanh", "(", ")", ",", "Tanh", "(", ")", "]", ",", "[", "5", ",", "7", ",", "9", "]", ")", "mlp", ".", "allocate", "(", ")", "assert", "(", "mlp", ".", "activations", "[", "0", "]", ".", "children", "[", "0", "]", ".", "input_dim", "==", "7", ")", "assert", "(", "mlp", ".", "activations", "[", "1", "]", ".", "children", "[", "0", "]", ".", "input_dim", "==", "9", ")", "assert", "(", "not", "any", "(", "(", "l", ".", "use_bias", "for", "l", "in", "mlp", ".", "linear_transformations", ")", ")", ")" ]
test that batchnormalizedmlp performs allocation correctly .
train
false
53,507
def GetCommitInformation(transid):
    """Read stored transaction information for *transid*.

    Returns False when the data store location is missing or not a
    directory, None when the transaction file is absent, and otherwise the
    parsed DataServerRebalance record.  NOTE(review): the False/None split
    mirrors the original code; confirm callers rely on the distinction.
    """
    location = data_store.DB.Location()
    if not os.path.exists(location):
        return False
    if not os.path.isdir(location):
        return False
    trans_dir = _GetTransactionDirectory(location, transid)
    trans_path = utils.JoinPath(trans_dir, constants.TRANSACTION_FILENAME)
    if not os.path.exists(trans_path):
        return None
    if not os.path.isfile(trans_path):
        return None
    with open(trans_path, 'rb') as fp:
        return rdf_data_server.DataServerRebalance(fp.read())
[ "def", "GetCommitInformation", "(", "transid", ")", ":", "loc", "=", "data_store", ".", "DB", ".", "Location", "(", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "loc", ")", ")", ":", "return", "False", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "loc", ")", ")", ":", "return", "False", "tempdir", "=", "_GetTransactionDirectory", "(", "loc", ",", "transid", ")", "tempfile", "=", "utils", ".", "JoinPath", "(", "tempdir", ",", "constants", ".", "TRANSACTION_FILENAME", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "tempfile", ")", ")", ":", "return", "None", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "tempfile", ")", ")", ":", "return", "None", "with", "open", "(", "tempfile", ",", "'rb'", ")", "as", "fp", ":", "return", "rdf_data_server", ".", "DataServerRebalance", "(", "fp", ".", "read", "(", ")", ")" ]
read transaction information from stored file .
train
false
53,508
def _win_user_token_is_admin(user_token):
    """Return True when *user_token* belongs to the local Administrators group,
    using the Win32 advapi32 API via ctypes (Windows only).
    """
    # Layout mirrors the Win32 SID_IDENTIFIER_AUTHORITY: six value bytes.
    class SID_IDENTIFIER_AUTHORITY(ctypes.Structure, ):
        _fields_ = [('byte0', ctypes.c_byte), ('byte1', ctypes.c_byte), ('byte2', ctypes.c_byte), ('byte3', ctypes.c_byte), ('byte4', ctypes.c_byte), ('byte5', ctypes.c_byte)]
    # {0,0,0,0,0,5} is SECURITY_NT_AUTHORITY.
    nt_authority = SID_IDENTIFIER_AUTHORITY()
    nt_authority.byte5 = 5
    SECURITY_BUILTIN_DOMAIN_RID = 32
    DOMAIN_ALIAS_RID_ADMINS = 544
    administrators_group = ctypes.c_void_p()
    # Build the well-known BUILTIN\Administrators SID (two sub-authorities).
    if (ctypes.windll.advapi32.AllocateAndInitializeSid(ctypes.byref(nt_authority), 2, SECURITY_BUILTIN_DOMAIN_RID, DOMAIN_ALIAS_RID_ADMINS, 0, 0, 0, 0, 0, 0, ctypes.byref(administrators_group)) == 0):
        raise Exception('AllocateAndInitializeSid failed')
    try:
        is_admin = ctypes.wintypes.BOOL()
        # CheckTokenMembership returns nonzero on success; membership result
        # is written into is_admin.
        if (ctypes.windll.advapi32.CheckTokenMembership(user_token, administrators_group, ctypes.byref(is_admin)) == 0):
            raise Exception('CheckTokenMembership failed')
        return (is_admin.value != 0)
    finally:
        # The SID was allocated by the API and must always be freed.
        ctypes.windll.advapi32.FreeSid(administrators_group)
[ "def", "_win_user_token_is_admin", "(", "user_token", ")", ":", "class", "SID_IDENTIFIER_AUTHORITY", "(", "ctypes", ".", "Structure", ",", ")", ":", "_fields_", "=", "[", "(", "'byte0'", ",", "ctypes", ".", "c_byte", ")", ",", "(", "'byte1'", ",", "ctypes", ".", "c_byte", ")", ",", "(", "'byte2'", ",", "ctypes", ".", "c_byte", ")", ",", "(", "'byte3'", ",", "ctypes", ".", "c_byte", ")", ",", "(", "'byte4'", ",", "ctypes", ".", "c_byte", ")", ",", "(", "'byte5'", ",", "ctypes", ".", "c_byte", ")", "]", "nt_authority", "=", "SID_IDENTIFIER_AUTHORITY", "(", ")", "nt_authority", ".", "byte5", "=", "5", "SECURITY_BUILTIN_DOMAIN_RID", "=", "32", "DOMAIN_ALIAS_RID_ADMINS", "=", "544", "administrators_group", "=", "ctypes", ".", "c_void_p", "(", ")", "if", "(", "ctypes", ".", "windll", ".", "advapi32", ".", "AllocateAndInitializeSid", "(", "ctypes", ".", "byref", "(", "nt_authority", ")", ",", "2", ",", "SECURITY_BUILTIN_DOMAIN_RID", ",", "DOMAIN_ALIAS_RID_ADMINS", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "ctypes", ".", "byref", "(", "administrators_group", ")", ")", "==", "0", ")", ":", "raise", "Exception", "(", "'AllocateAndInitializeSid failed'", ")", "try", ":", "is_admin", "=", "ctypes", ".", "wintypes", ".", "BOOL", "(", ")", "if", "(", "ctypes", ".", "windll", ".", "advapi32", ".", "CheckTokenMembership", "(", "user_token", ",", "administrators_group", ",", "ctypes", ".", "byref", "(", "is_admin", ")", ")", "==", "0", ")", ":", "raise", "Exception", "(", "'CheckTokenMembership failed'", ")", "return", "(", "is_admin", ".", "value", "!=", "0", ")", "finally", ":", "ctypes", ".", "windll", ".", "advapi32", ".", "FreeSid", "(", "administrators_group", ")" ]
using the win32 api .
train
true
53,509
def unescape_string_literal(s):
    """Convert a quoted string literal to its unquoted value.

    Strips the surrounding quotes and unescapes the quote character and
    doubled backslashes.  Raises ValueError when *s* is not wrapped in
    matching single or double quotes.
    """
    quote = s[0]
    if quote not in u'"\'' or s[(-1)] != quote:
        raise ValueError(u'Not a string literal: %r' % s)
    body = s[1:(-1)]
    return body.replace(u'\\%s' % quote, quote).replace(u'\\\\', u'\\')
[ "def", "unescape_string_literal", "(", "s", ")", ":", "if", "(", "(", "s", "[", "0", "]", "not", "in", "u'\"\\''", ")", "or", "(", "s", "[", "(", "-", "1", ")", "]", "!=", "s", "[", "0", "]", ")", ")", ":", "raise", "ValueError", "(", "(", "u'Not a string literal: %r'", "%", "s", ")", ")", "quote", "=", "s", "[", "0", "]", "return", "s", "[", "1", ":", "(", "-", "1", ")", "]", ".", "replace", "(", "(", "u'\\\\%s'", "%", "quote", ")", ",", "quote", ")", ".", "replace", "(", "u'\\\\\\\\'", ",", "u'\\\\'", ")" ]
convert quoted string literals to unquoted strings with escaped quotes and backslashes unquoted:: .
train
false
53,510
def get_default_cgsnapshot_type():
    """Return the default group type used when migrating cgsnapshots.

    Falls back to an empty dict (after logging) when the type is not
    registered.
    """
    ctxt = context.get_admin_context()
    grp_type = {}
    try:
        grp_type = get_group_type_by_name(ctxt, DEFAULT_CGSNAPSHOT_TYPE)
    except exception.GroupTypeNotFoundByName:
        LOG.exception((_LE('Default cgsnapshot type %s is not found.') % DEFAULT_CGSNAPSHOT_TYPE))
    return grp_type
[ "def", "get_default_cgsnapshot_type", "(", ")", ":", "grp_type", "=", "{", "}", "ctxt", "=", "context", ".", "get_admin_context", "(", ")", "try", ":", "grp_type", "=", "get_group_type_by_name", "(", "ctxt", ",", "DEFAULT_CGSNAPSHOT_TYPE", ")", "except", "exception", ".", "GroupTypeNotFoundByName", ":", "LOG", ".", "exception", "(", "(", "_LE", "(", "'Default cgsnapshot type %s is not found.'", ")", "%", "DEFAULT_CGSNAPSHOT_TYPE", ")", ")", "return", "grp_type" ]
get the default group type for migrating cgsnapshots .
train
false
53,511
def require_app(app_name, api_style=False):
    """Record that the calling module requires application *app_name* to be
    automatically loaded.

    api_style selects how far up the stack the caller's module is found:
    two module frames up for API-style wrappers, one otherwise.  The app
    name is appended to the caller's _REQUIRED_APP list (created lazily).
    """
    frames = inspect.stack()
    modules = [mod for mod in (inspect.getmodule(frame[0]) for frame in frames) if mod is not None]
    caller = modules[2] if api_style else modules[1]
    caller._REQUIRED_APP = getattr(caller, '_REQUIRED_APP', [])
    caller._REQUIRED_APP.append(app_name)
    LOG.debug('require_app: %s is required by %s', app_name, caller.__name__)
[ "def", "require_app", "(", "app_name", ",", "api_style", "=", "False", ")", ":", "iterable", "=", "(", "inspect", ".", "getmodule", "(", "frame", "[", "0", "]", ")", "for", "frame", "in", "inspect", ".", "stack", "(", ")", ")", "modules", "=", "[", "module", "for", "module", "in", "iterable", "if", "(", "module", "is", "not", "None", ")", "]", "if", "api_style", ":", "m", "=", "modules", "[", "2", "]", "else", ":", "m", "=", "modules", "[", "1", "]", "m", ".", "_REQUIRED_APP", "=", "getattr", "(", "m", ",", "'_REQUIRED_APP'", ",", "[", "]", ")", "m", ".", "_REQUIRED_APP", ".", "append", "(", "app_name", ")", "LOG", ".", "debug", "(", "'require_app: %s is required by %s'", ",", "app_name", ",", "m", ".", "__name__", ")" ]
request the application to be automatically loaded .
train
true
53,512
def clear_lookups(namespace):
    """Remove the mako template lookup registered for *namespace*, if any."""
    # pop with a default is the atomic form of `if key in d: del d[key]`.
    LOOKUP.pop(namespace, None)
[ "def", "clear_lookups", "(", "namespace", ")", ":", "if", "(", "namespace", "in", "LOOKUP", ")", ":", "del", "LOOKUP", "[", "namespace", "]" ]
remove mako template lookups for the given namespace .
train
false
53,513
def getVector3(x=0.0, y=0.0, z=0.0):
    """Return a Vector3 built from the given coordinates (origin by default)."""
    vector = Vector3(x, y, z)
    return vector
[ "def", "getVector3", "(", "x", "=", "0.0", ",", "y", "=", "0.0", ",", "z", "=", "0.0", ")", ":", "return", "Vector3", "(", "x", ",", "y", ",", "z", ")" ]
get the vector3 .
train
false
53,515
def getComplexIfNone(valueComplex):
    """Return *valueComplex*, substituting a fresh complex() (0j) when it is
    None.
    """
    # IDIOM FIX: `is None` replaces `== None`; identity is the correct (and
    # faster) None test and avoids invoking a custom __eq__.
    if valueComplex is None:
        return complex()
    return valueComplex
[ "def", "getComplexIfNone", "(", "valueComplex", ")", ":", "if", "(", "valueComplex", "==", "None", ")", ":", "return", "complex", "(", ")", "return", "valueComplex" ]
get new complex if the original complex is none .
train
false
53,516
def cochrans_q(x):
    """Cochran's Q test for identical effect of k treatments (deprecated; use
    stats.cochrans_q).

    x is an (N, k) array of binary outcomes; "success" is the largest value
    present.  Returns (q_statistic, p_value) against chi2 with k - 1 dof.
    """
    warnings.warn('Deprecated, use stats.cochrans_q instead', DeprecationWarning)
    x = np.asarray(x)
    levels = np.unique(x)
    (N, k) = x.shape
    success = levels[(-1)]
    row_hits = (x == success).sum(1, float)
    col_hits = (x == success).sum(0, float)
    row_total = row_hits.sum()
    col_total = col_hits.sum()
    # Both totals count every success once, so they must agree.
    assert row_total == col_total
    numerator = (k - 1) * ((k * np.sum(col_hits ** 2)) - (col_total ** 2))
    denominator = (k * row_total) - np.sum(row_hits ** 2)
    q_stat = numerator / denominator
    return (q_stat, stats.chi2.sf(q_stat, k - 1))
[ "def", "cochrans_q", "(", "x", ")", ":", "warnings", ".", "warn", "(", "'Deprecated, use stats.cochrans_q instead'", ",", "DeprecationWarning", ")", "x", "=", "np", ".", "asarray", "(", "x", ")", "gruni", "=", "np", ".", "unique", "(", "x", ")", "(", "N", ",", "k", ")", "=", "x", ".", "shape", "count_row_success", "=", "(", "x", "==", "gruni", "[", "(", "-", "1", ")", "]", ")", ".", "sum", "(", "1", ",", "float", ")", "count_col_success", "=", "(", "x", "==", "gruni", "[", "(", "-", "1", ")", "]", ")", ".", "sum", "(", "0", ",", "float", ")", "count_row_ss", "=", "count_row_success", ".", "sum", "(", ")", "count_col_ss", "=", "count_col_success", ".", "sum", "(", ")", "assert", "(", "count_row_ss", "==", "count_col_ss", ")", "q_stat", "=", "(", "(", "(", "k", "-", "1", ")", "*", "(", "(", "k", "*", "np", ".", "sum", "(", "(", "count_col_success", "**", "2", ")", ")", ")", "-", "(", "count_col_ss", "**", "2", ")", ")", ")", "/", "(", "(", "k", "*", "count_row_ss", ")", "-", "np", ".", "sum", "(", "(", "count_row_success", "**", "2", ")", ")", ")", ")", "return", "(", "q_stat", ",", "stats", ".", "chi2", ".", "sf", "(", "q_stat", ",", "(", "k", "-", "1", ")", ")", ")" ]
cochrans q test for identical effect of k treatments cochrans q is a k-sample extension of the mcnemar test .
train
false
53,517
def corpus_chrf(list_of_references, hypotheses, min_len=1, max_len=6, beta=3.0):
    """Calculate the CHRF (character n-gram F-score) over a corpus of
    reference/hypothesis pairs.

    References and hypotheses may be strings or token lists (token lists
    are joined with spaces).  beta weights recall over precision.
    """
    assert (len(list_of_references) == len(hypotheses)), 'The number of hypotheses and their references should be the same'
    for (reference, hypothesis) in zip(list_of_references, hypotheses):
        # BUGFIX: the original condition was `type(reference) and
        # (type(hypothesis) != str)` -- the left operand is always truthy,
        # and it joined *both* sides even when one was already a string
        # (space-joining a str splits it into characters).  Join each side
        # independently, only when it is not already a string.
        if not isinstance(reference, str):
            reference = ' '.join(reference)
        if not isinstance(hypothesis, str):
            hypothesis = ' '.join(hypothesis)
        ref_ngrams = Counter(everygrams(reference, min_len, max_len))
        hyp_ngrams = Counter(everygrams(hypothesis, min_len, max_len))
        overlap_ngrams = (ref_ngrams & hyp_ngrams)
        tp = sum(overlap_ngrams.values())
        tpfp = sum(hyp_ngrams.values())
        tpfn = sum(ref_ngrams.values())
        precision = (tp / tpfp)
        recall = (tp / tpfn)
        factor = (beta ** 2)
        score = (((1 + factor) * (precision * recall)) / ((factor * precision) + recall))
    # NOTE(review): only the final pair's score is returned, not a corpus
    # aggregate -- this mirrors the original control flow; confirm intent.
    return score
[ "def", "corpus_chrf", "(", "list_of_references", ",", "hypotheses", ",", "min_len", "=", "1", ",", "max_len", "=", "6", ",", "beta", "=", "3.0", ")", ":", "assert", "(", "len", "(", "list_of_references", ")", "==", "len", "(", "hypotheses", ")", ")", ",", "'The number of hypotheses and their references should be the same'", "for", "(", "reference", ",", "hypothesis", ")", "in", "zip", "(", "list_of_references", ",", "hypotheses", ")", ":", "if", "(", "type", "(", "reference", ")", "and", "(", "type", "(", "hypothesis", ")", "!=", "str", ")", ")", ":", "(", "reference", ",", "hypothesis", ")", "=", "(", "' '", ".", "join", "(", "reference", ")", ",", "' '", ".", "join", "(", "hypothesis", ")", ")", "ref_ngrams", "=", "Counter", "(", "everygrams", "(", "reference", ",", "min_len", ",", "max_len", ")", ")", "hyp_ngrams", "=", "Counter", "(", "everygrams", "(", "hypothesis", ",", "min_len", ",", "max_len", ")", ")", "overlap_ngrams", "=", "(", "ref_ngrams", "&", "hyp_ngrams", ")", "tp", "=", "sum", "(", "overlap_ngrams", ".", "values", "(", ")", ")", "tpfp", "=", "sum", "(", "hyp_ngrams", ".", "values", "(", ")", ")", "tffn", "=", "sum", "(", "ref_ngrams", ".", "values", "(", ")", ")", "precision", "=", "(", "tp", "/", "tpfp", ")", "recall", "=", "(", "tp", "/", "tffn", ")", "factor", "=", "(", "beta", "**", "2", ")", "score", "=", "(", "(", "(", "1", "+", "factor", ")", "*", "(", "precision", "*", "recall", ")", ")", "/", "(", "(", "factor", "*", "precision", ")", "+", "recall", ")", ")", "return", "score" ]
calculates the corpus level chrf .
train
false
53,519
def _remotes_on(port, which_end):
    """Return the set of remote IP addresses of active TCP connections whose
    *which_end* entry (a key understood by _parse_tcp_line) equals *port*.

    Parses /proc/net/tcp and /proc/net/tcp6 when available; otherwise
    defers to the platform-specific helper.
    """
    port = int(port)
    remote_addrs = set()
    have_proc = False
    for stat_file in ('/proc/net/tcp', '/proc/net/tcp6'):
        if not os.path.isfile(stat_file):
            continue
        have_proc = True
        with salt.utils.fopen(stat_file, 'r') as handle:
            for line in handle:
                # Skip the header row, which begins with 'sl'.
                if line.strip().startswith('sl'):
                    continue
                parsed = _parse_tcp_line(line)
                slot = next(iter(parsed))
                if parsed[slot][which_end] == port:
                    remote_addrs.add(parsed[slot]['remote_addr'])
    if not have_proc:
        if salt.utils.is_sunos():
            return _sunos_remotes_on(port, which_end)
        if salt.utils.is_freebsd():
            return _freebsd_remotes_on(port, which_end)
        if salt.utils.is_netbsd():
            return _netbsd_remotes_on(port, which_end)
        if salt.utils.is_openbsd():
            return _openbsd_remotes_on(port, which_end)
        if salt.utils.is_windows():
            return _windows_remotes_on(port, which_end)
        return _linux_remotes_on(port, which_end)
    return remote_addrs
[ "def", "_remotes_on", "(", "port", ",", "which_end", ")", ":", "port", "=", "int", "(", "port", ")", "ret", "=", "set", "(", ")", "proc_available", "=", "False", "for", "statf", "in", "[", "'/proc/net/tcp'", ",", "'/proc/net/tcp6'", "]", ":", "if", "os", ".", "path", ".", "isfile", "(", "statf", ")", ":", "proc_available", "=", "True", "with", "salt", ".", "utils", ".", "fopen", "(", "statf", ",", "'r'", ")", "as", "fp_", ":", "for", "line", "in", "fp_", ":", "if", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'sl'", ")", ":", "continue", "iret", "=", "_parse_tcp_line", "(", "line", ")", "sl", "=", "next", "(", "iter", "(", "iret", ")", ")", "if", "(", "iret", "[", "sl", "]", "[", "which_end", "]", "==", "port", ")", ":", "ret", ".", "add", "(", "iret", "[", "sl", "]", "[", "'remote_addr'", "]", ")", "if", "(", "not", "proc_available", ")", ":", "if", "salt", ".", "utils", ".", "is_sunos", "(", ")", ":", "return", "_sunos_remotes_on", "(", "port", ",", "which_end", ")", "if", "salt", ".", "utils", ".", "is_freebsd", "(", ")", ":", "return", "_freebsd_remotes_on", "(", "port", ",", "which_end", ")", "if", "salt", ".", "utils", ".", "is_netbsd", "(", ")", ":", "return", "_netbsd_remotes_on", "(", "port", ",", "which_end", ")", "if", "salt", ".", "utils", ".", "is_openbsd", "(", ")", ":", "return", "_openbsd_remotes_on", "(", "port", ",", "which_end", ")", "if", "salt", ".", "utils", ".", "is_windows", "(", ")", ":", "return", "_windows_remotes_on", "(", "port", ",", "which_end", ")", "return", "_linux_remotes_on", "(", "port", ",", "which_end", ")", "return", "ret" ]
return a set of ip addrs active tcp connections .
train
true
53,520
def connection_before_request():
    """Acquire a MongoDB client from the shared CLIENT_POOL for this request.

    NOTE(review): assumes a paired after-request hook releases the client
    back to the pool -- confirm against the app's request lifecycle.
    """
    CLIENT_POOL.acquire()
[ "def", "connection_before_request", "(", ")", ":", "CLIENT_POOL", ".", "acquire", "(", ")" ]
acquire a mongodb client from the pool .
train
false
53,521
@allow_public
@cache_control(public=True, must_revalidate=True, max_age=((3600 * 24) * 7))
def search_plugin(request):
    """Render the OpenSearch plugin descriptor (publicly cacheable for one
    week).
    """
    template = 'phonebook/search_opensearch.xml'
    return render(request, template, content_type='application/opensearchdescription+xml')
[ "@", "allow_public", "@", "cache_control", "(", "public", "=", "True", ",", "must_revalidate", "=", "True", ",", "max_age", "=", "(", "(", "3600", "*", "24", ")", "*", "7", ")", ")", "def", "search_plugin", "(", "request", ")", ":", "return", "render", "(", "request", ",", "'phonebook/search_opensearch.xml'", ",", "content_type", "=", "'application/opensearchdescription+xml'", ")" ]
render an opensearch plugin .
train
false
53,522
def getNormalWeighted(begin, center, end):
    """Return the weighted normal at *center*: the cross product of the
    incoming edge (begin->center) and the outgoing edge (center->end).
    """
    incoming = center - begin
    outgoing = end - center
    return incoming.cross(outgoing)
[ "def", "getNormalWeighted", "(", "begin", ",", "center", ",", "end", ")", ":", "return", "(", "center", "-", "begin", ")", ".", "cross", "(", "(", "end", "-", "center", ")", ")" ]
get weighted normal .
train
false
53,524
def partitionby(func, seq):
    """Partition *seq* into tuples of consecutive elements that share the same
    func(element) value, preserving order.
    """
    runs = itertools.groupby(seq, key=func)
    # Each groupby item is (key, group-iterator); keep only the groups.
    return map(tuple, pluck(1, runs))
[ "def", "partitionby", "(", "func", ",", "seq", ")", ":", "return", "map", "(", "tuple", ",", "pluck", "(", "1", ",", "itertools", ".", "groupby", "(", "seq", ",", "key", "=", "func", ")", ")", ")" ]
partition a sequence according to a function partition s into a sequence of lists such that .
train
false
53,525
@handle_response_format
@treeio_login_required
def sla_delete(request, sla_id, response_format='html'):
    """Delete (or trash) a ServiceLevelAgreement after permission checks,
    handling the delete/trash/cancel confirmation form.
    """
    sla = get_object_or_404(ServiceLevelAgreement, pk=sla_id)
    if not request.user.profile.has_permission(sla, mode='w'):
        return user_denied(request, message="You don't have access to this Service Level Agreement")
    if request.POST:
        if 'delete' in request.POST:
            # 'trash' marks a soft delete; otherwise remove the row outright.
            if 'trash' in request.POST:
                sla.trash = True
                sla.save()
            else:
                sla.delete()
            return HttpResponseRedirect(reverse('services_sla_index'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id]))
    ctx = _get_default_context(request)
    ctx.update({'sla': sla})
    return render_to_response('services/sla_delete', ctx, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "sla_delete", "(", "request", ",", "sla_id", ",", "response_format", "=", "'html'", ")", ":", "sla", "=", "get_object_or_404", "(", "ServiceLevelAgreement", ",", "pk", "=", "sla_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "sla", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Service Level Agreement\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "sla", ".", "trash", "=", "True", "sla", ".", "save", "(", ")", "else", ":", "sla", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_sla_index'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_sla_view'", ",", "args", "=", "[", "sla", ".", "id", "]", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'sla'", ":", "sla", "}", ")", "return", "render_to_response", "(", "'services/sla_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
servicelevelagreement delete .
train
false
53,526
def empty_iter():
    """Return an iterator that yields no items.

    BUGFIX: the original was a generator doing `yield iter([]).next()` --
    Python-2-only syntax, and under PEP 479 (Python 3.7+) the escaping
    StopIteration becomes a RuntimeError.  Returning an iterator over an
    empty tuple is equivalent and version-safe.
    """
    return iter(())
[ "def", "empty_iter", "(", ")", ":", "(", "yield", "iter", "(", "[", "]", ")", ".", "next", "(", ")", ")" ]
returns an iterator containing no results .
train
false
53,528
def signed_permutations(t):
    """Yield every ordering of *t* under every sign assignment of its non-zero
    elements, each rebuilt as t's own type.
    """
    rebuild = type(t)
    for arrangement in permutations(t):
        for signed in permute_signs(arrangement):
            yield rebuild(signed)
[ "def", "signed_permutations", "(", "t", ")", ":", "return", "(", "type", "(", "t", ")", "(", "i", ")", "for", "j", "in", "permutations", "(", "t", ")", "for", "i", "in", "permute_signs", "(", "j", ")", ")" ]
return iterator in which the signs of non-zero elements of t and the order of the elements are permuted .
train
false
53,529
def update_record_field(table, sys_id, field, value):
    """Set a single field on a ServiceNow record and return the API response.

    table: ServiceNow table name; sys_id: record identifier; field/value:
    the column to update and its new value.
    """
    client = _get_client()
    client.table = table
    payload = {field: value}
    return client.update(payload, sys_id)
[ "def", "update_record_field", "(", "table", ",", "sys_id", ",", "field", ",", "value", ")", ":", "client", "=", "_get_client", "(", ")", "client", ".", "table", "=", "table", "response", "=", "client", ".", "update", "(", "{", "field", ":", "value", "}", ",", "sys_id", ")", "return", "response" ]
update the value of a records field in a servicenow table .
train
true
53,531
def write_course_block_detail_report(course_data):
    """Write xblock_course_detail.csv with one row per (course, xblock type)
    whose instance count is non-zero.

    course_data: iterable of per-course dicts keyed by the *_KEY module
    constants.  Returns nothing.
    """
    with open('xblock_course_detail.csv', 'wb') as csvfile:
        detail_writer = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
        detail_writer.writerow(['XBLOCK_TYPE_NAME', 'COURSE_NAME', 'COURSE_ID', 'COURSE_START', 'COURSE_END', 'NUM_XBLOCK_INSTANCES'])
        for course in course_data:
            # BUGFIX: the default here was [], which has no .items() and would
            # raise AttributeError for a course missing BLOCK_COUNTS_KEY.
            for (block_type, count) in course.get(BLOCK_COUNTS_KEY, {}).items():
                if count > 0:
                    detail_writer.writerow([block_type, course.get(COURSE_NAME_KEY, '').encode('utf-8'), course.get(COURSE_ID_KEY, ''), course.get(COURSE_START_KEY, ''), course.get(COURSE_END_KEY, ''), str(count)])
    # BUGFIX: dropped the redundant csvfile.close() -- the with statement
    # already closes the file on exit.
[ "def", "write_course_block_detail_report", "(", "course_data", ")", ":", "with", "open", "(", "'xblock_course_detail.csv'", ",", "'wb'", ")", "as", "csvfile", ":", "detail_writer", "=", "csv", ".", "writer", "(", "csvfile", ",", "delimiter", "=", "','", ",", "quotechar", "=", "'\"'", ",", "quoting", "=", "csv", ".", "QUOTE_ALL", ")", "detail_writer", ".", "writerow", "(", "[", "'XBLOCK_TYPE_NAME'", ",", "'COURSE_NAME'", ",", "'COURSE_ID'", ",", "'COURSE_START'", ",", "'COURSE_END'", ",", "'NUM_XBLOCK_INSTANCES'", "]", ")", "for", "course", "in", "course_data", ":", "for", "(", "block_type", ",", "count", ")", "in", "course", ".", "get", "(", "BLOCK_COUNTS_KEY", ",", "[", "]", ")", ".", "items", "(", ")", ":", "if", "(", "count", ">", "0", ")", ":", "detail_writer", ".", "writerow", "(", "[", "block_type", ",", "course", ".", "get", "(", "COURSE_NAME_KEY", ",", "''", ")", ".", "encode", "(", "'utf-8'", ")", ",", "course", ".", "get", "(", "COURSE_ID_KEY", ",", "''", ")", ",", "course", ".", "get", "(", "COURSE_START_KEY", ",", "''", ")", ",", "course", ".", "get", "(", "COURSE_END_KEY", ",", "''", ")", ",", "str", "(", "count", ")", "]", ")", "csvfile", ".", "close", "(", ")" ]
generate a csv file containing the detailed information about the xblocks available per course arguments: course_data : a list of course_data objects returns: nothing .
train
false
53,532
def set_session(session):
    """Install *session* as the module-global TF session (_SESSION)."""
    global _SESSION
    _SESSION = session
[ "def", "set_session", "(", "session", ")", ":", "global", "_SESSION", "_SESSION", "=", "session" ]
sets the global tf session .
train
false
53,533
def create_categories(cr, categories): p_id = None category = [] while categories: category.append(categories[0]) xml_id = ('module_category_' + '_'.join(map((lambda x: x.lower()), category)).replace('&', 'and').replace(' ', '_')) cr.execute('SELECT res_id FROM ir_model_data WHERE name=%s AND module=%s AND model=%s', (xml_id, 'base', 'ir.module.category')) c_id = cr.fetchone() if (not c_id): cr.execute('INSERT INTO ir_module_category (name, parent_id) VALUES (%s, %s) RETURNING id', (categories[0], p_id)) c_id = cr.fetchone()[0] cr.execute('INSERT INTO ir_model_data (module, name, res_id, model) VALUES (%s, %s, %s, %s)', ('base', xml_id, c_id, 'ir.module.category')) else: c_id = c_id[0] p_id = c_id categories = categories[1:] return p_id
[ "def", "create_categories", "(", "cr", ",", "categories", ")", ":", "p_id", "=", "None", "category", "=", "[", "]", "while", "categories", ":", "category", ".", "append", "(", "categories", "[", "0", "]", ")", "xml_id", "=", "(", "'module_category_'", "+", "'_'", ".", "join", "(", "map", "(", "(", "lambda", "x", ":", "x", ".", "lower", "(", ")", ")", ",", "category", ")", ")", ".", "replace", "(", "'&'", ",", "'and'", ")", ".", "replace", "(", "' '", ",", "'_'", ")", ")", "cr", ".", "execute", "(", "'SELECT res_id FROM ir_model_data WHERE name=%s AND module=%s AND model=%s'", ",", "(", "xml_id", ",", "'base'", ",", "'ir.module.category'", ")", ")", "c_id", "=", "cr", ".", "fetchone", "(", ")", "if", "(", "not", "c_id", ")", ":", "cr", ".", "execute", "(", "'INSERT INTO ir_module_category (name, parent_id) VALUES (%s, %s) RETURNING id'", ",", "(", "categories", "[", "0", "]", ",", "p_id", ")", ")", "c_id", "=", "cr", ".", "fetchone", "(", ")", "[", "0", "]", "cr", ".", "execute", "(", "'INSERT INTO ir_model_data (module, name, res_id, model) VALUES (%s, %s, %s, %s)'", ",", "(", "'base'", ",", "xml_id", ",", "c_id", ",", "'ir.module.category'", ")", ")", "else", ":", "c_id", "=", "c_id", "[", "0", "]", "p_id", "=", "c_id", "categories", "=", "categories", "[", "1", ":", "]", "return", "p_id" ]
create the ir_module_category entries for some categories .
train
false
53,534
def flip_vertical(request, fileobjects): transpose_image(request, fileobjects, 1)
[ "def", "flip_vertical", "(", "request", ",", "fileobjects", ")", ":", "transpose_image", "(", "request", ",", "fileobjects", ",", "1", ")" ]
flip image vertically .
train
false
53,535
def test_renn_not_good_object(): nn = 'rnd' renn = RepeatedEditedNearestNeighbours(n_neighbors=nn, random_state=RND_SEED, kind_sel='mode') assert_raises(ValueError, renn.fit_sample, X, Y)
[ "def", "test_renn_not_good_object", "(", ")", ":", "nn", "=", "'rnd'", "renn", "=", "RepeatedEditedNearestNeighbours", "(", "n_neighbors", "=", "nn", ",", "random_state", "=", "RND_SEED", ",", "kind_sel", "=", "'mode'", ")", "assert_raises", "(", "ValueError", ",", "renn", ".", "fit_sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised while a wrong type of nn is given .
train
false
53,536
def format_header(caller, entry): width = (_HEADER_WIDTH - 10) if (caller.ndb.batch_batchmode != 'batch_commands'): entry = _RE_CODE_START.split(entry, 1)[1] entry = _RE_COMMENT.sub('', entry).strip() header = utils.crop(entry, width=width) ptr = (caller.ndb.batch_stackptr + 1) stacklen = len(caller.ndb.batch_stack) header = ('{w%02i/%02i{G: %s{n' % (ptr, stacklen, header)) header = ('%s%s' % (header, (' ' * (width - len(header))))) header = header.replace('\n', '\\n') return header
[ "def", "format_header", "(", "caller", ",", "entry", ")", ":", "width", "=", "(", "_HEADER_WIDTH", "-", "10", ")", "if", "(", "caller", ".", "ndb", ".", "batch_batchmode", "!=", "'batch_commands'", ")", ":", "entry", "=", "_RE_CODE_START", ".", "split", "(", "entry", ",", "1", ")", "[", "1", "]", "entry", "=", "_RE_COMMENT", ".", "sub", "(", "''", ",", "entry", ")", ".", "strip", "(", ")", "header", "=", "utils", ".", "crop", "(", "entry", ",", "width", "=", "width", ")", "ptr", "=", "(", "caller", ".", "ndb", ".", "batch_stackptr", "+", "1", ")", "stacklen", "=", "len", "(", "caller", ".", "ndb", ".", "batch_stack", ")", "header", "=", "(", "'{w%02i/%02i{G: %s{n'", "%", "(", "ptr", ",", "stacklen", ",", "header", ")", ")", "header", "=", "(", "'%s%s'", "%", "(", "header", ",", "(", "' '", "*", "(", "width", "-", "len", "(", "header", ")", ")", ")", ")", ")", "header", "=", "header", ".", "replace", "(", "'\\n'", ",", "'\\\\n'", ")", "return", "header" ]
formats a header .
train
false
53,537
def getlocale(category=LC_CTYPE): localename = _setlocale(category) if ((category == LC_ALL) and (';' in localename)): raise TypeError, 'category LC_ALL is not supported' return _parse_localename(localename)
[ "def", "getlocale", "(", "category", "=", "LC_CTYPE", ")", ":", "localename", "=", "_setlocale", "(", "category", ")", "if", "(", "(", "category", "==", "LC_ALL", ")", "and", "(", "';'", "in", "localename", ")", ")", ":", "raise", "TypeError", ",", "'category LC_ALL is not supported'", "return", "_parse_localename", "(", "localename", ")" ]
returns the current setting for the given locale category as tuple .
train
false
53,538
def _count_diff_all_purpose(actual, expected): (s, t) = (list(actual), list(expected)) (m, n) = (len(s), len(t)) NULL = object() result = [] for (i, elem) in enumerate(s): if (elem is NULL): continue cnt_s = cnt_t = 0 for j in range(i, m): if (s[j] == elem): cnt_s += 1 s[j] = NULL for (j, other_elem) in enumerate(t): if (other_elem == elem): cnt_t += 1 t[j] = NULL if (cnt_s != cnt_t): diff = _Mismatch(cnt_s, cnt_t, elem) result.append(diff) for (i, elem) in enumerate(t): if (elem is NULL): continue cnt_t = 0 for j in range(i, n): if (t[j] == elem): cnt_t += 1 t[j] = NULL diff = _Mismatch(0, cnt_t, elem) result.append(diff) return result
[ "def", "_count_diff_all_purpose", "(", "actual", ",", "expected", ")", ":", "(", "s", ",", "t", ")", "=", "(", "list", "(", "actual", ")", ",", "list", "(", "expected", ")", ")", "(", "m", ",", "n", ")", "=", "(", "len", "(", "s", ")", ",", "len", "(", "t", ")", ")", "NULL", "=", "object", "(", ")", "result", "=", "[", "]", "for", "(", "i", ",", "elem", ")", "in", "enumerate", "(", "s", ")", ":", "if", "(", "elem", "is", "NULL", ")", ":", "continue", "cnt_s", "=", "cnt_t", "=", "0", "for", "j", "in", "range", "(", "i", ",", "m", ")", ":", "if", "(", "s", "[", "j", "]", "==", "elem", ")", ":", "cnt_s", "+=", "1", "s", "[", "j", "]", "=", "NULL", "for", "(", "j", ",", "other_elem", ")", "in", "enumerate", "(", "t", ")", ":", "if", "(", "other_elem", "==", "elem", ")", ":", "cnt_t", "+=", "1", "t", "[", "j", "]", "=", "NULL", "if", "(", "cnt_s", "!=", "cnt_t", ")", ":", "diff", "=", "_Mismatch", "(", "cnt_s", ",", "cnt_t", ",", "elem", ")", "result", ".", "append", "(", "diff", ")", "for", "(", "i", ",", "elem", ")", "in", "enumerate", "(", "t", ")", ":", "if", "(", "elem", "is", "NULL", ")", ":", "continue", "cnt_t", "=", "0", "for", "j", "in", "range", "(", "i", ",", "n", ")", ":", "if", "(", "t", "[", "j", "]", "==", "elem", ")", ":", "cnt_t", "+=", "1", "t", "[", "j", "]", "=", "NULL", "diff", "=", "_Mismatch", "(", "0", ",", "cnt_t", ",", "elem", ")", "result", ".", "append", "(", "diff", ")", "return", "result" ]
returns list of triples where the counts differ .
train
false
53,539
def regex_tuple_from_key_alias(obj): return (re.compile(ordered_permutation_regex(' '.join(([obj.key] + obj.aliases.all()))), _RE_FLAGS), obj, obj.key)
[ "def", "regex_tuple_from_key_alias", "(", "obj", ")", ":", "return", "(", "re", ".", "compile", "(", "ordered_permutation_regex", "(", "' '", ".", "join", "(", "(", "[", "obj", ".", "key", "]", "+", "obj", ".", "aliases", ".", "all", "(", ")", ")", ")", ")", ",", "_RE_FLAGS", ")", ",", "obj", ",", "obj", ".", "key", ")" ]
this will build a regex tuple for any object .
train
false
53,540
def runtests(args=None): import pytest import os try: import faulthandler faulthandler.enable() except ImportError: pass rootdir = os.path.join(os.path.dirname(__file__), os.pardir) os.chdir(rootdir) return pytest.main(args=args)
[ "def", "runtests", "(", "args", "=", "None", ")", ":", "import", "pytest", "import", "os", "try", ":", "import", "faulthandler", "faulthandler", ".", "enable", "(", ")", "except", "ImportError", ":", "pass", "rootdir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "os", ".", "pardir", ")", "os", ".", "chdir", "(", "rootdir", ")", "return", "pytest", ".", "main", "(", "args", "=", "args", ")" ]
run the selected tests .
train
false
53,541
def PresentDialog_Confirm_Call(message): return call(message, [u'Ok', u'Cancel'])
[ "def", "PresentDialog_Confirm_Call", "(", "message", ")", ":", "return", "call", "(", "message", ",", "[", "u'Ok'", ",", "u'Cancel'", "]", ")" ]
return a mock .
train
false
53,542
@requires_h5py def test_io_stc_h5(): tempdir = _TempDir() stc = _fake_stc() assert_raises(ValueError, stc.save, op.join(tempdir, 'tmp'), ftype='foo') out_name = op.join(tempdir, 'tmp') stc.save(out_name, ftype='h5') stc.save(out_name, ftype='h5') stc3 = read_source_estimate(out_name) stc4 = read_source_estimate((out_name + '-stc.h5')) assert_raises(RuntimeError, read_source_estimate, out_name, subject='bar') for stc_new in (stc3, stc4): assert_equal(stc_new.subject, stc.subject) assert_array_equal(stc_new.data, stc.data) assert_array_equal(stc_new.tmin, stc.tmin) assert_array_equal(stc_new.tstep, stc.tstep) assert_equal(len(stc_new.vertices), len(stc.vertices)) for (v1, v2) in zip(stc_new.vertices, stc.vertices): assert_array_equal(v1, v2)
[ "@", "requires_h5py", "def", "test_io_stc_h5", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "stc", "=", "_fake_stc", "(", ")", "assert_raises", "(", "ValueError", ",", "stc", ".", "save", ",", "op", ".", "join", "(", "tempdir", ",", "'tmp'", ")", ",", "ftype", "=", "'foo'", ")", "out_name", "=", "op", ".", "join", "(", "tempdir", ",", "'tmp'", ")", "stc", ".", "save", "(", "out_name", ",", "ftype", "=", "'h5'", ")", "stc", ".", "save", "(", "out_name", ",", "ftype", "=", "'h5'", ")", "stc3", "=", "read_source_estimate", "(", "out_name", ")", "stc4", "=", "read_source_estimate", "(", "(", "out_name", "+", "'-stc.h5'", ")", ")", "assert_raises", "(", "RuntimeError", ",", "read_source_estimate", ",", "out_name", ",", "subject", "=", "'bar'", ")", "for", "stc_new", "in", "(", "stc3", ",", "stc4", ")", ":", "assert_equal", "(", "stc_new", ".", "subject", ",", "stc", ".", "subject", ")", "assert_array_equal", "(", "stc_new", ".", "data", ",", "stc", ".", "data", ")", "assert_array_equal", "(", "stc_new", ".", "tmin", ",", "stc", ".", "tmin", ")", "assert_array_equal", "(", "stc_new", ".", "tstep", ",", "stc", ".", "tstep", ")", "assert_equal", "(", "len", "(", "stc_new", ".", "vertices", ")", ",", "len", "(", "stc", ".", "vertices", ")", ")", "for", "(", "v1", ",", "v2", ")", "in", "zip", "(", "stc_new", ".", "vertices", ",", "stc", ".", "vertices", ")", ":", "assert_array_equal", "(", "v1", ",", "v2", ")" ]
test io for stc files using hdf5 .
train
false
53,543
def intranges_from_list(list_): sorted_list = sorted(list_) ranges = [] last_write = (-1) for i in range(len(sorted_list)): if ((i + 1) < len(sorted_list)): if (sorted_list[i] == (sorted_list[(i + 1)] - 1)): continue current_range = sorted_list[(last_write + 1):(i + 1)] range_tuple = (current_range[0], (current_range[(-1)] + 1)) ranges.append(range_tuple) last_write = i return tuple(ranges)
[ "def", "intranges_from_list", "(", "list_", ")", ":", "sorted_list", "=", "sorted", "(", "list_", ")", "ranges", "=", "[", "]", "last_write", "=", "(", "-", "1", ")", "for", "i", "in", "range", "(", "len", "(", "sorted_list", ")", ")", ":", "if", "(", "(", "i", "+", "1", ")", "<", "len", "(", "sorted_list", ")", ")", ":", "if", "(", "sorted_list", "[", "i", "]", "==", "(", "sorted_list", "[", "(", "i", "+", "1", ")", "]", "-", "1", ")", ")", ":", "continue", "current_range", "=", "sorted_list", "[", "(", "last_write", "+", "1", ")", ":", "(", "i", "+", "1", ")", "]", "range_tuple", "=", "(", "current_range", "[", "0", "]", ",", "(", "current_range", "[", "(", "-", "1", ")", "]", "+", "1", ")", ")", "ranges", ".", "append", "(", "range_tuple", ")", "last_write", "=", "i", "return", "tuple", "(", "ranges", ")" ]
represent a list of integers as a sequence of ranges: ( .
train
true
53,545
def set_name_by_naming_series(doc): if (not doc.naming_series): doc.naming_series = get_default_naming_series(doc.doctype) if (not doc.naming_series): frappe.throw(frappe._(u'Naming Series mandatory')) doc.name = make_autoname((doc.naming_series + u'.#####'), u'', doc)
[ "def", "set_name_by_naming_series", "(", "doc", ")", ":", "if", "(", "not", "doc", ".", "naming_series", ")", ":", "doc", ".", "naming_series", "=", "get_default_naming_series", "(", "doc", ".", "doctype", ")", "if", "(", "not", "doc", ".", "naming_series", ")", ":", "frappe", ".", "throw", "(", "frappe", ".", "_", "(", "u'Naming Series mandatory'", ")", ")", "doc", ".", "name", "=", "make_autoname", "(", "(", "doc", ".", "naming_series", "+", "u'.#####'", ")", ",", "u''", ",", "doc", ")" ]
sets name by the naming_series property .
train
false
53,546
def _qnwsimp1(n, a, b): if ((n % 2) == 0): print 'WARNING qnwsimp: n must be an odd integer. Increasing by 1' n += 1 nodes = np.linspace(a, b, n) dx = (nodes[1] - nodes[0]) weights = np.tile([2.0, 4.0], ((n + 1.0) / 2.0)) weights = weights[:n] weights[0] = weights[(-1)] = 1 weights = ((dx / 3.0) * weights) return (nodes, weights)
[ "def", "_qnwsimp1", "(", "n", ",", "a", ",", "b", ")", ":", "if", "(", "(", "n", "%", "2", ")", "==", "0", ")", ":", "print", "'WARNING qnwsimp: n must be an odd integer. Increasing by 1'", "n", "+=", "1", "nodes", "=", "np", ".", "linspace", "(", "a", ",", "b", ",", "n", ")", "dx", "=", "(", "nodes", "[", "1", "]", "-", "nodes", "[", "0", "]", ")", "weights", "=", "np", ".", "tile", "(", "[", "2.0", ",", "4.0", "]", ",", "(", "(", "n", "+", "1.0", ")", "/", "2.0", ")", ")", "weights", "=", "weights", "[", ":", "n", "]", "weights", "[", "0", "]", "=", "weights", "[", "(", "-", "1", ")", "]", "=", "1", "weights", "=", "(", "(", "dx", "/", "3.0", ")", "*", "weights", ")", "return", "(", "nodes", ",", "weights", ")" ]
compute univariate simpson quadrature nodes and weights parameters n : int the number of nodes a : int the lower endpoint b : int the upper endpoint returns nodes : np .
train
false
53,547
def store_ctx(x): if (not hasattr(x, 'ctx')): return x.ctx = ast.Store() if isinstance(x, (ast.Tuple, ast.List)): for e in x.elts: store_ctx(e) elif isinstance(x, ast.Starred): store_ctx(x.value)
[ "def", "store_ctx", "(", "x", ")", ":", "if", "(", "not", "hasattr", "(", "x", ",", "'ctx'", ")", ")", ":", "return", "x", ".", "ctx", "=", "ast", ".", "Store", "(", ")", "if", "isinstance", "(", "x", ",", "(", "ast", ".", "Tuple", ",", "ast", ".", "List", ")", ")", ":", "for", "e", "in", "x", ".", "elts", ":", "store_ctx", "(", "e", ")", "elif", "isinstance", "(", "x", ",", "ast", ".", "Starred", ")", ":", "store_ctx", "(", "x", ".", "value", ")" ]
recursively sets ctx to ast .
train
false
53,548
def _nested_output(obj): from salt.output import nested nested.__opts__ = {} ret = nested.output(obj).rstrip() return ret
[ "def", "_nested_output", "(", "obj", ")", ":", "from", "salt", ".", "output", "import", "nested", "nested", ".", "__opts__", "=", "{", "}", "ret", "=", "nested", ".", "output", "(", "obj", ")", ".", "rstrip", "(", ")", "return", "ret" ]
serialize obj and format for output .
train
true
53,549
def ValidateAttributes(tag, attributes, goodattributes): all_good = True for attr in attributes.keys(): if (not (attr in goodattributes)): output.Error(('Unknown %s attribute: %s' % (tag, attr))) all_good = False return all_good
[ "def", "ValidateAttributes", "(", "tag", ",", "attributes", ",", "goodattributes", ")", ":", "all_good", "=", "True", "for", "attr", "in", "attributes", ".", "keys", "(", ")", ":", "if", "(", "not", "(", "attr", "in", "goodattributes", ")", ")", ":", "output", ".", "Error", "(", "(", "'Unknown %s attribute: %s'", "%", "(", "tag", ",", "attr", ")", ")", ")", "all_good", "=", "False", "return", "all_good" ]
makes sure attributes does not contain any attribute not listed in goodattributes .
train
false
53,550
def _add_inline_definition(item, statement): global _current_statement backup = _current_statement (type_, options) = _expand_one_key_dictionary(item) _current_statement = UnnamedStatement(type=type_) _parse_statement(options) statement.add_child(_current_statement) _current_statement = backup
[ "def", "_add_inline_definition", "(", "item", ",", "statement", ")", ":", "global", "_current_statement", "backup", "=", "_current_statement", "(", "type_", ",", "options", ")", "=", "_expand_one_key_dictionary", "(", "item", ")", "_current_statement", "=", "UnnamedStatement", "(", "type", "=", "type_", ")", "_parse_statement", "(", "options", ")", "statement", ".", "add_child", "(", "_current_statement", ")", "_current_statement", "=", "backup" ]
adds an inline definition to statement .
train
true
53,551
def zkensemble(): from django.conf import settings if ('zookeeper' in settings.INSTALLED_APPS): try: from zookeeper.conf import CLUSTERS clusters = CLUSTERS.get() if (clusters['default'].HOST_PORTS.get() != 'localhost:2181'): return ('%s' % clusters['default'].HOST_PORTS.get()) except: LOG.warn('Could not get zookeeper ensemble from the zookeeper app') if ('search' in settings.INSTALLED_APPS): try: from search.conf import SOLR_URL parsed = urlparse(SOLR_URL.get()) return ('%s:2181' % (parsed.hostname or 'localhost')) except: LOG.warn('Could not get zookeeper ensemble from the search app') return 'localhost:2181'
[ "def", "zkensemble", "(", ")", ":", "from", "django", ".", "conf", "import", "settings", "if", "(", "'zookeeper'", "in", "settings", ".", "INSTALLED_APPS", ")", ":", "try", ":", "from", "zookeeper", ".", "conf", "import", "CLUSTERS", "clusters", "=", "CLUSTERS", ".", "get", "(", ")", "if", "(", "clusters", "[", "'default'", "]", ".", "HOST_PORTS", ".", "get", "(", ")", "!=", "'localhost:2181'", ")", ":", "return", "(", "'%s'", "%", "clusters", "[", "'default'", "]", ".", "HOST_PORTS", ".", "get", "(", ")", ")", "except", ":", "LOG", ".", "warn", "(", "'Could not get zookeeper ensemble from the zookeeper app'", ")", "if", "(", "'search'", "in", "settings", ".", "INSTALLED_APPS", ")", ":", "try", ":", "from", "search", ".", "conf", "import", "SOLR_URL", "parsed", "=", "urlparse", "(", "SOLR_URL", ".", "get", "(", ")", ")", "return", "(", "'%s:2181'", "%", "(", "parsed", ".", "hostname", "or", "'localhost'", ")", ")", "except", ":", "LOG", ".", "warn", "(", "'Could not get zookeeper ensemble from the search app'", ")", "return", "'localhost:2181'" ]
try to guess the value if no values are specified .
train
false
53,553
def _block2d_to_blocknd(values, placement, shape, labels, ref_items): from pandas.core.internals import make_block panel_shape = ((len(placement),) + shape) selector = _factor_indexer(shape[1:], labels) mask = np.zeros(np.prod(shape), dtype=bool) mask.put(selector, True) if mask.all(): pvalues = np.empty(panel_shape, dtype=values.dtype) else: (dtype, fill_value) = _maybe_promote(values.dtype) pvalues = np.empty(panel_shape, dtype=dtype) pvalues.fill(fill_value) values = values for i in range(len(placement)): pvalues[i].flat[mask] = values[:, i] return make_block(pvalues, placement=placement)
[ "def", "_block2d_to_blocknd", "(", "values", ",", "placement", ",", "shape", ",", "labels", ",", "ref_items", ")", ":", "from", "pandas", ".", "core", ".", "internals", "import", "make_block", "panel_shape", "=", "(", "(", "len", "(", "placement", ")", ",", ")", "+", "shape", ")", "selector", "=", "_factor_indexer", "(", "shape", "[", "1", ":", "]", ",", "labels", ")", "mask", "=", "np", ".", "zeros", "(", "np", ".", "prod", "(", "shape", ")", ",", "dtype", "=", "bool", ")", "mask", ".", "put", "(", "selector", ",", "True", ")", "if", "mask", ".", "all", "(", ")", ":", "pvalues", "=", "np", ".", "empty", "(", "panel_shape", ",", "dtype", "=", "values", ".", "dtype", ")", "else", ":", "(", "dtype", ",", "fill_value", ")", "=", "_maybe_promote", "(", "values", ".", "dtype", ")", "pvalues", "=", "np", ".", "empty", "(", "panel_shape", ",", "dtype", "=", "dtype", ")", "pvalues", ".", "fill", "(", "fill_value", ")", "values", "=", "values", "for", "i", "in", "range", "(", "len", "(", "placement", ")", ")", ":", "pvalues", "[", "i", "]", ".", "flat", "[", "mask", "]", "=", "values", "[", ":", ",", "i", "]", "return", "make_block", "(", "pvalues", ",", "placement", "=", "placement", ")" ]
pivot to the labels shape .
train
false
53,555
def reload_config(): _env_reloader.update()
[ "def", "reload_config", "(", ")", ":", "_env_reloader", ".", "update", "(", ")" ]
reload the configuration from environment variables .
train
false
53,556
def encode_morse(msg, sep='|', mapping=None): mapping = (mapping or char_morse) assert (sep not in mapping) word_sep = (2 * sep) mapping[' '] = word_sep suffix = (msg and (msg[(-1)] in whitespace)) msg = (' ' if word_sep else '').join(msg.split()) chars = set(''.join(msg.split())) ok = set(mapping.keys()) msg = translate(msg, None, ''.join((chars - ok))) morsestring = [] words = msg.split() for word in words: morseword = [] for letter in word: morseletter = mapping[letter] morseword.append(morseletter) word = sep.join(morseword) morsestring.append(word) return (word_sep.join(morsestring) + (word_sep if suffix else ''))
[ "def", "encode_morse", "(", "msg", ",", "sep", "=", "'|'", ",", "mapping", "=", "None", ")", ":", "mapping", "=", "(", "mapping", "or", "char_morse", ")", "assert", "(", "sep", "not", "in", "mapping", ")", "word_sep", "=", "(", "2", "*", "sep", ")", "mapping", "[", "' '", "]", "=", "word_sep", "suffix", "=", "(", "msg", "and", "(", "msg", "[", "(", "-", "1", ")", "]", "in", "whitespace", ")", ")", "msg", "=", "(", "' '", "if", "word_sep", "else", "''", ")", ".", "join", "(", "msg", ".", "split", "(", ")", ")", "chars", "=", "set", "(", "''", ".", "join", "(", "msg", ".", "split", "(", ")", ")", ")", "ok", "=", "set", "(", "mapping", ".", "keys", "(", ")", ")", "msg", "=", "translate", "(", "msg", ",", "None", ",", "''", ".", "join", "(", "(", "chars", "-", "ok", ")", ")", ")", "morsestring", "=", "[", "]", "words", "=", "msg", ".", "split", "(", ")", "for", "word", "in", "words", ":", "morseword", "=", "[", "]", "for", "letter", "in", "word", ":", "morseletter", "=", "mapping", "[", "letter", "]", "morseword", ".", "append", "(", "morseletter", ")", "word", "=", "sep", ".", "join", "(", "morseword", ")", "morsestring", ".", "append", "(", "word", ")", "return", "(", "word_sep", ".", "join", "(", "morsestring", ")", "+", "(", "word_sep", "if", "suffix", "else", "''", ")", ")" ]
encodes a plaintext into popular morse code with letters separated by sep and words by a double sep .
train
false
53,557
def test_boolean_field_choices_with_real_model_instances(): class BoolModelChoices(models.Model, ): field = models.BooleanField(choices=((True, u'Yes'), (False, u'No'))) class Meta: app_label = u'django_tables2_test' class Table(tables.Table, ): class Meta: model = BoolModelChoices table = Table([BoolModelChoices(field=True), BoolModelChoices(field=False)]) assert (table.rows[0].get_cell(u'field') == u'<span class="true">\u2714</span>') assert (table.rows[1].get_cell(u'field') == u'<span class="false">\u2718</span>')
[ "def", "test_boolean_field_choices_with_real_model_instances", "(", ")", ":", "class", "BoolModelChoices", "(", "models", ".", "Model", ",", ")", ":", "field", "=", "models", ".", "BooleanField", "(", "choices", "=", "(", "(", "True", ",", "u'Yes'", ")", ",", "(", "False", ",", "u'No'", ")", ")", ")", "class", "Meta", ":", "app_label", "=", "u'django_tables2_test'", "class", "Table", "(", "tables", ".", "Table", ",", ")", ":", "class", "Meta", ":", "model", "=", "BoolModelChoices", "table", "=", "Table", "(", "[", "BoolModelChoices", "(", "field", "=", "True", ")", ",", "BoolModelChoices", "(", "field", "=", "False", ")", "]", ")", "assert", "(", "table", ".", "rows", "[", "0", "]", ".", "get_cell", "(", "u'field'", ")", "==", "u'<span class=\"true\">\\u2714</span>'", ")", "assert", "(", "table", ".", "rows", "[", "1", "]", ".", "get_cell", "(", "u'field'", ")", "==", "u'<span class=\"false\">\\u2718</span>'", ")" ]
if a booleanfield has choices defined .
train
false
53,558
def p_constant_expression(t): pass
[ "def", "p_constant_expression", "(", "t", ")", ":", "pass" ]
constant_expression : conditional_expression .
train
false
53,561
def _toggle_proj(event, params): if ('proj_checks' in params): bools = [x[0].get_visible() for x in params['proj_checks'].lines] for (bi, (b, p)) in enumerate(zip(bools, params['projs'])): if ((not b) and p['active']): bools[bi] = True else: bools = ([True] * len(params['projs'])) compute_proj = False if ('proj_bools' not in params): compute_proj = True elif (not np.array_equal(bools, params['proj_bools'])): compute_proj = True if (compute_proj is True): params['plot_update_proj_callback'](params, bools)
[ "def", "_toggle_proj", "(", "event", ",", "params", ")", ":", "if", "(", "'proj_checks'", "in", "params", ")", ":", "bools", "=", "[", "x", "[", "0", "]", ".", "get_visible", "(", ")", "for", "x", "in", "params", "[", "'proj_checks'", "]", ".", "lines", "]", "for", "(", "bi", ",", "(", "b", ",", "p", ")", ")", "in", "enumerate", "(", "zip", "(", "bools", ",", "params", "[", "'projs'", "]", ")", ")", ":", "if", "(", "(", "not", "b", ")", "and", "p", "[", "'active'", "]", ")", ":", "bools", "[", "bi", "]", "=", "True", "else", ":", "bools", "=", "(", "[", "True", "]", "*", "len", "(", "params", "[", "'projs'", "]", ")", ")", "compute_proj", "=", "False", "if", "(", "'proj_bools'", "not", "in", "params", ")", ":", "compute_proj", "=", "True", "elif", "(", "not", "np", ".", "array_equal", "(", "bools", ",", "params", "[", "'proj_bools'", "]", ")", ")", ":", "compute_proj", "=", "True", "if", "(", "compute_proj", "is", "True", ")", ":", "params", "[", "'plot_update_proj_callback'", "]", "(", "params", ",", "bools", ")" ]
operation to perform when proj boxes clicked .
train
false
53,562
def negate_real(builder, val): return builder.fsub(lc.Constant.real(val.type, (-0.0)), val)
[ "def", "negate_real", "(", "builder", ",", "val", ")", ":", "return", "builder", ".", "fsub", "(", "lc", ".", "Constant", ".", "real", "(", "val", ".", "type", ",", "(", "-", "0.0", ")", ")", ",", "val", ")" ]
negate real number *val* .
train
false
53,563
def download_zlib(dest_dir, version): version_re = re.compile('zlib-([0-9.]+[0-9]).tar.gz') filename = 'zlib-%s.tar.gz' return download_library(dest_dir, ZLIB_LOCATION, 'zlib', version_re, filename, version=version)
[ "def", "download_zlib", "(", "dest_dir", ",", "version", ")", ":", "version_re", "=", "re", ".", "compile", "(", "'zlib-([0-9.]+[0-9]).tar.gz'", ")", "filename", "=", "'zlib-%s.tar.gz'", "return", "download_library", "(", "dest_dir", ",", "ZLIB_LOCATION", ",", "'zlib'", ",", "version_re", ",", "filename", ",", "version", "=", "version", ")" ]
downloads zlib .
train
false
53,564
def build_auxiliary_node_connectivity(G): directed = G.is_directed() mapping = {} H = nx.DiGraph() for (i, node) in enumerate(G): mapping[node] = i H.add_node(('%dA' % i), id=node) H.add_node(('%dB' % i), id=node) H.add_edge(('%dA' % i), ('%dB' % i), capacity=1) edges = [] for (source, target) in G.edges(): edges.append((('%sB' % mapping[source]), ('%sA' % mapping[target]))) if (not directed): edges.append((('%sB' % mapping[target]), ('%sA' % mapping[source]))) H.add_edges_from(edges, capacity=1) H.graph['mapping'] = mapping return H
[ "def", "build_auxiliary_node_connectivity", "(", "G", ")", ":", "directed", "=", "G", ".", "is_directed", "(", ")", "mapping", "=", "{", "}", "H", "=", "nx", ".", "DiGraph", "(", ")", "for", "(", "i", ",", "node", ")", "in", "enumerate", "(", "G", ")", ":", "mapping", "[", "node", "]", "=", "i", "H", ".", "add_node", "(", "(", "'%dA'", "%", "i", ")", ",", "id", "=", "node", ")", "H", ".", "add_node", "(", "(", "'%dB'", "%", "i", ")", ",", "id", "=", "node", ")", "H", ".", "add_edge", "(", "(", "'%dA'", "%", "i", ")", ",", "(", "'%dB'", "%", "i", ")", ",", "capacity", "=", "1", ")", "edges", "=", "[", "]", "for", "(", "source", ",", "target", ")", "in", "G", ".", "edges", "(", ")", ":", "edges", ".", "append", "(", "(", "(", "'%sB'", "%", "mapping", "[", "source", "]", ")", ",", "(", "'%sA'", "%", "mapping", "[", "target", "]", ")", ")", ")", "if", "(", "not", "directed", ")", ":", "edges", ".", "append", "(", "(", "(", "'%sB'", "%", "mapping", "[", "target", "]", ")", ",", "(", "'%sA'", "%", "mapping", "[", "source", "]", ")", ")", ")", "H", ".", "add_edges_from", "(", "edges", ",", "capacity", "=", "1", ")", "H", ".", "graph", "[", "'mapping'", "]", "=", "mapping", "return", "H" ]
creates a directed graph d from an undirected graph g to compute flow based node connectivity .
train
false
53,565
def get_mem_info_linux(): info = {} with open('/proc/meminfo', 'r') as f: for line in f: p = line.split() info[p[0].strip(':').lower()] = (float(p[1]) * 1000.0) return info
[ "def", "get_mem_info_linux", "(", ")", ":", "info", "=", "{", "}", "with", "open", "(", "'/proc/meminfo'", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "p", "=", "line", ".", "split", "(", ")", "info", "[", "p", "[", "0", "]", ".", "strip", "(", "':'", ")", ".", "lower", "(", ")", "]", "=", "(", "float", "(", "p", "[", "1", "]", ")", "*", "1000.0", ")", "return", "info" ]
get information about available memory .
train
false
53,566
def _mask_to_limits(a, limits, inclusive): (lower_limit, upper_limit) = limits (lower_include, upper_include) = inclusive am = ma.MaskedArray(a) if (lower_limit is not None): if lower_include: am = ma.masked_less(am, lower_limit) else: am = ma.masked_less_equal(am, lower_limit) if (upper_limit is not None): if upper_include: am = ma.masked_greater(am, upper_limit) else: am = ma.masked_greater_equal(am, upper_limit) if (am.count() == 0): raise ValueError('No array values within given limits') return am
[ "def", "_mask_to_limits", "(", "a", ",", "limits", ",", "inclusive", ")", ":", "(", "lower_limit", ",", "upper_limit", ")", "=", "limits", "(", "lower_include", ",", "upper_include", ")", "=", "inclusive", "am", "=", "ma", ".", "MaskedArray", "(", "a", ")", "if", "(", "lower_limit", "is", "not", "None", ")", ":", "if", "lower_include", ":", "am", "=", "ma", ".", "masked_less", "(", "am", ",", "lower_limit", ")", "else", ":", "am", "=", "ma", ".", "masked_less_equal", "(", "am", ",", "lower_limit", ")", "if", "(", "upper_limit", "is", "not", "None", ")", ":", "if", "upper_include", ":", "am", "=", "ma", ".", "masked_greater", "(", "am", ",", "upper_limit", ")", "else", ":", "am", "=", "ma", ".", "masked_greater_equal", "(", "am", ",", "upper_limit", ")", "if", "(", "am", ".", "count", "(", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'No array values within given limits'", ")", "return", "am" ]
mask an array for values outside of given limits .
train
false
53,567
def _split_digest_auth(data): values = [] curdata = [] state = 0 for char in data: if (state == 0): if (char == ','): values.append(''.join(curdata).strip()) curdata = [] else: if (char == '"'): state = 1 curdata.append(char) elif (state == 1): if (char == '"'): state = 0 curdata.append(char) values.append(''.join(curdata).strip()) if ((state == 1) and config.DEBUG): sys.stderr.write(('IVRE: WARNING: could not parse Digest auth data [%r]' % data)) return values
[ "def", "_split_digest_auth", "(", "data", ")", ":", "values", "=", "[", "]", "curdata", "=", "[", "]", "state", "=", "0", "for", "char", "in", "data", ":", "if", "(", "state", "==", "0", ")", ":", "if", "(", "char", "==", "','", ")", ":", "values", ".", "append", "(", "''", ".", "join", "(", "curdata", ")", ".", "strip", "(", ")", ")", "curdata", "=", "[", "]", "else", ":", "if", "(", "char", "==", "'\"'", ")", ":", "state", "=", "1", "curdata", ".", "append", "(", "char", ")", "elif", "(", "state", "==", "1", ")", ":", "if", "(", "char", "==", "'\"'", ")", ":", "state", "=", "0", "curdata", ".", "append", "(", "char", ")", "values", ".", "append", "(", "''", ".", "join", "(", "curdata", ")", ".", "strip", "(", ")", ")", "if", "(", "(", "state", "==", "1", ")", "and", "config", ".", "DEBUG", ")", ":", "sys", ".", "stderr", ".", "write", "(", "(", "'IVRE: WARNING: could not parse Digest auth data [%r]'", "%", "data", ")", ")", "return", "values" ]
this function handles authorization: digest values .
train
false
53,568
def check_named(option, opt, value): if isinstance(value, dict): return value values = [] for value in check_csv(option, opt, value): if (value.find('=') != (-1)): values.append(value.split('=', 1)) elif (value.find(':') != (-1)): values.append(value.split(':', 1)) if values: return dict(values) msg = 'option %s: invalid named value %r, should be <NAME>=<VALUE> or <NAME>:<VALUE>' raise OptionValueError((msg % (opt, value)))
[ "def", "check_named", "(", "option", ",", "opt", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "return", "value", "values", "=", "[", "]", "for", "value", "in", "check_csv", "(", "option", ",", "opt", ",", "value", ")", ":", "if", "(", "value", ".", "find", "(", "'='", ")", "!=", "(", "-", "1", ")", ")", ":", "values", ".", "append", "(", "value", ".", "split", "(", "'='", ",", "1", ")", ")", "elif", "(", "value", ".", "find", "(", "':'", ")", "!=", "(", "-", "1", ")", ")", ":", "values", ".", "append", "(", "value", ".", "split", "(", "':'", ",", "1", ")", ")", "if", "values", ":", "return", "dict", "(", "values", ")", "msg", "=", "'option %s: invalid named value %r, should be <NAME>=<VALUE> or <NAME>:<VALUE>'", "raise", "OptionValueError", "(", "(", "msg", "%", "(", "opt", ",", "value", ")", ")", ")" ]
check a named value return a dictionary containing associations .
train
false
53,570
def strip_version(idstr): parts = idstr.split('v') return parts[0]
[ "def", "strip_version", "(", "idstr", ")", ":", "parts", "=", "idstr", ".", "split", "(", "'v'", ")", "return", "parts", "[", "0", "]" ]
identity function if arxiv id has no version .
train
false
53,571
def open_tasks_for_project(): def prep(r): tablename = 'project_project' s3.crud_strings[tablename].title_list = T('Open Tasks for Project') s3.crud_labels.READ = s3.crud_labels.UPDATE = T('Select') s3db.configure(tablename, deletable=False, listadd=False) return True s3.prep = prep def postp(r, output): if (r.interactive and (not r.component)): tasklist_url = URL(f='task', vars={'project': '[id]'}) s3_action_buttons(r, deletable=False, read_url=tasklist_url, update_url=tasklist_url) return output s3.postp = postp return s3_rest_controller(module, 'project', hide_filter=False)
[ "def", "open_tasks_for_project", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "tablename", "=", "'project_project'", "s3", ".", "crud_strings", "[", "tablename", "]", ".", "title_list", "=", "T", "(", "'Open Tasks for Project'", ")", "s3", ".", "crud_labels", ".", "READ", "=", "s3", ".", "crud_labels", ".", "UPDATE", "=", "T", "(", "'Select'", ")", "s3db", ".", "configure", "(", "tablename", ",", "deletable", "=", "False", ",", "listadd", "=", "False", ")", "return", "True", "s3", ".", "prep", "=", "prep", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "(", "r", ".", "interactive", "and", "(", "not", "r", ".", "component", ")", ")", ":", "tasklist_url", "=", "URL", "(", "f", "=", "'task'", ",", "vars", "=", "{", "'project'", ":", "'[id]'", "}", ")", "s3_action_buttons", "(", "r", ",", "deletable", "=", "False", ",", "read_url", "=", "tasklist_url", ",", "update_url", "=", "tasklist_url", ")", "return", "output", "s3", ".", "postp", "=", "postp", "return", "s3_rest_controller", "(", "module", ",", "'project'", ",", "hide_filter", "=", "False", ")" ]
simplified controller to select a project and open the list of open tasks for it .
train
false
53,572
def date_range(start_date, end_date=None, num=None, delta=None): if (not delta): return [] if (end_date and (start_date > end_date)): raise Exception(u'Wait. start_date needs to be before end_date') if (end_date and num): raise Exception(u'Wait. Either specify end_date OR num') if ((not end_date) and (not num)): end_date = datetime.now() delta_iscron = False if isinstance(delta, six.string_types): delta_iscron = True cron = croniter(delta, start_date) elif isinstance(delta, timedelta): delta = abs(delta) l = [] if end_date: while (start_date <= end_date): l.append(start_date) if delta_iscron: start_date = cron.get_next(datetime) else: start_date += delta else: for i in range(abs(num)): l.append(start_date) if delta_iscron: if (num > 0): start_date = cron.get_next(datetime) else: start_date = cron.get_prev(datetime) elif (num > 0): start_date += delta else: start_date -= delta return sorted(l)
[ "def", "date_range", "(", "start_date", ",", "end_date", "=", "None", ",", "num", "=", "None", ",", "delta", "=", "None", ")", ":", "if", "(", "not", "delta", ")", ":", "return", "[", "]", "if", "(", "end_date", "and", "(", "start_date", ">", "end_date", ")", ")", ":", "raise", "Exception", "(", "u'Wait. start_date needs to be before end_date'", ")", "if", "(", "end_date", "and", "num", ")", ":", "raise", "Exception", "(", "u'Wait. Either specify end_date OR num'", ")", "if", "(", "(", "not", "end_date", ")", "and", "(", "not", "num", ")", ")", ":", "end_date", "=", "datetime", ".", "now", "(", ")", "delta_iscron", "=", "False", "if", "isinstance", "(", "delta", ",", "six", ".", "string_types", ")", ":", "delta_iscron", "=", "True", "cron", "=", "croniter", "(", "delta", ",", "start_date", ")", "elif", "isinstance", "(", "delta", ",", "timedelta", ")", ":", "delta", "=", "abs", "(", "delta", ")", "l", "=", "[", "]", "if", "end_date", ":", "while", "(", "start_date", "<=", "end_date", ")", ":", "l", ".", "append", "(", "start_date", ")", "if", "delta_iscron", ":", "start_date", "=", "cron", ".", "get_next", "(", "datetime", ")", "else", ":", "start_date", "+=", "delta", "else", ":", "for", "i", "in", "range", "(", "abs", "(", "num", ")", ")", ":", "l", ".", "append", "(", "start_date", ")", "if", "delta_iscron", ":", "if", "(", "num", ">", "0", ")", ":", "start_date", "=", "cron", ".", "get_next", "(", "datetime", ")", "else", ":", "start_date", "=", "cron", ".", "get_prev", "(", "datetime", ")", "elif", "(", "num", ">", "0", ")", ":", "start_date", "+=", "delta", "else", ":", "start_date", "-=", "delta", "return", "sorted", "(", "l", ")" ]
return a fixed frequency datetime index .
train
false
53,573
def p_external_declaration_2(t): pass
[ "def", "p_external_declaration_2", "(", "t", ")", ":", "pass" ]
external_declaration : declaration .
train
false
53,574
def update_cached_instance(sender, instance, **kwargs): if (not hasattr(instance, 'cache_instance')): return sender.cache_instance(instance)
[ "def", "update_cached_instance", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "if", "(", "not", "hasattr", "(", "instance", ",", "'cache_instance'", ")", ")", ":", "return", "sender", ".", "cache_instance", "(", "instance", ")" ]
re-cache the given instance in the idmapper cache .
train
false
53,575
def blank_lines(logical_line, blank_lines, indent_level, line_number, previous_logical, previous_indent_level): if ((line_number < 3) and (not previous_logical)): return if previous_logical.startswith('@'): if blank_lines: (yield (0, 'E304 blank lines found after function decorator')) elif ((blank_lines > 2) or (indent_level and (blank_lines == 2))): (yield (0, ('E303 too many blank lines (%d)' % blank_lines))) elif logical_line.startswith(('def ', 'class ', '@')): if indent_level: if (not (blank_lines or (previous_indent_level < indent_level) or DOCSTRING_REGEX.match(previous_logical))): (yield (0, 'E301 expected 1 blank line, found 0')) elif (blank_lines != 2): (yield (0, ('E302 expected 2 blank lines, found %d' % blank_lines)))
[ "def", "blank_lines", "(", "logical_line", ",", "blank_lines", ",", "indent_level", ",", "line_number", ",", "previous_logical", ",", "previous_indent_level", ")", ":", "if", "(", "(", "line_number", "<", "3", ")", "and", "(", "not", "previous_logical", ")", ")", ":", "return", "if", "previous_logical", ".", "startswith", "(", "'@'", ")", ":", "if", "blank_lines", ":", "(", "yield", "(", "0", ",", "'E304 blank lines found after function decorator'", ")", ")", "elif", "(", "(", "blank_lines", ">", "2", ")", "or", "(", "indent_level", "and", "(", "blank_lines", "==", "2", ")", ")", ")", ":", "(", "yield", "(", "0", ",", "(", "'E303 too many blank lines (%d)'", "%", "blank_lines", ")", ")", ")", "elif", "logical_line", ".", "startswith", "(", "(", "'def '", ",", "'class '", ",", "'@'", ")", ")", ":", "if", "indent_level", ":", "if", "(", "not", "(", "blank_lines", "or", "(", "previous_indent_level", "<", "indent_level", ")", "or", "DOCSTRING_REGEX", ".", "match", "(", "previous_logical", ")", ")", ")", ":", "(", "yield", "(", "0", ",", "'E301 expected 1 blank line, found 0'", ")", ")", "elif", "(", "blank_lines", "!=", "2", ")", ":", "(", "yield", "(", "0", ",", "(", "'E302 expected 2 blank lines, found %d'", "%", "blank_lines", ")", ")", ")" ]
separate top-level function and class definitions with two blank lines .
train
false
53,578
def sdm_LC(f, K): if (not f): return K.zero else: return f[0][1]
[ "def", "sdm_LC", "(", "f", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "return", "K", ".", "zero", "else", ":", "return", "f", "[", "0", "]", "[", "1", "]" ]
returns the leading coeffcient of f .
train
false
53,580
@retry(exception=(EnvironmentError, AssertionError), logfun=None, timeout=GLOBAL_TIMEOUT, interval=0.001) def wait_for_file(fname, delete_file=True, empty=False): with open(fname, 'rb') as f: data = f.read() if (not empty): assert data if delete_file: os.remove(fname) return data
[ "@", "retry", "(", "exception", "=", "(", "EnvironmentError", ",", "AssertionError", ")", ",", "logfun", "=", "None", ",", "timeout", "=", "GLOBAL_TIMEOUT", ",", "interval", "=", "0.001", ")", "def", "wait_for_file", "(", "fname", ",", "delete_file", "=", "True", ",", "empty", "=", "False", ")", ":", "with", "open", "(", "fname", ",", "'rb'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "if", "(", "not", "empty", ")", ":", "assert", "data", "if", "delete_file", ":", "os", ".", "remove", "(", "fname", ")", "return", "data" ]
wait for a file to be written on disk with some content .
train
false
53,581
def py_encode_basestring_ascii(s, _PY3=PY3): if _PY3: if isinstance(s, binary_type): s = s.decode('utf-8') if (type(s) is not text_type): s = text_type(s) else: if (isinstance(s, str) and (HAS_UTF8.search(s) is not None)): s = s.decode('utf-8') if (type(s) not in string_types): s = text_type(s) def replace(match): s = match.group(0) try: return ESCAPE_DCT[s] except KeyError: n = ord(s) if (n < 65536): return ('\\u%04x' % (n,)) else: n -= 65536 s1 = (55296 | ((n >> 10) & 1023)) s2 = (56320 | (n & 1023)) return ('\\u%04x\\u%04x' % (s1, s2)) return (('"' + str(ESCAPE_ASCII.sub(replace, s))) + '"')
[ "def", "py_encode_basestring_ascii", "(", "s", ",", "_PY3", "=", "PY3", ")", ":", "if", "_PY3", ":", "if", "isinstance", "(", "s", ",", "binary_type", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "if", "(", "type", "(", "s", ")", "is", "not", "text_type", ")", ":", "s", "=", "text_type", "(", "s", ")", "else", ":", "if", "(", "isinstance", "(", "s", ",", "str", ")", "and", "(", "HAS_UTF8", ".", "search", "(", "s", ")", "is", "not", "None", ")", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "if", "(", "type", "(", "s", ")", "not", "in", "string_types", ")", ":", "s", "=", "text_type", "(", "s", ")", "def", "replace", "(", "match", ")", ":", "s", "=", "match", ".", "group", "(", "0", ")", "try", ":", "return", "ESCAPE_DCT", "[", "s", "]", "except", "KeyError", ":", "n", "=", "ord", "(", "s", ")", "if", "(", "n", "<", "65536", ")", ":", "return", "(", "'\\\\u%04x'", "%", "(", "n", ",", ")", ")", "else", ":", "n", "-=", "65536", "s1", "=", "(", "55296", "|", "(", "(", "n", ">>", "10", ")", "&", "1023", ")", ")", "s2", "=", "(", "56320", "|", "(", "n", "&", "1023", ")", ")", "return", "(", "'\\\\u%04x\\\\u%04x'", "%", "(", "s1", ",", "s2", ")", ")", "return", "(", "(", "'\"'", "+", "str", "(", "ESCAPE_ASCII", ".", "sub", "(", "replace", ",", "s", ")", ")", ")", "+", "'\"'", ")" ]
return an ascii-only json representation of a python string .
train
true
53,583
def dump_and_add_to_dump(object_, file_, parameters=None, to_add=None, use_cpickle=False, protocol=DEFAULT_PROTOCOL, **kwargs): dump(object_, file_, parameters=parameters, use_cpickle=use_cpickle, protocol=protocol, **kwargs) if (to_add is not None): for (name, obj) in six.iteritems(to_add): add_to_dump(obj, file_, name, parameters=parameters, use_cpickle=use_cpickle, protocol=protocol, **kwargs)
[ "def", "dump_and_add_to_dump", "(", "object_", ",", "file_", ",", "parameters", "=", "None", ",", "to_add", "=", "None", ",", "use_cpickle", "=", "False", ",", "protocol", "=", "DEFAULT_PROTOCOL", ",", "**", "kwargs", ")", ":", "dump", "(", "object_", ",", "file_", ",", "parameters", "=", "parameters", ",", "use_cpickle", "=", "use_cpickle", ",", "protocol", "=", "protocol", ",", "**", "kwargs", ")", "if", "(", "to_add", "is", "not", "None", ")", ":", "for", "(", "name", ",", "obj", ")", "in", "six", ".", "iteritems", "(", "to_add", ")", ":", "add_to_dump", "(", "obj", ",", "file_", ",", "name", ",", "parameters", "=", "parameters", ",", "use_cpickle", "=", "use_cpickle", ",", "protocol", "=", "protocol", ",", "**", "kwargs", ")" ]
calls both dump and add_to_dump to serialze several objects .
train
false
53,586
@handle_response_format @treeio_login_required def service_add(request, response_format='html'): if (not request.user.profile.is_admin('treeio.services')): return user_denied(request, message="You don't have administrator access to the Service Support module") if request.POST: if ('cancel' not in request.POST): service = Service() form = ServiceForm(request.user.profile, request.POST, instance=service) if form.is_valid(): service = form.save() service.set_user_from_request(request) return HttpResponseRedirect(reverse('services_service_view', args=[service.id])) else: return HttpResponseRedirect(reverse('services')) else: form = ServiceForm(request.user.profile) context = _get_default_context(request) context.update({'form': form}) return render_to_response('services/service_add', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "service_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.services'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have administrator access to the Service Support module\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "service", "=", "Service", "(", ")", "form", "=", "ServiceForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "service", ")", "if", "form", ".", "is_valid", "(", ")", ":", "service", "=", "form", ".", "save", "(", ")", "service", ".", "set_user_from_request", "(", "request", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_service_view'", ",", "args", "=", "[", "service", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services'", ")", ")", "else", ":", "form", "=", "ServiceForm", "(", "request", ".", "user", ".", "profile", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'form'", ":", "form", "}", ")", "return", "render_to_response", "(", "'services/service_add'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
service add .
train
false
53,587
def openshift_deploy_canceller(registry, xml_parent, data): osb = XML.SubElement(xml_parent, 'com.openshift.jenkins.plugins.pipeline.OpenShiftDeployCanceller') mapping = [('api-url', 'apiURL', 'https://openshift.default.svc.cluster.local'), ('dep-cfg', 'depCfg', 'frontend'), ('namespace', 'namespace', 'test'), ('auth-token', 'authToken', ''), ('verbose', 'verbose', False)] helpers.convert_mapping_to_xml(osb, data, mapping, fail_required=True)
[ "def", "openshift_deploy_canceller", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "osb", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.openshift.jenkins.plugins.pipeline.OpenShiftDeployCanceller'", ")", "mapping", "=", "[", "(", "'api-url'", ",", "'apiURL'", ",", "'https://openshift.default.svc.cluster.local'", ")", ",", "(", "'dep-cfg'", ",", "'depCfg'", ",", "'frontend'", ")", ",", "(", "'namespace'", ",", "'namespace'", ",", "'test'", ")", ",", "(", "'auth-token'", ",", "'authToken'", ",", "''", ")", ",", "(", "'verbose'", ",", "'verbose'", ",", "False", ")", "]", "helpers", ".", "convert_mapping_to_xml", "(", "osb", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")" ]
yaml: openshift-deploy-canceller this action is intended to provide cleanup for any openshift deployments left running when the job completes; this step will allow you to perform the equivalent of a oc deploy --cancel for the provided deployment config .
train
false
53,588
@register.filter @stringfilter def cut(value, arg): safe = isinstance(value, SafeData) value = value.replace(arg, '') if (safe and (arg != ';')): return mark_safe(value) return value
[ "@", "register", ".", "filter", "@", "stringfilter", "def", "cut", "(", "value", ",", "arg", ")", ":", "safe", "=", "isinstance", "(", "value", ",", "SafeData", ")", "value", "=", "value", ".", "replace", "(", "arg", ",", "''", ")", "if", "(", "safe", "and", "(", "arg", "!=", "';'", ")", ")", ":", "return", "mark_safe", "(", "value", ")", "return", "value" ]
removes all values of arg from the given string .
train
false
53,590
def encoded_hash(sha): return urlsafe_b64encode(sha.digest()).decode('ascii').rstrip('=')
[ "def", "encoded_hash", "(", "sha", ")", ":", "return", "urlsafe_b64encode", "(", "sha", ".", "digest", "(", ")", ")", ".", "decode", "(", "'ascii'", ")", ".", "rstrip", "(", "'='", ")" ]
return a short .
train
false
53,591
def getRadiusAverage(radiusComplex): return math.sqrt((radiusComplex.real * radiusComplex.imag))
[ "def", "getRadiusAverage", "(", "radiusComplex", ")", ":", "return", "math", ".", "sqrt", "(", "(", "radiusComplex", ".", "real", "*", "radiusComplex", ".", "imag", ")", ")" ]
get average radius from radiuscomplex .
train
false
53,592
def simplefilter(action, category=Warning, lineno=0, append=0): assert (action in ('error', 'ignore', 'always', 'default', 'module', 'once')), ('invalid action: %r' % (action,)) assert (isinstance(lineno, int) and (lineno >= 0)), 'lineno must be an int >= 0' item = (action, None, category, None, lineno) if append: filters.append(item) else: filters.insert(0, item)
[ "def", "simplefilter", "(", "action", ",", "category", "=", "Warning", ",", "lineno", "=", "0", ",", "append", "=", "0", ")", ":", "assert", "(", "action", "in", "(", "'error'", ",", "'ignore'", ",", "'always'", ",", "'default'", ",", "'module'", ",", "'once'", ")", ")", ",", "(", "'invalid action: %r'", "%", "(", "action", ",", ")", ")", "assert", "(", "isinstance", "(", "lineno", ",", "int", ")", "and", "(", "lineno", ">=", "0", ")", ")", ",", "'lineno must be an int >= 0'", "item", "=", "(", "action", ",", "None", ",", "category", ",", "None", ",", "lineno", ")", "if", "append", ":", "filters", ".", "append", "(", "item", ")", "else", ":", "filters", ".", "insert", "(", "0", ",", "item", ")" ]
decorator that converts a function into a filter:: @simplefilter def lowercase: for ttype .
train
true
53,593
def tests_get_by_job_idx(job_idx): return Test.objects.filter(job=job_idx)
[ "def", "tests_get_by_job_idx", "(", "job_idx", ")", ":", "return", "Test", ".", "objects", ".", "filter", "(", "job", "=", "job_idx", ")" ]
returns all tests based on its job idx .
train
false
53,595
def xsym(sym): op = _xsym[sym] if _use_unicode: return op[1] else: return op[0]
[ "def", "xsym", "(", "sym", ")", ":", "op", "=", "_xsym", "[", "sym", "]", "if", "_use_unicode", ":", "return", "op", "[", "1", "]", "else", ":", "return", "op", "[", "0", "]" ]
get symbology for a character .
train
false
53,596
def require(source_module, target_module, all_macros=False, assignments={}, prefix=''): seen_names = set() if prefix: prefix += '.' for d in (_hy_macros, _hy_reader): for (name, macro) in d[source_module].items(): seen_names.add(name) if all_macros: d[target_module][(prefix + name)] = macro elif (name in assignments): d[target_module][(prefix + assignments[name])] = macro if (not all_macros): unseen = frozenset(assignments.keys()).difference(seen_names) if unseen: raise ImportError(('cannot require names: ' + repr(list(unseen))))
[ "def", "require", "(", "source_module", ",", "target_module", ",", "all_macros", "=", "False", ",", "assignments", "=", "{", "}", ",", "prefix", "=", "''", ")", ":", "seen_names", "=", "set", "(", ")", "if", "prefix", ":", "prefix", "+=", "'.'", "for", "d", "in", "(", "_hy_macros", ",", "_hy_reader", ")", ":", "for", "(", "name", ",", "macro", ")", "in", "d", "[", "source_module", "]", ".", "items", "(", ")", ":", "seen_names", ".", "add", "(", "name", ")", "if", "all_macros", ":", "d", "[", "target_module", "]", "[", "(", "prefix", "+", "name", ")", "]", "=", "macro", "elif", "(", "name", "in", "assignments", ")", ":", "d", "[", "target_module", "]", "[", "(", "prefix", "+", "assignments", "[", "name", "]", ")", "]", "=", "macro", "if", "(", "not", "all_macros", ")", ":", "unseen", "=", "frozenset", "(", "assignments", ".", "keys", "(", ")", ")", ".", "difference", "(", "seen_names", ")", "if", "unseen", ":", "raise", "ImportError", "(", "(", "'cannot require names: '", "+", "repr", "(", "list", "(", "unseen", ")", ")", ")", ")" ]
clear out divisions where required components are not present in left .
train
false
53,597
def _SecToUsec(t): return int((t * 1000000.0))
[ "def", "_SecToUsec", "(", "t", ")", ":", "return", "int", "(", "(", "t", "*", "1000000.0", ")", ")" ]
converts a time in seconds since the epoch to usec since the epoch .
train
false
53,598
def MapItemsIterator(function, items): return ItemsIterator(items=itertools.imap(function, items), total_count=items.total_count)
[ "def", "MapItemsIterator", "(", "function", ",", "items", ")", ":", "return", "ItemsIterator", "(", "items", "=", "itertools", ".", "imap", "(", "function", ",", "items", ")", ",", "total_count", "=", "items", ".", "total_count", ")" ]
maps itemsiterator via given function .
train
false
53,599
def cpu_freq(): (curr, max_) = cext.cpu_freq() min_ = 0.0 return [_common.scpufreq(float(curr), min_, float(max_))]
[ "def", "cpu_freq", "(", ")", ":", "(", "curr", ",", "max_", ")", "=", "cext", ".", "cpu_freq", "(", ")", "min_", "=", "0.0", "return", "[", "_common", ".", "scpufreq", "(", "float", "(", "curr", ")", ",", "min_", ",", "float", "(", "max_", ")", ")", "]" ]
return cpu frequency .
train
false
53,601
def save_sent_email(crispin_client, account_id, message_id): remote_save_sent(crispin_client, account_id, message_id)
[ "def", "save_sent_email", "(", "crispin_client", ",", "account_id", ",", "message_id", ")", ":", "remote_save_sent", "(", "crispin_client", ",", "account_id", ",", "message_id", ")" ]
create an email on the remote backend .
train
false
53,602
def safe_minidom_parse_string(xml_string): try: return minidom.parseString(xml_string, parser=ProtectedExpatParser()) except sax.SAXParseException as se: raise expat.ExpatError()
[ "def", "safe_minidom_parse_string", "(", "xml_string", ")", ":", "try", ":", "return", "minidom", ".", "parseString", "(", "xml_string", ",", "parser", "=", "ProtectedExpatParser", "(", ")", ")", "except", "sax", ".", "SAXParseException", "as", "se", ":", "raise", "expat", ".", "ExpatError", "(", ")" ]
parse an xml string using minidom safely .
train
false
53,603
def dent(individual, lambda_=0.85): d = (lambda_ * exp((- ((individual[0] - individual[1]) ** 2)))) f1 = ((0.5 * (((sqrt((1 + ((individual[0] + individual[1]) ** 2))) + sqrt((1 + ((individual[0] - individual[1]) ** 2)))) + individual[0]) - individual[1])) + d) f2 = ((0.5 * (((sqrt((1 + ((individual[0] + individual[1]) ** 2))) + sqrt((1 + ((individual[0] - individual[1]) ** 2)))) - individual[0]) + individual[1])) + d) return (f1, f2)
[ "def", "dent", "(", "individual", ",", "lambda_", "=", "0.85", ")", ":", "d", "=", "(", "lambda_", "*", "exp", "(", "(", "-", "(", "(", "individual", "[", "0", "]", "-", "individual", "[", "1", "]", ")", "**", "2", ")", ")", ")", ")", "f1", "=", "(", "(", "0.5", "*", "(", "(", "(", "sqrt", "(", "(", "1", "+", "(", "(", "individual", "[", "0", "]", "+", "individual", "[", "1", "]", ")", "**", "2", ")", ")", ")", "+", "sqrt", "(", "(", "1", "+", "(", "(", "individual", "[", "0", "]", "-", "individual", "[", "1", "]", ")", "**", "2", ")", ")", ")", ")", "+", "individual", "[", "0", "]", ")", "-", "individual", "[", "1", "]", ")", ")", "+", "d", ")", "f2", "=", "(", "(", "0.5", "*", "(", "(", "(", "sqrt", "(", "(", "1", "+", "(", "(", "individual", "[", "0", "]", "+", "individual", "[", "1", "]", ")", "**", "2", ")", ")", ")", "+", "sqrt", "(", "(", "1", "+", "(", "(", "individual", "[", "0", "]", "-", "individual", "[", "1", "]", ")", "**", "2", ")", ")", ")", ")", "-", "individual", "[", "0", "]", ")", "+", "individual", "[", "1", "]", ")", ")", "+", "d", ")", "return", "(", "f1", ",", "f2", ")" ]
test problem dent .
train
false
53,604
def hours(h): return (h / 24.0)
[ "def", "hours", "(", "h", ")", ":", "return", "(", "h", "/", "24.0", ")" ]
return hours as days .
train
false
53,605
def createFactoryCopy(state): stateId = state.get('id', None) if (stateId is None): raise RuntimeError(("factory copy state has no 'id' member %s" % (repr(state),))) if (not (stateId in SimpleFactoryCopy.allIDs)): raise RuntimeError(('factory class has no ID: %s' % (SimpleFactoryCopy.allIDs,))) inst = SimpleFactoryCopy.allIDs[stateId] if (not inst): raise RuntimeError('factory method found no object with id') return inst
[ "def", "createFactoryCopy", "(", "state", ")", ":", "stateId", "=", "state", ".", "get", "(", "'id'", ",", "None", ")", "if", "(", "stateId", "is", "None", ")", ":", "raise", "RuntimeError", "(", "(", "\"factory copy state has no 'id' member %s\"", "%", "(", "repr", "(", "state", ")", ",", ")", ")", ")", "if", "(", "not", "(", "stateId", "in", "SimpleFactoryCopy", ".", "allIDs", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'factory class has no ID: %s'", "%", "(", "SimpleFactoryCopy", ".", "allIDs", ",", ")", ")", ")", "inst", "=", "SimpleFactoryCopy", ".", "allIDs", "[", "stateId", "]", "if", "(", "not", "inst", ")", ":", "raise", "RuntimeError", "(", "'factory method found no object with id'", ")", "return", "inst" ]
factory of l{simplefactorycopy} .
train
false
53,606
def _warn_node(self, msg, node, *args, **kwargs): if (not msg.startswith('nonlocal image URI found:')): self._warnfunc(msg, ('%s:%s' % get_source_line(node)), *args, **kwargs)
[ "def", "_warn_node", "(", "self", ",", "msg", ",", "node", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "msg", ".", "startswith", "(", "'nonlocal image URI found:'", ")", ")", ":", "self", ".", "_warnfunc", "(", "msg", ",", "(", "'%s:%s'", "%", "get_source_line", "(", "node", ")", ")", ",", "*", "args", ",", "**", "kwargs", ")" ]
mute warnings that are like warning: nonlocal image uri found: URL .
train
true
53,607
def setup_masquerade(request, course_key, staff_access=False, reset_masquerade_data=False): if ((request.user is None) or (not settings.FEATURES.get('ENABLE_MASQUERADE', False)) or (not staff_access)): return (None, request.user) if reset_masquerade_data: request.session.pop(MASQUERADE_DATA_KEY, None) masquerade_settings = request.session.setdefault(MASQUERADE_SETTINGS_KEY, {}) request.user.masquerade_settings = masquerade_settings course_masquerade = masquerade_settings.get(course_key, None) masquerade_user = None if (course_masquerade and course_masquerade.user_name): try: masquerade_user = CourseEnrollment.objects.users_enrolled_in(course_key).get(username=course_masquerade.user_name) except User.DoesNotExist: course_masquerade = None del masquerade_settings[course_key] request.session.modified = True else: masquerade_user.masquerade_settings = request.user.masquerade_settings masquerade_user.real_user = request.user return (course_masquerade, (masquerade_user or request.user))
[ "def", "setup_masquerade", "(", "request", ",", "course_key", ",", "staff_access", "=", "False", ",", "reset_masquerade_data", "=", "False", ")", ":", "if", "(", "(", "request", ".", "user", "is", "None", ")", "or", "(", "not", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_MASQUERADE'", ",", "False", ")", ")", "or", "(", "not", "staff_access", ")", ")", ":", "return", "(", "None", ",", "request", ".", "user", ")", "if", "reset_masquerade_data", ":", "request", ".", "session", ".", "pop", "(", "MASQUERADE_DATA_KEY", ",", "None", ")", "masquerade_settings", "=", "request", ".", "session", ".", "setdefault", "(", "MASQUERADE_SETTINGS_KEY", ",", "{", "}", ")", "request", ".", "user", ".", "masquerade_settings", "=", "masquerade_settings", "course_masquerade", "=", "masquerade_settings", ".", "get", "(", "course_key", ",", "None", ")", "masquerade_user", "=", "None", "if", "(", "course_masquerade", "and", "course_masquerade", ".", "user_name", ")", ":", "try", ":", "masquerade_user", "=", "CourseEnrollment", ".", "objects", ".", "users_enrolled_in", "(", "course_key", ")", ".", "get", "(", "username", "=", "course_masquerade", ".", "user_name", ")", "except", "User", ".", "DoesNotExist", ":", "course_masquerade", "=", "None", "del", "masquerade_settings", "[", "course_key", "]", "request", ".", "session", ".", "modified", "=", "True", "else", ":", "masquerade_user", ".", "masquerade_settings", "=", "request", ".", "user", ".", "masquerade_settings", "masquerade_user", ".", "real_user", "=", "request", ".", "user", "return", "(", "course_masquerade", ",", "(", "masquerade_user", "or", "request", ".", "user", ")", ")" ]
sets up masquerading for the current user within the current request .
train
false
53,608
def init_widgets(): post_save.connect(_increment_sync_num, sender=Group) post_save.connect(_increment_sync_num, sender=Repository) post_delete.connect(_increment_sync_num, sender=Group) post_delete.connect(_increment_sync_num, sender=Repository)
[ "def", "init_widgets", "(", ")", ":", "post_save", ".", "connect", "(", "_increment_sync_num", ",", "sender", "=", "Group", ")", "post_save", ".", "connect", "(", "_increment_sync_num", ",", "sender", "=", "Repository", ")", "post_delete", ".", "connect", "(", "_increment_sync_num", ",", "sender", "=", "Group", ")", "post_delete", ".", "connect", "(", "_increment_sync_num", ",", "sender", "=", "Repository", ")" ]
initialize the widgets subsystem .
train
false
53,610
def get_type_hints(obj, globalns=None, localns=None): return None
[ "def", "get_type_hints", "(", "obj", ",", "globalns", "=", "None", ",", "localns", "=", "None", ")", ":", "return", "None" ]
in python 2 this is not supported and always returns none .
train
false
53,611
def ms_payload(payload): return {'1': 'windows/shell_reverse_tcp', '2': 'windows/meterpreter/reverse_tcp', '3': 'windows/vncinject/reverse_tcp', '4': 'windows/x64/shell_reverse_tcp', '5': 'windows/x64/meterpreter/reverse_tcp', '6': 'windows/meterpreter/reverse_tcp_allports', '7': 'windows/meterpreter/reverse_https', '8': 'windows/meterpreter/reverse_tcp_dns', '9': 'windows/download_exec'}.get(payload, 'ERROR')
[ "def", "ms_payload", "(", "payload", ")", ":", "return", "{", "'1'", ":", "'windows/shell_reverse_tcp'", ",", "'2'", ":", "'windows/meterpreter/reverse_tcp'", ",", "'3'", ":", "'windows/vncinject/reverse_tcp'", ",", "'4'", ":", "'windows/x64/shell_reverse_tcp'", ",", "'5'", ":", "'windows/x64/meterpreter/reverse_tcp'", ",", "'6'", ":", "'windows/meterpreter/reverse_tcp_allports'", ",", "'7'", ":", "'windows/meterpreter/reverse_https'", ",", "'8'", ":", "'windows/meterpreter/reverse_tcp_dns'", ",", "'9'", ":", "'windows/download_exec'", "}", ".", "get", "(", "payload", ",", "'ERROR'", ")" ]
receives the input given by the user from create_payload .
train
false
53,613
@check_feature_enabled(feature_name='ENTRANCE_EXAMS') def create_entrance_exam(request, course_key, entrance_exam_minimum_score_pct): _delete_entrance_exam(request, course_key) return _create_entrance_exam(request=request, course_key=course_key, entrance_exam_minimum_score_pct=entrance_exam_minimum_score_pct)
[ "@", "check_feature_enabled", "(", "feature_name", "=", "'ENTRANCE_EXAMS'", ")", "def", "create_entrance_exam", "(", "request", ",", "course_key", ",", "entrance_exam_minimum_score_pct", ")", ":", "_delete_entrance_exam", "(", "request", ",", "course_key", ")", "return", "_create_entrance_exam", "(", "request", "=", "request", ",", "course_key", "=", "course_key", ",", "entrance_exam_minimum_score_pct", "=", "entrance_exam_minimum_score_pct", ")" ]
api method to create an entrance exam .
train
false
53,614
def request_authenticate(request, username, password): request.headers['Authorization'] = basic_auth_header(username, password)
[ "def", "request_authenticate", "(", "request", ",", "username", ",", "password", ")", ":", "request", ".", "headers", "[", "'Authorization'", "]", "=", "basic_auth_header", "(", "username", ",", "password", ")" ]
authenticate the given request using the http basic access authentication mechanism and the given username and password .
train
false
53,615
def siva(x, y): print x, y (x, y) = (y, x) print x, y
[ "def", "siva", "(", "x", ",", "y", ")", ":", "print", "x", ",", "y", "(", "x", ",", "y", ")", "=", "(", "y", ",", "x", ")", "print", "x", ",", "y" ]
made me fall in love with python .
train
false
53,616
def servicegroup_add(sg_name, sg_type='HTTP', **connection_args): ret = True if servicegroup_exists(sg_name): return False nitro = _connect(**connection_args) if (nitro is None): return False sg = NSServiceGroup() sg.set_servicegroupname(sg_name) sg.set_servicetype(sg_type.upper()) try: NSServiceGroup.add(nitro, sg) except NSNitroError as error: log.debug('netscaler module error - NSServiceGroup.add() failed: {0}'.format(error)) ret = False _disconnect(nitro) return ret
[ "def", "servicegroup_add", "(", "sg_name", ",", "sg_type", "=", "'HTTP'", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "if", "servicegroup_exists", "(", "sg_name", ")", ":", "return", "False", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "sg", "=", "NSServiceGroup", "(", ")", "sg", ".", "set_servicegroupname", "(", "sg_name", ")", "sg", ".", "set_servicetype", "(", "sg_type", ".", "upper", "(", ")", ")", "try", ":", "NSServiceGroup", ".", "add", "(", "nitro", ",", "sg", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSServiceGroup.add() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
add a new service group . if no service type is specified , http is used .
train
true
53,617
def _is_ipv4_like(s): parts = s.split('.') if (len(parts) != 4): return False for part in parts: try: int(part) except ValueError: return False return True
[ "def", "_is_ipv4_like", "(", "s", ")", ":", "parts", "=", "s", ".", "split", "(", "'.'", ")", "if", "(", "len", "(", "parts", ")", "!=", "4", ")", ":", "return", "False", "for", "part", "in", "parts", ":", "try", ":", "int", "(", "part", ")", "except", "ValueError", ":", "return", "False", "return", "True" ]
find if a string superficially looks like an ipv4 address .
train
true