Column schema (name / dtype / min / max):

  id_within_dataset     int64          1        55.5k
  snippet               stringlengths  19       14.2k
  tokens                listlengths    6        1.63k
  nl                    stringlengths  6        352
  split_within_dataset  stringclasses  1 value
  is_duplicated         bool           2 classes
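Each record below pairs a code snippet with its token sequence, a one-sentence natural-language summary (nl), the split name, and a duplication flag. As a minimal sketch of how records of this shape might be consumed — the load_records helper and the records.jsonl path are hypothetical, assuming one JSON object per line with the columns above:

import json

def load_records(path):
    # Hypothetical reader: one JSON object per line, with the columns
    # listed in the schema (id_within_dataset, snippet, tokens, nl,
    # split_within_dataset, is_duplicated).
    with open(path) as f:
        for line in f:
            yield json.loads(line)

# Example: count train-split records, skipping flagged duplicates.
n_train = n_unique = 0
for rec in load_records('records.jsonl'):  # hypothetical file name
    if rec['split_within_dataset'] == 'train':
        n_train += 1
        if not rec['is_duplicated']:
            n_unique += 1
print(n_train, n_unique)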
55,079
def test_warning_config_google_home_listen_port():
    with patch.object(_LOGGER, 'warning') as mock_warn:
        Config(None, {'type': 'google_home', 'host_ip': '123.123.123.123', 'listen_port': 8300})
        assert mock_warn.called
        assert (mock_warn.mock_calls[0][1][0] == 'When targetting Google Home, listening port has to be port 80')
test we warn when non-default port is used for google home .
train
false
55,080
def auc(actual, posterior):
    r = tied_rank(posterior)
    num_positive = len([0 for x in actual if (x == 1)])
    num_negative = (len(actual) - num_positive)
    sum_positive = sum([r[i] for i in range(len(r)) if (actual[i] == 1)])
    auc = ((sum_positive - ((num_positive * (num_positive + 1)) / 2.0)) / (num_negative * num_positive))
    return auc
computes the area under the receiver-operator characteristic ; this function computes the auc error metric for binary classification .
train
false
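The auc snippet above uses the rank-sum (Mann-Whitney) identity: AUC = (sum of the positive examples' ranks - n_pos*(n_pos+1)/2) / (n_pos*n_neg). A quick self-contained check of that formula — tied_rank is not included in this record, so the average-rank implementation below is an assumption:

def tied_rank(x):
    # Assumed behavior: 1-based ranks, with tied values sharing the
    # average rank of their run.
    pairs = sorted(enumerate(x), key=lambda p: p[1])
    ranks = [0.0] * len(x)
    i = 0
    while i < len(pairs):
        j = i
        while j + 1 < len(pairs) and pairs[j + 1][1] == pairs[i][1]:
            j += 1
        avg = (i + j) / 2.0 + 1.0  # average 1-based rank of the tied run
        for k in range(i, j + 1):
            ranks[pairs[k][0]] = avg
        i = j + 1
    return ranks

def auc(actual, posterior):
    r = tied_rank(posterior)
    num_positive = len([0 for x in actual if (x == 1)])
    num_negative = (len(actual) - num_positive)
    sum_positive = sum([r[i] for i in range(len(r)) if (actual[i] == 1)])
    return ((sum_positive - ((num_positive * (num_positive + 1)) / 2.0)) / (num_negative * num_positive))

# Perfectly separated scores give 1.0; reversed scores give 0.0.
print(auc([0, 0, 1, 1], [0.1, 0.2, 0.8, 0.9]))  # 1.0
print(auc([0, 0, 1, 1], [0.9, 0.8, 0.2, 0.1]))  # 0.0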
55,081
def _get_channel_stub():
    return apiproxy_stub_map.apiproxy.GetStub('channel')
gets the channelservicestub instance from the api proxy stub map .
train
false
55,082
def is_lyrics(text, artist=None):
    if (not text):
        return False
    badTriggersOcc = []
    nbLines = text.count('\n')
    if (nbLines <= 1):
        log.debug(u"Ignoring too short lyrics '{0}'".format(text))
        return False
    elif (nbLines < 5):
        badTriggersOcc.append('too_short')
    else:
        text = remove_credits(text)
    badTriggers = ['lyrics', 'copyright', 'property', 'links']
    if artist:
        badTriggersOcc += [artist]
    for item in badTriggers:
        badTriggersOcc += ([item] * len(re.findall(('\\W%s\\W' % item), text, re.I)))
    if badTriggersOcc:
        log.debug(u'Bad triggers detected: {0}'.format(badTriggersOcc))
    return (len(badTriggersOcc) < 2)
determine whether the text seems to be valid lyrics .
train
false
55,083
def _as_meg_type_evoked(evoked, ch_type='grad', mode='fast'):
    evoked = evoked.copy()
    if (ch_type not in ['mag', 'grad']):
        raise ValueError(('to_type must be "mag" or "grad", not "%s"' % ch_type))
    pick_from = pick_types(evoked.info, meg=True, eeg=False, ref_meg=False)
    pick_to = pick_types(evoked.info, meg=ch_type, eeg=False, ref_meg=False)
    if (len(pick_to) == 0):
        raise ValueError('No channels matching the destination channel type found in info. Please pass an evoked containingboth the original and destination channels. Only the locations of the destination channels will be used for interpolation.')
    info_from = pick_info(evoked.info, pick_from)
    info_to = pick_info(evoked.info, pick_to)
    mapping = _map_meg_channels(info_from, info_to, mode=mode)
    data = np.dot(mapping, evoked.data[pick_from])
    evoked.pick_types(meg=ch_type, eeg=False, ref_meg=False)
    evoked.data = data
    for ch in evoked.info['chs']:
        ch['ch_name'] += '_virtual'
    evoked.info._update_redundant()
    evoked.info._check_consistency()
    return evoked
compute virtual evoked using interpolated fields in mag/grad channels .
train
false
55,084
def getFunctionsWithStringByFileNames(fileNames, searchString):
    functions = []
    for fileName in fileNames:
        functions += getFunctionsWithStringByFileName(fileName, searchString)
    functions.sort()
    return functions
get the functions with the search string in the files .
train
false
55,085
def assert_snr(actual, desired, tol):
    from nose.tools import assert_true
    snr = (linalg.norm(desired, ord='fro') / linalg.norm((desired - actual), ord='fro'))
    assert_true((snr >= tol), msg=('%f < %f' % (snr, tol)))
assert actual and desired arrays are within some snr tolerance .
train
false
55,086
def vacuum(verbose=False):
    ret = {}
    imgadm = _check_imgadm()
    cmd = '{0} vacuum -f'.format(imgadm)
    res = __salt__['cmd.run_all'](cmd)
    retcode = res['retcode']
    if (retcode != 0):
        ret['Error'] = _exit_status(retcode)
        return ret
    result = {}
    for image in res['stdout'].splitlines():
        image = [var for var in image.split(' ') if var]
        result[image[2]] = {'name': image[3][1:image[3].index('@')], 'version': image[3][(image[3].index('@') + 1):(-1)]}
    if verbose:
        return result
    else:
        return list(result.keys())
remove unused images . verbose : boolean , toggle verbose output . cli example: .
train
true
55,087
def diff_tree(repo, old_tree, new_tree, outstream=sys.stdout):
    with open_repo_closing(repo) as r:
        write_tree_diff(outstream, r.object_store, old_tree, new_tree)
compares the content and mode of blobs found via two tree objects .
train
false
55,089
def getTypeFromProgID(prog_id):
    return Type.GetTypeFromProgID(prog_id)
returns the type object for prog_id .
train
false
55,091
def buildRequestBytes(headers, data, frameFactory=None, streamID=1):
    frames = buildRequestFrames(headers, data, frameFactory, streamID)
    return ''.join((f.serialize() for f in frames))
provides the byte sequence for a collection of http/2 frames representing the provided request .
train
false
55,092
def request_fingerprint(request, include_headers=None):
    if include_headers:
        include_headers = tuple([h.lower() for h in sorted(include_headers)])
    cache = _fingerprint_cache.setdefault(request, {})
    if (include_headers not in cache):
        fp = hashlib.sha1()
        fp.update(request.method)
        fp.update(canonicalize_url(request.url))
        fp.update((request.body or ''))
        if include_headers:
            for hdr in include_headers:
                if (hdr in request.headers):
                    fp.update(hdr)
                    for v in request.headers.getlist(hdr):
                        fp.update(v)
        cache[include_headers] = fp.hexdigest()
    return cache[include_headers]
return the request fingerprint .
train
false
55,093
@mock_streams('stderr')
@with_patched_object(output, 'warnings', True)
def test_warn():
    warn('Test')
    eq_('\nWarning: Test\n\n', sys.stderr.getvalue())
warn() should print warning plus given text .
train
false
55,094
def posixToNtSlashes(filepath):
    return filepath.replace('/', '\\')
replaces all occurrences of posix slashes (/) in the provided filepath with nt ones (\) .
train
false
55,095
def null_safe(rule):
    def null_safe_rl(expr):
        result = rule(expr)
        if (result is None):
            return expr
        else:
            return result
    return null_safe_rl
return original expr if rule returns none .
train
false
55,096
def lucene_search(trans, cntrller, search_term, search_url, **kwd):
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    full_url = ('%s/find?%s' % (search_url, urllib.urlencode({'kwd': search_term})))
    response = urllib2.urlopen(full_url)
    ldda_ids = loads(response.read())['ids']
    response.close()
    lddas = [trans.sa_session.query(trans.app.model.LibraryDatasetDatasetAssociation).get(ldda_id) for ldda_id in ldda_ids]
    return (status, message, get_sorted_accessible_library_items(trans, cntrller, lddas, 'name'))
return display of results from a full-text lucene search of data libraries .
train
false
55,099
def record_usage_multi(prefix_slices):
    keys = [_make_ratelimit_cache_key(k, t) for (k, t) in prefix_slices]
    try:
        now = int(time.time())
        for (key, (_, time_slice)) in zip(keys, prefix_slices):
            g.ratelimitcache.add(key, 0, time=((time_slice.end - now) + 1))
        try:
            recent_usage = g.ratelimitcache.incr_multi(keys)
        except pylibmc.NotFound:
            now = int(time.time())
            if (now < time_slice.end):
                recent_usage = []
                for (key, (_, time_slice)) in zip(keys, prefix_slices):
                    if g.ratelimitcache.add(key, 1, time=((time_slice.end - now) + 1)):
                        recent_usage.append(1)
                        g.stats.simple_event('ratelimit.eviction')
                    else:
                        recent_usage.append(g.ratelimitcache.get(key))
        return recent_usage
    except pylibmc.Error as e:
        raise RatelimitError(e)
record usage of multiple rate limits .
train
false
55,100
@command(name='hash', usage='compute hashes')
def print_hash(args):
    import lixian_hash
    import lixian_cli_parser
    lixian_hash.main(lixian_cli_parser.expand_command_line(args))
lx hash --sha1 file .
train
false
55,101
def for_signed_dtypes_combination(names=('dtype',), full=None):
    return for_dtypes_combination(_signed_dtypes, names=names, full=full)
decorator for parameterized test w.r.t. the combination of signed dtypes .
train
false
55,103
def _RecurseOverObject(obj, factory, parent=None):
    if _IsSudsIterable(obj):
        copy_of_obj = tuple(obj)
        for item in copy_of_obj:
            if _IsSudsIterable(item):
                if ('xsi_type' in item):
                    if isinstance(obj, tuple):
                        parent[obj[0]] = _PackForSuds(obj[1], factory)
                    else:
                        obj.remove(item)
                        obj.append(_PackForSuds(item, factory))
                _RecurseOverObject(item, factory, obj)
recurses over a nested structure to look for changes in suds objects .
train
true
55,104
def s_byte(value, endian='<', format='binary', signed=False, full_range=False, fuzzable=True, name=None):
    byte = primitives.byte(value, endian, format, signed, full_range, fuzzable, name)
    blocks.CURRENT.push(byte)
push a byte onto the current block stack .
train
false
55,105
def publish_collection(committer_id, collection_id):
    _publish_activity(committer_id, collection_id, feconf.ACTIVITY_TYPE_COLLECTION)
this is called by the publish_collection_and_update_user_profiles function in collection_services .
train
false
55,108
def agent_settings(name, contact, location, services=None):
    ret = {'name': name, 'changes': {}, 'comment': str(), 'result': None}
    ret_settings = {'changes': dict(), 'failures': dict()}
    if (not services):
        services = ['None']
    services = sorted(set(services))
    settings = {'contact': contact, 'location': location, 'services': services}
    current_settings = __salt__['win_snmp.get_agent_settings']()
    for setting in settings:
        if (str(settings[setting]) != str(current_settings[setting])):
            ret_settings['changes'][setting] = {'old': current_settings[setting], 'new': settings[setting]}
    if (not ret_settings['changes']):
        ret['comment'] = 'Agent settings already contain the provided values.'
        ret['result'] = True
        return ret
    elif __opts__['test']:
        ret['comment'] = 'Agent settings will be changed.'
        ret['changes'] = ret_settings
        return ret
    __salt__['win_snmp.set_agent_settings'](**settings)
    new_settings = __salt__['win_snmp.get_agent_settings']()
    for setting in settings:
        if (settings[setting] != new_settings[setting]):
            ret_settings['failures'][setting] = {'old': current_settings[setting], 'new': new_settings[setting]}
            ret_settings['changes'].pop(setting, None)
    if ret_settings['failures']:
        ret['comment'] = 'Some agent settings failed to change.'
        ret['changes'] = ret_settings
        ret['result'] = False
    else:
        ret['comment'] = 'Set agent settings to contain the provided values.'
        ret['changes'] = ret_settings['changes']
        ret['result'] = True
    return ret
manage the snmp syscontact .
train
true
55,109
def test_extract_Array_float():
    for a in np.arange(2.51, 3.49, 0.1):
        assert np.all((extract_array(np.arange(5), 3, a) == np.array([2, 3, 4])))
integer is at bin center .
train
false
55,110
def _heapify_max(x):
    n = len(x)
    for i in reversed(range((n // 2))):
        _siftup_max(x, i)
transform list into a maxheap .
train
true
55,111
def instance_type_access_remove(context, flavor_id, project_id):
    return IMPL.instance_type_access_remove(context, flavor_id, project_id)
remove flavor access for project .
train
false
55,112
def quota_usage_update(context, project_id, resource, **kwargs):
    return IMPL.quota_usage_update(context, project_id, resource, **kwargs)
update a quota usage or raise if it does not exist .
train
false
55,114
def emitter(p=0.03):
    while True:
        v = np.random.rand(1)
        if (v > p):
            (yield 0.0)
        else:
            (yield np.random.rand(1))
return a random value with probability p .
train
false
55,115
def _policyFileReplaceOrAppend(this_string, policy_data, append_only=False):
    if (not policy_data):
        policy_data = ''
    specialValueRegex = '(\\*\\*Del\\.|\\*\\*DelVals\\.){0,1}'
    item_key = None
    item_value_name = None
    data_to_replace = None
    if (not append_only):
        item_key = this_string.split('{0};'.format(chr(0)))[0].lstrip('[')
        item_value_name = re.sub(specialValueRegex, '', this_string.split('{0};'.format(chr(0)))[1], flags=re.IGNORECASE)
        log.debug('item value name is {0}'.format(item_value_name))
        data_to_replace = _regexSearchKeyValueCombo(policy_data, item_key, item_value_name)
    if data_to_replace:
        log.debug('replacing {0} with {1}'.format([data_to_replace], [this_string]))
        policy_data = policy_data.replace(data_to_replace, this_string)
    else:
        log.debug('appending {0}'.format([this_string]))
        policy_data = ''.join([policy_data, this_string])
    return policy_data
helper function to take an admx policy string for registry .
train
false
55,116
def uldap():
    def construct():
        try:
            secret_file = open('/etc/ldap.secret', 'r')
            bind_dn = 'cn=admin,{}'.format(base_dn())
        except IOError:
            secret_file = open('/etc/machine.secret', 'r')
            bind_dn = config_registry()['ldap/hostdn']
        pwd_line = secret_file.readline()
        pwd = re.sub('\n', '', pwd_line)
        import univention.admin.uldap
        return univention.admin.uldap.access(host=config_registry()['ldap/master'], base=base_dn(), binddn=bind_dn, bindpw=pwd, start_tls=1)
    return _singleton('uldap', construct)
return a configured univention uldap object .
train
false
55,117
def _acquire_download_cache_lock():
    lockdir = os.path.join(_get_download_cache_locs()[0], u'lock')
    for i in range(conf.download_cache_lock_attempts):
        try:
            os.mkdir(lockdir)
            with open(os.path.join(lockdir, u'pid'), u'w') as f:
                f.write(str(os.getpid()))
        except OSError:
            time.sleep(1)
        else:
            return
    msg = u"Unable to acquire lock for cache directory ({0} exists). You may need to delete the lock if the python interpreter wasn't shut down properly."
    raise RuntimeError(msg.format(lockdir))
uses the lock directory method .
train
false
55,119
def wrap_paragraphs(text, ncols=80):
    paragraph_re = re.compile('\\n(\\s*\\n)+', re.MULTILINE)
    text = dedent(text).strip()
    paragraphs = paragraph_re.split(text)[::2]
    out_ps = []
    indent_re = re.compile('\\n\\s+', re.MULTILINE)
    for p in paragraphs:
        if (indent_re.search(p) is None):
            p = textwrap.fill(p, ncols)
        out_ps.append(p)
    return out_ps
wrap multiple paragraphs to fit a specified width .
train
true
55,120
def parse_content(documentation_type, content):
    try:
        to_index = PyQuery(content).text()
    except ValueError:
        return ''
    return to_index
prepare the text of the html file .
train
false
55,121
def setEntryText(entry, value):
    if (entry == None):
        return
    entry.delete(0, Tkinter.END)
    entry.insert(0, str(value))
set the entry text .
train
false
55,123
def ensure_dirs(filename):
    (dirname, _) = os.path.split(filename)
    if (dirname and (not os.path.exists(dirname))):
        os.makedirs(dirname)
make sure the directories exist for filename .
train
true
55,124
def get_record(zone_id, record_id, profile):
    conn = _get_driver(profile=profile)
    return conn.get_record(zone_id, record_id)
get record information for the given zone_id on the given profile .
train
true
55,125
def get_restart_power_failure():
    ret = salt.utils.mac_utils.execute_return_result('systemsetup -getrestartpowerfailure')
    return (salt.utils.mac_utils.validate_enabled(salt.utils.mac_utils.parse_return(ret)) == 'on')
displays whether restart on power failure is on or off , if supported . :return: a string value representing the "restart on power failure" settings . :rtype: string . cli example: .
train
true
55,126
def _plot_ci_bars(ax, x, central_data, ci, color, err_kws, **kwargs):
    for (x_i, y_i, (low, high)) in zip(x, central_data, ci.T):
        ax.plot([x_i, x_i], [low, high], color=color, solid_capstyle='round', **err_kws)
plot error bars at each data point .
train
false
55,127
def encrypt_stream(mode, in_stream, out_stream, block_size=BLOCK_SIZE, padding=PADDING_DEFAULT):
    encrypter = Encrypter(mode, padding=padding)
    _feed_stream(encrypter, in_stream, out_stream, block_size)
encrypts a stream of bytes from in_stream to out_stream using mode .
train
true
55,128
def parent_dir(path):
    return os.path.abspath(os.path.join(path, os.pardir))
return the parent of a directory .
train
true
55,129
def load_meta_sources():
    meta_sources = {}
    for (module_path, class_name) in SOURCES.items():
        module = import_module(((METASYNC_MODULE + '.') + module_path))
        meta_sources[class_name.lower()] = getattr(module, class_name)
    return meta_sources
returns a dictionary of all the metasources e .
train
false
55,130
def _get_resources(context, alias):
    alias_sql = sqlalchemy.text(u'SELECT alias_of FROM "_table_metadata"\n        WHERE name = :alias AND alias_of IS NOT NULL')
    results = context['connection'].execute(alias_sql, alias=alias).fetchall()
    return [x[0] for x in results]
get a list of resources for an alias .
train
false
55,131
def shell_init_func():
    from simplewiki import database
    wiki = make_wiki()
    wiki.bind_to_context()
    return {'wiki': wiki, 'db': database}
called on shell initialization .
train
false
55,132
def get_log_for_pid(pid):
    found_pid = False
    pid_str = (' PID: %s ' % pid)
    for line in fileinput.input(glob.glob((static.DEBUG_FILE + '*'))):
        if (pid_str in line):
            (yield line)
            found_pid = True
        elif (found_pid and (' PID: ' not in line)):
            (yield line)
        else:
            found_pid = False
fetches the logs from the debug log file for a given starcluster run by pid .
train
false
55,133
@requires_good_network
def test_fetch_file_html():
    _test_fetch('http://google.com')
test file downloading over http .
train
false
55,134
def safe_extra(extra):
    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
convert an arbitrary string to a standard extra name ; any runs of non-alphanumeric characters are replaced with a single _ .
train
false
55,135
def versions_report():
    return '\n'.join(salt.version.versions_report())
returns versions of components used by salt . cli example: .
train
false
55,138
def _check_all_tasks(tasks):
    running_tasks_data = []
    for task in tasks:
        if task.isAlive():
            running_tasks_data.append((' %s (started %s)' % (task.name, time.strftime('%H:%M:%S', time.localtime(task.start_time)))))
        if task.exception:
            ALL_ERRORS.append(task.exception)
    if running_tasks_data:
        log('----------------------------------------')
        log('Tasks still running:')
        for task_details in running_tasks_data:
            log(task_details)
checks the results of all tasks .
train
false
55,139
def _option(value):
    if (value in __opts__):
        return __opts__[value]
    master_opts = __pillar__.get('master', {})
    if (value in master_opts):
        return master_opts[value]
    if (value in __pillar__):
        return __pillar__[value]
look up the value for an option .
train
true
55,140
def socket_pair():
    port = socket()
    port.bind(('', 0))
    port.listen(1)
    client = socket()
    client.setblocking(False)
    client.connect_ex(('127.0.0.1', port.getsockname()[1]))
    client.setblocking(True)
    server = port.accept()[0]
    server.send(b('x'))
    assert (client.recv(1024) == b('x'))
    client.send(b('y'))
    assert (server.recv(1024) == b('y'))
    server.setblocking(False)
    client.setblocking(False)
    return (server, client)
establish and return a pair of network sockets connected to each other .
train
false
55,141
def cbServerGreeting(proto, username, password):
    tp = TrivialPrompter()
    stdio.StandardIO(tp)
    proto.prompt = tp.prompt
    proto.display = tp.display
    return proto.authenticate(password).addCallback(cbAuthentication, proto).addErrback(ebAuthentication, proto, username, password)
initial callback - invoked after the server sends us its greet message .
train
false
55,142
def Ql(filter_, thing):
    res = Q(filter_, thing)
    if isinstance(filter_, type({})):
        for k in res:
            res[k] = list(res[k])
        return res
    else:
        return list(res)
same as q , but with the results converted to lists .
train
false
55,143
def check_complete(task, out_queue):
    logger.debug('Checking if %s is complete', task)
    try:
        is_complete = task.complete()
    except Exception:
        is_complete = TracebackWrapper(traceback.format_exc())
    out_queue.put((task, is_complete))
checks if task is complete .
train
true
55,145
def create_test_db():
    from inbox.config import config
    database_hosts = config.get_required('DATABASE_HOSTS')
    schemas = [(shard['SCHEMA_NAME'], host['HOSTNAME']) for host in database_hosts for shard in host['SHARDS']]
    assert all([('test' in s) for (s, h) in schemas])
    for (name, host) in schemas:
        cmd = 'DROP DATABASE IF EXISTS {name}; CREATE DATABASE IF NOT EXISTS {name} DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE utf8mb4_general_ci'.format(name=name)
        subprocess.check_call('mysql -h {} -uinboxtest -pinboxtest -e "{}"'.format(host, cmd), shell=True)
creates new test databases .
train
false
55,146
def _unpickle_appattr(reverse_name, args):
    return get_current_app()._rgetattr(reverse_name)(*args)
unpickle an attribute of the current app and call it with the given args .
train
false
55,147
def is_mobile_available_for_user(user, descriptor):
    return (auth.user_has_role(user, CourseBetaTesterRole(descriptor.id)) or _has_staff_access_to_descriptor(user, descriptor, descriptor.id) or _is_descriptor_mobile_available(descriptor))
returns whether the given course is mobile_available for the given user .
train
false
55,148
@conf.commands.register
def sr1(x, promisc=None, filter=None, iface=None, nofilter=0, *args, **kargs):
    if (not kargs.has_key('timeout')):
        kargs['timeout'] = (-1)
    s = conf.L3socket(promisc=promisc, filter=filter, nofilter=nofilter, iface=iface)
    (a, b) = sndrcv(s, x, *args, **kargs)
    s.close()
    if (len(a) > 0):
        return a[0][1]
    else:
        return None
send packets at layer 3 and return only the first answer . nofilter: put 1 to avoid use of bpf filters . retry: if positive .
train
true
55,149
def length_is(value, arg):
    return (len(value) == int(arg))
returns a boolean of whether the value's length is the argument .
train
false
55,151
def base64_b64encode(instr):
    if six.PY3:
        b = salt.utils.to_bytes(instr)
        b64 = base64.b64encode(b)
        return salt.utils.to_str(b64)
    return base64.b64encode(instr)
encode a string as base64 using the "modern" python interface .
train
false
55,153
def has_default_value(option, value):
    return ((option in helpful_parser.defaults) and (helpful_parser.defaults[option] == value))
does option have the default value? if the default value of option is not known .
train
false
55,154
def rate_id(context, id_, val=3):
    def rate_element(driver):
        try:
            inner_wrapper = find_id_with_wait(context, id_, wait_time=2)
            els = inner_wrapper.find_elements_by_class_name(STAR_RATING_OPTION_CLASS)
            rating_el = [el for el in filter((lambda x: (int(x.get_attribute('data-val')) == val)), els)][0]
            rating_el.click()
            return True
        except (NoSuchElementException, StaleElementReferenceException, TimeoutException, IndexError):
            return False
    try:
        WebDriverWait(context.browser, 30).until(rate_element)
    except TimeoutException:
        raise Exception("Unable to enter rating for container with id '{id:s}'".format(id=id_))
enter a star rating given the id of the container .
train
false
55,155
def p_expr_list_2(t):
    pass
exprlist : expression .
train
false
55,156
def p_relational_expression_5(t):
    pass
relational_expression : relational_expression ge shift_expression .
train
false
55,157
def getProfileDirectory():
    craftTypeName = getCraftTypeName()
    return os.path.join(craftTypeName, getProfileName(craftTypeName))
get the profile directory .
train
false
55,159
def nagios_from_file(results_file):
    data = open(results_file).read().strip()
    pieces = data.split('|')
    if (not (len(pieces) == 4)):
        state = 'UNKNOWN'
        ret = 3
        data = 'Results file malformed'
    else:
        timestamp = int(pieces[0])
        time_diff = (time.time() - timestamp)
        if (time_diff > (60 * 2)):
            ret = 3
            state = 'UNKNOWN'
            data = 'Results file is stale'
        else:
            ret = int(pieces[1])
            state = pieces[2]
            data = pieces[3]
    return (ret, ('%s: %s' % (state, data)))
returns a nagios-appropriate string and return code obtained by parsing the desired file on disk .
train
false
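The nagios_from_file record above reads a pipe-delimited results file of the form "timestamp|return_code|STATE|message" and treats anything older than two minutes as stale. A minimal usage sketch, assuming the function is in scope and using a hypothetical path:

import time

# write a fresh, well-formed results file
with open('/tmp/check_results', 'w') as f:
    f.write('%d|0|OK|All services healthy' % int(time.time()))

ret, message = nagios_from_file('/tmp/check_results')
print(ret, message)  # 0 OK: All services healthy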
55,160
@pytest.mark.django_db def test_max_revision(revision, project0_nongnu, store0): store0.sync() store0.update(store0.file.store) initial_max_revision = Unit.max_revision() initial_revision = Revision.get() assert (initial_max_revision == initial_revision) for i in range(10): _update_translation(store0, 0, {'target': str(i)}, sync=False) end_max_revision = Unit.max_revision() end_revision = Revision.get() assert (end_max_revision == end_revision) assert (end_max_revision != initial_max_revision) assert (end_revision != initial_revision) assert (end_revision == (10 + initial_revision))
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_max_revision", "(", "revision", ",", "project0_nongnu", ",", "store0", ")", ":", "store0", ".", "sync", "(", ")", "store0", ".", "update", "(", "store0", ".", "file", ".", "store", ")", "initial_max_revision", "=", "Unit", ".", "max_revision", "(", ")", "initial_revision", "=", "Revision", ".", "get", "(", ")", "assert", "(", "initial_max_revision", "==", "initial_revision", ")", "for", "i", "in", "range", "(", "10", ")", ":", "_update_translation", "(", "store0", ",", "0", ",", "{", "'target'", ":", "str", "(", "i", ")", "}", ",", "sync", "=", "False", ")", "end_max_revision", "=", "Unit", ".", "max_revision", "(", ")", "end_revision", "=", "Revision", ".", "get", "(", ")", "assert", "(", "end_max_revision", "==", "end_revision", ")", "assert", "(", "end_max_revision", "!=", "initial_max_revision", ")", "assert", "(", "end_revision", "!=", "initial_revision", ")", "assert", "(", "end_revision", "==", "(", "10", "+", "initial_revision", ")", ")" ]
tests that max_revision() gets the latest revision .
train
false
55,161
def json_http_response(data): return JsonResponse(data)
[ "def", "json_http_response", "(", "data", ")", ":", "return", "JsonResponse", "(", "data", ")" ]
return an httpresponse with the data json-serialized and the right content type header .
train
false
55,162
def expect_mc(dist, func=(lambda x: 1), size=50000): def fun(x): return func(x) rvs = dist.rvs(size=size) return fun(rvs).mean(0)
[ "def", "expect_mc", "(", "dist", ",", "func", "=", "(", "lambda", "x", ":", "1", ")", ",", "size", "=", "50000", ")", ":", "def", "fun", "(", "x", ")", ":", "return", "func", "(", "x", ")", "rvs", "=", "dist", ".", "rvs", "(", "size", "=", "size", ")", "return", "fun", "(", "rvs", ")", ".", "mean", "(", "0", ")" ]
calculate expected value of function by monte carlo integration . parameters : dist , a distribution instance that needs to have rvs defined as a method for drawing random numbers ; func , a callable for which the expectation is calculated .
train
false
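A quick check of the expect_mc record above against known moments of the standard normal, assuming scipy is available:

import numpy as np
from scipy import stats

np.random.seed(0)
# E[X] = 0 and E[X**2] = 1 for a standard normal, so both estimates
# should land close to those values for a large sample
print(expect_mc(stats.norm, func=lambda x: x, size=100000))       # approx 0.0
print(expect_mc(stats.norm, func=lambda x: x ** 2, size=100000))  # approx 1.0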
55,163
def limitedTime(second, func, *args, **kw): return func(*args, **kw)
[ "def", "limitedTime", "(", "second", ",", "func", ",", "*", "args", ",", "**", "kw", ")", ":", "return", "func", "(", "*", "args", ",", "**", "kw", ")" ]
call func with a timeout of second seconds .
train
false
55,164
def rnn_helper(inp, length, cell_type=None, direction='forward', name=None, *args, **kwargs): assert (cell_type is not None) rnn_func = None if (cell_type == 'lstm'): rnn_func = lstm_layer assert (rnn_func is not None) assert (direction in ['forward', 'backward', 'bidirectional']) with tf.variable_scope(name): if (direction in ['forward', 'bidirectional']): forward = rnn_func(inp=inp, length=length, backward=False, name='forward', *args, **kwargs) if isinstance(forward, tuple): forward = forward[0] if (direction in ['backward', 'bidirectional']): backward = rnn_func(inp=inp, length=length, backward=True, name='backward', *args, **kwargs) if isinstance(backward, tuple): backward = backward[0] if (direction == 'forward'): out = forward elif (direction == 'backward'): out = backward else: out = tf.concat(2, [forward, backward]) return out
[ "def", "rnn_helper", "(", "inp", ",", "length", ",", "cell_type", "=", "None", ",", "direction", "=", "'forward'", ",", "name", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "assert", "(", "cell_type", "is", "not", "None", ")", "rnn_func", "=", "None", "if", "(", "cell_type", "==", "'lstm'", ")", ":", "rnn_func", "=", "lstm_layer", "assert", "(", "rnn_func", "is", "not", "None", ")", "assert", "(", "direction", "in", "[", "'forward'", ",", "'backward'", ",", "'bidirectional'", "]", ")", "with", "tf", ".", "variable_scope", "(", "name", ")", ":", "if", "(", "direction", "in", "[", "'forward'", ",", "'bidirectional'", "]", ")", ":", "forward", "=", "rnn_func", "(", "inp", "=", "inp", ",", "length", "=", "length", ",", "backward", "=", "False", ",", "name", "=", "'forward'", ",", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "forward", ",", "tuple", ")", ":", "forward", "=", "forward", "[", "0", "]", "if", "(", "direction", "in", "[", "'backward'", ",", "'bidirectional'", "]", ")", ":", "backward", "=", "rnn_func", "(", "inp", "=", "inp", ",", "length", "=", "length", ",", "backward", "=", "True", ",", "name", "=", "'backward'", ",", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "backward", ",", "tuple", ")", ":", "backward", "=", "backward", "[", "0", "]", "if", "(", "direction", "==", "'forward'", ")", ":", "out", "=", "forward", "elif", "(", "direction", "==", "'backward'", ")", ":", "out", "=", "backward", "else", ":", "out", "=", "tf", ".", "concat", "(", "2", ",", "[", "forward", ",", "backward", "]", ")", "return", "out" ]
adds ops for a recurrent neural network layer .
train
false
55,165
def scheme_node_from_element(node_el, registry): try: widget_desc = registry.widget(node_el.get('qualified_name')) except KeyError as ex: raise UnknownWidgetDefinition(*ex.args) title = node_el.get('title') pos = node_el.get('position') if (pos is not None): pos = tuple_eval(pos) return SchemeNode(widget_desc, title=title, position=pos)
[ "def", "scheme_node_from_element", "(", "node_el", ",", "registry", ")", ":", "try", ":", "widget_desc", "=", "registry", ".", "widget", "(", "node_el", ".", "get", "(", "'qualified_name'", ")", ")", "except", "KeyError", "as", "ex", ":", "raise", "UnknownWidgetDefinition", "(", "*", "ex", ".", "args", ")", "title", "=", "node_el", ".", "get", "(", "'title'", ")", "pos", "=", "node_el", ".", "get", "(", "'position'", ")", "if", "(", "pos", "is", "not", "None", ")", ":", "pos", "=", "tuple_eval", "(", "pos", ")", "return", "SchemeNode", "(", "widget_desc", ",", "title", "=", "title", ",", "position", "=", "pos", ")" ]
create a schemenode from an element instance .
train
false
55,166
def pull_dkr(url, name, index): return _pull_image('dkr', url, name, index=index)
[ "def", "pull_dkr", "(", "url", ",", "name", ",", "index", ")", ":", "return", "_pull_image", "(", "'dkr'", ",", "url", ",", "name", ",", "index", "=", "index", ")" ]
execute a machinectl pull-dkr to download a docker image and add it to /var/lib/machines as a new container .
train
false
55,167
def managedcloud(vm_): return config.get_cloud_config_value('managedcloud', vm_, __opts__, default='False', search_global=False)
[ "def", "managedcloud", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'managedcloud'", ",", "vm_", ",", "__opts__", ",", "default", "=", "'False'", ",", "search_global", "=", "False", ")" ]
determine if we should wait for the managed cloud automation before running .
train
true
55,168
def confirmation_option(*param_decls, **attrs): def decorator(f): def callback(ctx, param, value): if (not value): ctx.abort() attrs.setdefault('is_flag', True) attrs.setdefault('callback', callback) attrs.setdefault('expose_value', False) attrs.setdefault('prompt', 'Do you want to continue?') attrs.setdefault('help', 'Confirm the action without prompting.') return option(*(param_decls or ('--yes',)), **attrs)(f) return decorator
[ "def", "confirmation_option", "(", "*", "param_decls", ",", "**", "attrs", ")", ":", "def", "decorator", "(", "f", ")", ":", "def", "callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "if", "(", "not", "value", ")", ":", "ctx", ".", "abort", "(", ")", "attrs", ".", "setdefault", "(", "'is_flag'", ",", "True", ")", "attrs", ".", "setdefault", "(", "'callback'", ",", "callback", ")", "attrs", ".", "setdefault", "(", "'expose_value'", ",", "False", ")", "attrs", ".", "setdefault", "(", "'prompt'", ",", "'Do you want to continue?'", ")", "attrs", ".", "setdefault", "(", "'help'", ",", "'Confirm the action without prompting.'", ")", "return", "option", "(", "*", "(", "param_decls", "or", "(", "'--yes'", ",", ")", ")", ",", "**", "attrs", ")", "(", "f", ")", "return", "decorator" ]
shortcut for confirmation prompts that can be ignored by passing --yes as parameter .
train
true
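The confirmation_option record above mirrors click's stock decorator, so usage looks the same; this sketch assumes `option` in the snippet refers to click.option, as in click's own decorators module:

import click

@click.command()
@confirmation_option('--yes', prompt='Drop the database?')
def dropdb():
    # only reached after the user confirms, or passes --yes up front
    click.echo('Dropped.')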
55,169
@inspect_command(alias=u'dump_schedule') def scheduled(state, **kwargs): return list(_iter_schedule_requests(state.consumer.timer))
[ "@", "inspect_command", "(", "alias", "=", "u'dump_schedule'", ")", "def", "scheduled", "(", "state", ",", "**", "kwargs", ")", ":", "return", "list", "(", "_iter_schedule_requests", "(", "state", ".", "consumer", ".", "timer", ")", ")" ]
list of currently scheduled eta/countdown tasks .
train
false
55,170
@instrumented_task(name='sentry.tasks.post_process.plugin_post_process_group', stat_suffix=(lambda plugin_slug, *a, **k: plugin_slug)) def plugin_post_process_group(plugin_slug, event, **kwargs): Raven.tags_context({'project': event.project_id}) plugin = plugins.get(plugin_slug) safe_execute(plugin.post_process, event=event, group=event.group, **kwargs)
[ "@", "instrumented_task", "(", "name", "=", "'sentry.tasks.post_process.plugin_post_process_group'", ",", "stat_suffix", "=", "(", "lambda", "plugin_slug", ",", "*", "a", ",", "**", "k", ":", "plugin_slug", ")", ")", "def", "plugin_post_process_group", "(", "plugin_slug", ",", "event", ",", "**", "kwargs", ")", ":", "Raven", ".", "tags_context", "(", "{", "'project'", ":", "event", ".", "project_id", "}", ")", "plugin", "=", "plugins", ".", "get", "(", "plugin_slug", ")", "safe_execute", "(", "plugin", ".", "post_process", ",", "event", "=", "event", ",", "group", "=", "event", ".", "group", ",", "**", "kwargs", ")" ]
fires post processing hooks for a group .
train
false
55,171
def is_valid_asn(asn): return (isinstance(asn, numbers.Integral) and (0 <= asn <= 4294967295))
[ "def", "is_valid_asn", "(", "asn", ")", ":", "return", "(", "isinstance", "(", "asn", ",", "numbers", ".", "Integral", ")", "and", "(", "0", "<=", "asn", "<=", "4294967295", ")", ")" ]
returns true if the given as number is a valid two- or four-octet asn .
train
false
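A few spot checks of the is_valid_asn record above; note that the numbers.Integral test rejects floats even when they are whole:

assert is_valid_asn(64512)        # 16-bit private ASN
assert is_valid_asn(4200000000)   # 32-bit ASN
assert not is_valid_asn(2 ** 32)  # one past the four-octet maximum
assert not is_valid_asn(-1)
assert not is_valid_asn(64512.0)  # floats are not numbers.Integral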
55,172
@lockutils.synchronized('storage-registry-lock', 'nova-', external=True) def get_storage_users(storage_path): d = {} id_path = os.path.join(storage_path, 'compute_nodes') if os.path.exists(id_path): with open(id_path) as f: d = json.loads(f.read()) recent_users = [] for node in d: if ((time.time() - d[node]) < TWENTY_FOUR_HOURS): recent_users.append(node) return recent_users
[ "@", "lockutils", ".", "synchronized", "(", "'storage-registry-lock'", ",", "'nova-'", ",", "external", "=", "True", ")", "def", "get_storage_users", "(", "storage_path", ")", ":", "d", "=", "{", "}", "id_path", "=", "os", ".", "path", ".", "join", "(", "storage_path", ",", "'compute_nodes'", ")", "if", "os", ".", "path", ".", "exists", "(", "id_path", ")", ":", "with", "open", "(", "id_path", ")", "as", "f", ":", "d", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ")", "recent_users", "=", "[", "]", "for", "node", "in", "d", ":", "if", "(", "(", "time", ".", "time", "(", ")", "-", "d", "[", "node", "]", ")", "<", "TWENTY_FOUR_HOURS", ")", ":", "recent_users", ".", "append", "(", "node", ")", "return", "recent_users" ]
get a list of all the users of this storage path .
train
false
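A sketch of the registry format the get_storage_users record above expects: <storage_path>/compute_nodes holds a JSON mapping of node name to last-seen unix timestamp. This assumes the surrounding nova module is importable (the lockutils decorator, and a TWENTY_FOUR_HOURS constant equal to 86400):

import json
import os
import time

storage_path = '/tmp/storage'
os.makedirs(storage_path, exist_ok=True)
with open(os.path.join(storage_path, 'compute_nodes'), 'w') as f:
    # node-a checked in just now; node-b three days ago
    json.dump({'node-a': time.time(), 'node-b': time.time() - 3 * 86400}, f)

print(get_storage_users(storage_path))  # ['node-a']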
55,173
@event(u'manager.startup') def init_parsers(manager): for parser_type in PARSER_TYPES: parsers[parser_type] = {} for p in plugin.get_plugins(interface=(parser_type + u'_parser')): parsers[parser_type][p.name.replace(u'parser_', u'')] = p.instance func_name = (u'parse_' + parser_type) default_parsers[parser_type] = max(iter(parsers[parser_type].items()), key=(lambda p: getattr(getattr(p[1], func_name), u'priority', 0)))[0] log.debug((u'setting default %s parser to %s. (options: %s)' % (parser_type, default_parsers[parser_type], parsers[parser_type])))
[ "@", "event", "(", "u'manager.startup'", ")", "def", "init_parsers", "(", "manager", ")", ":", "for", "parser_type", "in", "PARSER_TYPES", ":", "parsers", "[", "parser_type", "]", "=", "{", "}", "for", "p", "in", "plugin", ".", "get_plugins", "(", "interface", "=", "(", "parser_type", "+", "u'_parser'", ")", ")", ":", "parsers", "[", "parser_type", "]", "[", "p", ".", "name", ".", "replace", "(", "u'parser_'", ",", "u''", ")", "]", "=", "p", ".", "instance", "func_name", "=", "(", "u'parse_'", "+", "parser_type", ")", "default_parsers", "[", "parser_type", "]", "=", "max", "(", "iter", "(", "parsers", "[", "parser_type", "]", ".", "items", "(", ")", ")", ",", "key", "=", "(", "lambda", "p", ":", "getattr", "(", "getattr", "(", "p", "[", "1", "]", ",", "func_name", ")", ",", "u'priority'", ",", "0", ")", ")", ")", "[", "0", "]", "log", ".", "debug", "(", "(", "u'setting default %s parser to %s. (options: %s)'", "%", "(", "parser_type", ",", "default_parsers", "[", "parser_type", "]", ",", "parsers", "[", "parser_type", "]", ")", ")", ")" ]
prepare our list of parsing plugins and default parsers .
train
false
55,174
def parseline(line): fields = [] (i, n) = (0, len(line)) while (i < n): (field, i) = parsefield(line, i, n) fields.append(field) i = (i + 1) if (len(fields) < 2): return (None, None) (key, view, rest) = (fields[0], fields[1], fields[2:]) fields = {'view': view} for field in rest: i = field.find('=') if (i < 0): fkey = field fvalue = '' else: fkey = field[:i].strip() fvalue = field[(i + 1):].strip() if (fkey in fields): pass else: fields[fkey] = fvalue return (key, fields)
[ "def", "parseline", "(", "line", ")", ":", "fields", "=", "[", "]", "(", "i", ",", "n", ")", "=", "(", "0", ",", "len", "(", "line", ")", ")", "while", "(", "i", "<", "n", ")", ":", "(", "field", ",", "i", ")", "=", "parsefield", "(", "line", ",", "i", ",", "n", ")", "fields", ".", "append", "(", "field", ")", "i", "=", "(", "i", "+", "1", ")", "if", "(", "len", "(", "fields", ")", "<", "2", ")", ":", "return", "(", "None", ",", "None", ")", "(", "key", ",", "view", ",", "rest", ")", "=", "(", "fields", "[", "0", "]", ",", "fields", "[", "1", "]", ",", "fields", "[", "2", ":", "]", ")", "fields", "=", "{", "'view'", ":", "view", "}", "for", "field", "in", "rest", ":", "i", "=", "field", ".", "find", "(", "'='", ")", "if", "(", "i", "<", "0", ")", ":", "fkey", "=", "field", "fvalue", "=", "''", "else", ":", "fkey", "=", "field", "[", ":", "i", "]", ".", "strip", "(", ")", "fvalue", "=", "field", "[", "(", "i", "+", "1", ")", ":", "]", ".", "strip", "(", ")", "if", "(", "fkey", "in", "fields", ")", ":", "pass", "else", ":", "fields", "[", "fkey", "]", "=", "fvalue", "return", "(", "key", ",", "fields", ")" ]
parse one entry in a mailcap file and return a dictionary .
train
false
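The parseline record above follows the stdlib mailcap module; assuming its companion parsefield helper splits on semicolons and strips whitespace, a typical entry parses like this:

key, fields = parseline('text/plain; cat %s; copiousoutput')
print(key)     # text/plain
print(fields)  # {'view': 'cat %s', 'copiousoutput': ''}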
55,176
def TRANGE(barDs, count): return call_talib_with_hlc(barDs, count, talib.TRANGE)
[ "def", "TRANGE", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_hlc", "(", "barDs", ",", "count", ",", "talib", ".", "TRANGE", ")" ]
true range .
train
false
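For reference, true range is the largest of the bar's high-low span and the two gaps from the previous close. A pure-python sketch of the per-bar quantity, independent of the ta-lib wrapper in the record above:

def true_range(high, low, prev_close):
    # max of high - low, |high - prev_close| and |low - prev_close|
    return max(high - low, abs(high - prev_close), abs(low - prev_close))

print(true_range(high=105.0, low=99.0, prev_close=101.5))  # 6.0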
55,177
def valid_year(year): return (1920 <= year < 2030)
[ "def", "valid_year", "(", "year", ")", ":", "return", "(", "1920", "<=", "year", "<", "2030", ")" ]
check if number is a valid year .
train
false
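The bounds in the valid_year record above are worth spelling out: the lower bound is inclusive, the upper bound exclusive:

assert valid_year(1920)       # lower bound is inclusive
assert valid_year(1999)
assert not valid_year(2030)   # upper bound is exclusive
assert not valid_year(1919)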
55,178
def orchestrate_high(data, test=None, queue=False, pillar=None, **kwargs): if ((pillar is not None) and (not isinstance(pillar, dict))): raise SaltInvocationError('Pillar data must be formatted as a dictionary') __opts__['file_client'] = 'local' minion = salt.minion.MasterMinion(__opts__) running = minion.functions['state.high'](data, test=None, queue=False, pillar=pillar, **kwargs) ret = {minion.opts['id']: running} __jid_event__.fire_event({'data': ret, 'outputter': 'highstate'}, 'progress') return ret
[ "def", "orchestrate_high", "(", "data", ",", "test", "=", "None", ",", "queue", "=", "False", ",", "pillar", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "(", "pillar", "is", "not", "None", ")", "and", "(", "not", "isinstance", "(", "pillar", ",", "dict", ")", ")", ")", ":", "raise", "SaltInvocationError", "(", "'Pillar data must be formatted as a dictionary'", ")", "__opts__", "[", "'file_client'", "]", "=", "'local'", "minion", "=", "salt", ".", "minion", ".", "MasterMinion", "(", "__opts__", ")", "running", "=", "minion", ".", "functions", "[", "'state.high'", "]", "(", "data", ",", "test", "=", "None", ",", "queue", "=", "False", ",", "pillar", "=", "pillar", ",", "**", "kwargs", ")", "ret", "=", "{", "minion", ".", "opts", "[", "'id'", "]", ":", "running", "}", "__jid_event__", ".", "fire_event", "(", "{", "'data'", ":", "ret", ",", "'outputter'", ":", "'highstate'", "}", ",", "'progress'", ")", "return", "ret" ]
execute a single state orchestration routine .
train
true
55,179
def getAlterationLines(fileName): return archive.getTextLines(getAlterationFile(fileName))
[ "def", "getAlterationLines", "(", "fileName", ")", ":", "return", "archive", ".", "getTextLines", "(", "getAlterationFile", "(", "fileName", ")", ")" ]
get the text lines from the filename in the alterations directories .
train
false
55,180
def isFileLocked(checkfile, writeLockCheck=False): checkfile = ek(os.path.abspath, checkfile) if (not ek(os.path.exists, checkfile)): return True try: f = ek(io.open, checkfile, u'rb') f.close() except IOError: return True if writeLockCheck: lockFile = (checkfile + u'.lckchk') if ek(os.path.exists, lockFile): ek(os.remove, lockFile) try: ek(os.rename, checkfile, lockFile) time.sleep(1) ek(os.rename, lockFile, checkfile) except (OSError, IOError): return True return False
[ "def", "isFileLocked", "(", "checkfile", ",", "writeLockCheck", "=", "False", ")", ":", "checkfile", "=", "ek", "(", "os", ".", "path", ".", "abspath", ",", "checkfile", ")", "if", "(", "not", "ek", "(", "os", ".", "path", ".", "exists", ",", "checkfile", ")", ")", ":", "return", "True", "try", ":", "f", "=", "ek", "(", "io", ".", "open", ",", "checkfile", ",", "u'rb'", ")", "f", ".", "close", "(", ")", "except", "IOError", ":", "return", "True", "if", "writeLockCheck", ":", "lockFile", "=", "(", "checkfile", "+", "u'.lckchk'", ")", "if", "ek", "(", "os", ".", "path", ".", "exists", ",", "lockFile", ")", ":", "ek", "(", "os", ".", "remove", ",", "lockFile", ")", "try", ":", "ek", "(", "os", ".", "rename", ",", "checkfile", ",", "lockFile", ")", "time", ".", "sleep", "(", "1", ")", "ek", "(", "os", ".", "rename", ",", "lockFile", ",", "checkfile", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "True", "return", "False" ]
checks to see if a file is locked .
train
false
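A usage sketch for the isFileLocked record above, assuming the sickbeard-style ek() wrapper and io import are in scope, and using a hypothetical temp file:

with open('/tmp/probe.txt', 'w') as f:
    f.write('data')

print(isFileLocked('/tmp/probe.txt'))         # False: file exists and is readable
print(isFileLocked('/tmp/no-such-file.txt'))  # True: missing files count as locked
print(isFileLocked('/tmp/probe.txt', True))   # False on posix if nothing holds the file open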
55,181
def build_pdf(branch): os.chdir(os.path.join(gitdname, 'statsmodels', 'docs')) sphinx_dir = os.path.join(virtual_dir, 'bin') retcode = subprocess.call(' '.join(['make', 'latexpdf', (('SPHINXBUILD=' + sphinx_dir) + '/sphinx-build')]), shell=True) if (retcode != 0): msg = ('Could not build the pdf docs for branch %s' % branch) raise Exception(msg) os.chdir(dname)
[ "def", "build_pdf", "(", "branch", ")", ":", "os", ".", "chdir", "(", "os", ".", "path", ".", "join", "(", "gitdname", ",", "'statsmodels'", ",", "'docs'", ")", ")", "sphinx_dir", "=", "os", ".", "path", ".", "join", "(", "virtual_dir", ",", "'bin'", ")", "retcode", "=", "subprocess", ".", "call", "(", "' '", ".", "join", "(", "[", "'make'", ",", "'latexpdf'", ",", "(", "(", "'SPHINXBUILD='", "+", "sphinx_dir", ")", "+", "'/sphinx-build'", ")", "]", ")", ",", "shell", "=", "True", ")", "if", "(", "retcode", "!=", "0", ")", ":", "msg", "=", "(", "'Could not build the pdf docs for branch %s'", "%", "branch", ")", "raise", "Exception", "(", "msg", ")", "os", ".", "chdir", "(", "dname", ")" ]
changes into new_branch_dir and builds the docs using sphinx in the buildenv virtualenv .
train
false
55,182
def attach_ordered_steps(workflow, steps): ordered_steps = order_workflow_steps(steps) workflow.has_cycles = (not bool(ordered_steps)) for (i, step) in enumerate((ordered_steps or steps)): step.order_index = i workflow.steps.append(step)
[ "def", "attach_ordered_steps", "(", "workflow", ",", "steps", ")", ":", "ordered_steps", "=", "order_workflow_steps", "(", "steps", ")", "workflow", ".", "has_cycles", "=", "(", "not", "bool", "(", "ordered_steps", ")", ")", "for", "(", "i", ",", "step", ")", "in", "enumerate", "(", "(", "ordered_steps", "or", "steps", ")", ")", ":", "step", ".", "order_index", "=", "i", "workflow", ".", "steps", ".", "append", "(", "step", ")" ]
attempt to topologically order steps and attach to workflow .
train
false
55,183
def test_scenario_has_name(): scenario = Scenario.from_string(SCENARIO1) assert isinstance(scenario, Scenario) assert_equals(scenario.name, 'Adding some students to my university database')
[ "def", "test_scenario_has_name", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "SCENARIO1", ")", "assert", "isinstance", "(", "scenario", ",", "Scenario", ")", "assert_equals", "(", "scenario", ".", "name", ",", "'Adding some students to my university database'", ")" ]
it should extract the name of the scenario .
train
false
55,184
def quote_chinese(url, encodeing='utf-8'): if isinstance(url, six.text_type): return quote_chinese(url.encode(encodeing)) if six.PY3: res = [(six.int2byte(b).decode('latin-1') if (b < 128) else ('%%%02X' % b)) for b in url] else: res = [(b if (ord(b) < 128) else ('%%%02X' % ord(b))) for b in url] return ''.join(res)
[ "def", "quote_chinese", "(", "url", ",", "encodeing", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "url", ",", "six", ".", "text_type", ")", ":", "return", "quote_chinese", "(", "url", ".", "encode", "(", "encodeing", ")", ")", "if", "six", ".", "PY3", ":", "res", "=", "[", "(", "six", ".", "int2byte", "(", "b", ")", ".", "decode", "(", "'latin-1'", ")", "if", "(", "b", "<", "128", ")", "else", "(", "'%%%02X'", "%", "b", ")", ")", "for", "b", "in", "url", "]", "else", ":", "res", "=", "[", "(", "b", "if", "(", "ord", "(", "b", ")", "<", "128", ")", "else", "(", "'%%%02X'", "%", "ord", "(", "b", ")", ")", ")", "for", "b", "in", "url", "]", "return", "''", ".", "join", "(", "res", ")" ]
quote non-ascii characters .
train
true
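A worked example for the quote_chinese record above (requires six); each non-ascii character expands to the percent-encoded utf-8 bytes:

print(quote_chinese('http://example.com/中文'))
# http://example.com/%E4%B8%AD%E6%96%87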
55,185
def order_by_precedence(media_type_lst): ret = [set(), set(), set(), set()] for media_type in media_type_lst: precedence = _MediaType(media_type).precedence ret[(3 - precedence)].add(media_type) return [media_types for media_types in ret if media_types]
[ "def", "order_by_precedence", "(", "media_type_lst", ")", ":", "ret", "=", "[", "set", "(", ")", ",", "set", "(", ")", ",", "set", "(", ")", ",", "set", "(", ")", "]", "for", "media_type", "in", "media_type_lst", ":", "precedence", "=", "_MediaType", "(", "media_type", ")", ".", "precedence", "ret", "[", "(", "3", "-", "precedence", ")", "]", ".", "add", "(", "media_type", ")", "return", "[", "media_types", "for", "media_types", "in", "ret", "if", "media_types", "]" ]
returns a list of sets of media type strings .
train
true
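A sketch for the order_by_precedence record above, assuming the companion _MediaType class behaves as in django rest framework, where more specific media types get higher precedence (so '*/*' sorts last):

print(order_by_precedence(['*/*', 'text/plain', 'text/*']))
# [{'text/plain'}, {'text/*'}, {'*/*'}]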
55,186
def filter_label_1(context, label): return False
[ "def", "filter_label_1", "(", "context", ",", "label", ")", ":", "return", "False" ]
test filter label 1 .
train
false
55,188
def assert_samelines(testcase, text1, text2, msg=None): testcase.assertEqual(text1.splitlines(), text2.splitlines(), msg)
[ "def", "assert_samelines", "(", "testcase", ",", "text1", ",", "text2", ",", "msg", "=", "None", ")", ":", "testcase", ".", "assertEqual", "(", "text1", ".", "splitlines", "(", ")", ",", "text2", ".", "splitlines", "(", ")", ",", "msg", ")" ]
asserts text1 and text2 have the same lines .
train
false
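The point of the assert_samelines record above is that splitlines() normalizes line endings, so texts that differ only in '\r\n' versus '\n' compare equal:

import unittest

class TestSamelines(unittest.TestCase):
    def test_line_endings_are_ignored(self):
        # both strings split into ['a', 'b'], so this passes
        assert_samelines(self, 'a\r\nb\r\n', 'a\nb\n')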
55,189
def is_sorted(exp): return _contains(exp, Sorted)
[ "def", "is_sorted", "(", "exp", ")", ":", "return", "_contains", "(", "exp", ",", "Sorted", ")" ]
does exp contain a sorted node .
train
false
55,190
def extra_job_filters(not_yet_run=False, running=False, finished=False): assert (not ((not_yet_run and running) or (not_yet_run and finished) or (running and finished))), 'Cannot specify more than one filter to this function' not_queued = ('(SELECT job_id FROM afe_host_queue_entries WHERE status != "%s")' % models.HostQueueEntry.Status.QUEUED) not_finished = '(SELECT job_id FROM afe_host_queue_entries WHERE not complete)' if not_yet_run: where = [('id NOT IN ' + not_queued)] elif running: where = [('(id IN %s) AND (id IN %s)' % (not_queued, not_finished))] elif finished: where = [('id NOT IN ' + not_finished)] else: return {} return {'where': where}
[ "def", "extra_job_filters", "(", "not_yet_run", "=", "False", ",", "running", "=", "False", ",", "finished", "=", "False", ")", ":", "assert", "(", "not", "(", "(", "not_yet_run", "and", "running", ")", "or", "(", "not_yet_run", "and", "finished", ")", "or", "(", "running", "and", "finished", ")", ")", ")", ",", "'Cannot specify more than one filter to this function'", "not_queued", "=", "(", "'(SELECT job_id FROM afe_host_queue_entries WHERE status != \"%s\")'", "%", "models", ".", "HostQueueEntry", ".", "Status", ".", "QUEUED", ")", "not_finished", "=", "'(SELECT job_id FROM afe_host_queue_entries WHERE not complete)'", "if", "not_yet_run", ":", "where", "=", "[", "(", "'id NOT IN '", "+", "not_queued", ")", "]", "elif", "running", ":", "where", "=", "[", "(", "'(id IN %s) AND (id IN %s)'", "%", "(", "not_queued", ",", "not_finished", ")", ")", "]", "elif", "finished", ":", "where", "=", "[", "(", "'id NOT IN '", "+", "not_finished", ")", "]", "else", ":", "return", "{", "}", "return", "{", "'where'", ":", "where", "}" ]
generate a sql where clause for job status filtering .
train
false
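A hypothetical call to the extra_job_filters record above, assuming models.HostQueueEntry.Status.QUEUED is the string 'Queued':

print(extra_job_filters(not_yet_run=True))
# {'where': ['id NOT IN (SELECT job_id FROM afe_host_queue_entries '
#            'WHERE status != "Queued")']}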
55,191
def not_friends(user, other_user): return all((not_the_same(friend, other_user) for friend in user['friends']))
[ "def", "not_friends", "(", "user", ",", "other_user", ")", ":", "return", "all", "(", "(", "not_the_same", "(", "friend", ",", "other_user", ")", "for", "friend", "in", "user", "[", "'friends'", "]", ")", ")" ]
other_user is not a friend if he's not in user["friends"]; that is .
train
false
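A sketch for the not_friends record above, assuming the companion not_the_same(u, v) helper compares user ids:

alice = {'id': 0, 'friends': []}
bob = {'id': 1, 'friends': []}
alice['friends'].append(bob)

print(not_friends(alice, bob))                       # False: bob is in alice's friends
print(not_friends(alice, {'id': 2, 'friends': []}))  # True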
55,192
def _LC(f): ring = f.ring k = ring.ngens yring = ring.clone(symbols=ring.symbols[(k - 1)]) y = yring.gens[0] degf = _deg(f) lcf = yring.zero for (monom, coeff) in f.iterterms(): if (monom[:(-1)] == degf): lcf += (coeff * (y ** monom[(-1)])) return lcf
[ "def", "_LC", "(", "f", ")", ":", "ring", "=", "f", ".", "ring", "k", "=", "ring", ".", "ngens", "yring", "=", "ring", ".", "clone", "(", "symbols", "=", "ring", ".", "symbols", "[", "(", "k", "-", "1", ")", "]", ")", "y", "=", "yring", ".", "gens", "[", "0", "]", "degf", "=", "_deg", "(", "f", ")", "lcf", "=", "yring", ".", "zero", "for", "(", "monom", ",", "coeff", ")", "in", "f", ".", "iterterms", "(", ")", ":", "if", "(", "monom", "[", ":", "(", "-", "1", ")", "]", "==", "degf", ")", ":", "lcf", "+=", "(", "coeff", "*", "(", "y", "**", "monom", "[", "(", "-", "1", ")", "]", ")", ")", "return", "lcf" ]
compute the leading coefficient of a multivariate polynomial f in k[x_0 .
train
false
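A sketch for the _LC record above using sympy's polynomial rings; this assumes the companion _deg helper (as in sympy's modulargcd module) returns the leading exponent tuple in all variables but the last:

from sympy import ZZ
from sympy.polys.rings import ring

R, x, y = ring('x, y', ZZ)
f = 3*x**2*y + x**2 + x*y + 5
# the coefficient of x**2, viewed as a polynomial in y
print(_LC(f))  # 3*y + 1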
55,193
def initialize_plugin(pelican_obj): if (_MAIN_SETTINGS is None): initialize_dbs(pelican_obj.settings) subscribe_filter_to_signals(pelican_obj.settings)
[ "def", "initialize_plugin", "(", "pelican_obj", ")", ":", "if", "(", "_MAIN_SETTINGS", "is", "None", ")", ":", "initialize_dbs", "(", "pelican_obj", ".", "settings", ")", "subscribe_filter_to_signals", "(", "pelican_obj", ".", "settings", ")" ]
initialize plugin variables and pelican settings .
train
false
55,194
def shutdown_datastore(): logging.info('Shutting down Cassandra.') monit_interface.stop(cassandra_interface.CASSANDRA_MONIT_WATCH_NAME, is_group=False) logging.warning('Done!') return True
[ "def", "shutdown_datastore", "(", ")", ":", "logging", ".", "info", "(", "'Shutting down Cassandra.'", ")", "monit_interface", ".", "stop", "(", "cassandra_interface", ".", "CASSANDRA_MONIT_WATCH_NAME", ",", "is_group", "=", "False", ")", "logging", ".", "warning", "(", "'Done!'", ")", "return", "True" ]
top level function for bringing down cassandra .
train
false
55,196
def inherit_from_std_ex(node): if ((node.name in ('Exception', 'BaseException')) and (node.root().name == EXCEPTIONS_MODULE)): return True return any((inherit_from_std_ex(parent) for parent in node.ancestors(recurs=False)))
[ "def", "inherit_from_std_ex", "(", "node", ")", ":", "if", "(", "(", "node", ".", "name", "in", "(", "'Exception'", ",", "'BaseException'", ")", ")", "and", "(", "node", ".", "root", "(", ")", ".", "name", "==", "EXCEPTIONS_MODULE", ")", ")", ":", "return", "True", "return", "any", "(", "(", "inherit_from_std_ex", "(", "parent", ")", "for", "parent", "in", "node", ".", "ancestors", "(", "recurs", "=", "False", ")", ")", ")" ]
return true if the given class node is a subclass of a standard exception .
train
false
55,197
def is_python_proxy(parameter): try: is_proxy = (len(parameter.Java.implements) > 0) except Exception: is_proxy = False return is_proxy
[ "def", "is_python_proxy", "(", "parameter", ")", ":", "try", ":", "is_proxy", "=", "(", "len", "(", "parameter", ".", "Java", ".", "implements", ")", ">", "0", ")", "except", "Exception", ":", "is_proxy", "=", "False", "return", "is_proxy" ]
determines whether parameter is a python proxy .
train
false
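A sketch for the is_python_proxy record above: py4j-style proxies expose a Java inner class with a non-empty implements list, and anything else falls into the except branch:

class EchoListener(object):
    # hypothetical interface name; only the presence of Java.implements matters
    class Java:
        implements = ['com.example.Listener']

print(is_python_proxy(EchoListener()))  # True
print(is_python_proxy(object()))        # False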