id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
43,353
@receiver(post_save, sender=StockRecord)
def update_stock_alerts(sender, instance, created, **kwargs):
    """Open or close a low-stock alert whenever a stock record is saved."""
    # Ignore newly created records and fixture ('raw') saves.
    if created or kwargs.get('raw', False):
        return
    record = instance
    try:
        open_alert = StockAlert.objects.get(
            stockrecord=record, status=StockAlert.OPEN)
    except StockAlert.DoesNotExist:
        open_alert = None
    below = record.is_below_threshold
    if below and not open_alert:
        # Stock dropped below the threshold and no alert exists yet.
        StockAlert.objects.create(
            stockrecord=record, threshold=record.low_stock_threshold)
    elif not below and open_alert:
        # Stock recovered: close the outstanding alert.
        open_alert.close()
[ "@", "receiver", "(", "post_save", ",", "sender", "=", "StockRecord", ")", "def", "update_stock_alerts", "(", "sender", ",", "instance", ",", "created", ",", "**", "kwargs", ")", ":", "if", "(", "created", "or", "kwargs", ".", "get", "(", "'raw'", ",", "False", ")", ")", ":", "return", "stockrecord", "=", "instance", "try", ":", "alert", "=", "StockAlert", ".", "objects", ".", "get", "(", "stockrecord", "=", "stockrecord", ",", "status", "=", "StockAlert", ".", "OPEN", ")", "except", "StockAlert", ".", "DoesNotExist", ":", "alert", "=", "None", "if", "(", "stockrecord", ".", "is_below_threshold", "and", "(", "not", "alert", ")", ")", ":", "StockAlert", ".", "objects", ".", "create", "(", "stockrecord", "=", "stockrecord", ",", "threshold", "=", "stockrecord", ".", "low_stock_threshold", ")", "elif", "(", "(", "not", "stockrecord", ".", "is_below_threshold", ")", "and", "alert", ")", ":", "alert", ".", "close", "(", ")" ]
update low-stock alerts .
train
false
43,354
def libvlc_media_list_unlock(p_ml):
    """Release the lock on media list items.

    The L{libvlc_media_list_lock} should be held upon entering this function.
    """
    func = _Cfunctions.get('libvlc_media_list_unlock', None)
    if func is None:
        # Build the ctypes wrapper on first use.
        func = _Cfunction('libvlc_media_list_unlock', ((1,),), None,
                          None, MediaList)
    return func(p_ml)
[ "def", "libvlc_media_list_unlock", "(", "p_ml", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_unlock'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_unlock'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaList", ")", ")", "return", "f", "(", "p_ml", ")" ]
release lock on media list items the l{libvlc_media_list_lock} should be held upon entering this function .
train
false
43,355
def fetch_pack_index(index_url=None, logger=None, allow_empty=False):
    """Fetch the pack indexes and return (index, status).

    Raises ValueError when nothing was fetched and allow_empty is False.
    """
    logger = logger or LOG
    index_urls = _build_index_list(index_url)
    index, status = _fetch_and_compile_index(index_urls, logger)
    if not index and not allow_empty:
        noun = 'index' if len(index_urls) == 1 else 'indexes'
        message = 'No results from the %s: tried %s.\nStatus: %s' % (
            noun, ', '.join(index_urls), json.dumps(status, indent=4))
        raise ValueError(message)
    return index, status
[ "def", "fetch_pack_index", "(", "index_url", "=", "None", ",", "logger", "=", "None", ",", "allow_empty", "=", "False", ")", ":", "logger", "=", "(", "logger", "or", "LOG", ")", "index_urls", "=", "_build_index_list", "(", "index_url", ")", "(", "index", ",", "status", ")", "=", "_fetch_and_compile_index", "(", "index_urls", ",", "logger", ")", "if", "(", "(", "not", "index", ")", "and", "(", "not", "allow_empty", ")", ")", ":", "raise", "ValueError", "(", "(", "'No results from the %s: tried %s.\\nStatus: %s'", "%", "(", "(", "'index'", "if", "(", "len", "(", "index_urls", ")", "==", "1", ")", "else", "'indexes'", ")", ",", "', '", ".", "join", "(", "index_urls", ")", ",", "json", ".", "dumps", "(", "status", ",", "indent", "=", "4", ")", ")", ")", ")", "return", "(", "index", ",", "status", ")" ]
fetch the pack indexes and return the object .
train
false
43,356
def cuda_error():
    """Return None, or an exception if the CUDA driver fails to initialize.

    NOTE(review): presumably a fallback stub used when no real CUDA
    binding is available — it always reports success (None); confirm.
    """
    return None
[ "def", "cuda_error", "(", ")", ":", "return", "None" ]
returns none or an exception if the cuda driver fails to initialize .
train
false
43,357
def message_from_string(s, *args, **kws):
    """Parse a string into a Message object model.

    Extra positional/keyword arguments are forwarded to the Parser.
    """
    from email.parser import Parser
    parser = Parser(*args, **kws)
    return parser.parsestr(s)
[ "def", "message_from_string", "(", "s", ",", "*", "args", ",", "**", "kws", ")", ":", "from", "email", ".", "parser", "import", "Parser", "return", "Parser", "(", "*", "args", ",", "**", "kws", ")", ".", "parsestr", "(", "s", ")" ]
parse a string into a message object model .
train
true
43,358
def suggestDType(x):
    """Return a suitable dtype for x.

    For a list/tuple the dtype is inferred from the first element; an
    empty sequence raises because no element type can be determined.
    """
    if isinstance(x, (list, tuple)):
        if not x:
            raise Exception('can not determine dtype for empty list')
        x = x[0]
    if hasattr(x, 'dtype'):
        return x.dtype
    if isinstance(x, float):
        return float
    if isinstance(x, int):
        return int
    return object
[ "def", "suggestDType", "(", "x", ")", ":", "if", "(", "isinstance", "(", "x", ",", "list", ")", "or", "isinstance", "(", "x", ",", "tuple", ")", ")", ":", "if", "(", "len", "(", "x", ")", "==", "0", ")", ":", "raise", "Exception", "(", "'can not determine dtype for empty list'", ")", "x", "=", "x", "[", "0", "]", "if", "hasattr", "(", "x", ",", "'dtype'", ")", ":", "return", "x", ".", "dtype", "elif", "isinstance", "(", "x", ",", "float", ")", ":", "return", "float", "elif", "isinstance", "(", "x", ",", "int", ")", ":", "return", "int", "else", ":", "return", "object" ]
return a suitable dtype for x .
train
false
43,359
def _find_subnets(subnet_name=None, vpc_id=None, cidr=None, tags=None,
                  conn=None):
    """Return the ids of subnets matching the given properties, or False.

    At least one of subnet_name, cidr or tags must be provided.
    """
    # BUG FIX: any() takes a single iterable; the original passed three
    # positional arguments, which raises TypeError before validation runs.
    if not any((subnet_name, tags, cidr)):
        raise SaltInvocationError('At least one of the following must be specified: subnet_name, cidr or tags.')
    filter_parameters = {'filters': {}}
    if cidr:
        filter_parameters['filters']['cidr'] = cidr
    if subnet_name:
        filter_parameters['filters']['tag:Name'] = subnet_name
    if vpc_id:
        filter_parameters['filters']['VpcId'] = vpc_id
    if tags:
        for tag_name, tag_value in six.iteritems(tags):
            filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value
    subnets = conn.get_all_subnets(**filter_parameters)
    log.debug('The filters criteria {0} matched the following subnets: {1}'.format(filter_parameters, subnets))
    if subnets:
        return [subnet.id for subnet in subnets]
    else:
        return False
[ "def", "_find_subnets", "(", "subnet_name", "=", "None", ",", "vpc_id", "=", "None", ",", "cidr", "=", "None", ",", "tags", "=", "None", ",", "conn", "=", "None", ")", ":", "if", "(", "not", "any", "(", "subnet_name", ",", "tags", ",", "cidr", ")", ")", ":", "raise", "SaltInvocationError", "(", "'At least one of the following must be specified: subnet_name, cidr or tags.'", ")", "filter_parameters", "=", "{", "'filters'", ":", "{", "}", "}", "if", "cidr", ":", "filter_parameters", "[", "'filters'", "]", "[", "'cidr'", "]", "=", "cidr", "if", "subnet_name", ":", "filter_parameters", "[", "'filters'", "]", "[", "'tag:Name'", "]", "=", "subnet_name", "if", "vpc_id", ":", "filter_parameters", "[", "'filters'", "]", "[", "'VpcId'", "]", "=", "vpc_id", "if", "tags", ":", "for", "(", "tag_name", ",", "tag_value", ")", "in", "six", ".", "iteritems", "(", "tags", ")", ":", "filter_parameters", "[", "'filters'", "]", "[", "'tag:{0}'", ".", "format", "(", "tag_name", ")", "]", "=", "tag_value", "subnets", "=", "conn", ".", "get_all_subnets", "(", "**", "filter_parameters", ")", "log", ".", "debug", "(", "'The filters criteria {0} matched the following subnets: {1}'", ".", "format", "(", "filter_parameters", ",", "subnets", ")", ")", "if", "subnets", ":", "return", "[", "subnet", ".", "id", "for", "subnet", "in", "subnets", "]", "else", ":", "return", "False" ]
return the ids of subnets matching the given subnet properties .
train
true
43,360
def track_created_event(request, event_name, course, obj, data):
    """Send an analytics event for a newly created forum thread/response."""
    body = obj.body
    # Record whether the tracked body had to be truncated.
    data['truncated'] = len(body) > TRACKING_MAX_FORUM_BODY
    data['body'] = body[:TRACKING_MAX_FORUM_BODY]
    track_forum_event(request, event_name, course, obj, data)
[ "def", "track_created_event", "(", "request", ",", "event_name", ",", "course", ",", "obj", ",", "data", ")", ":", "if", "(", "len", "(", "obj", ".", "body", ")", ">", "TRACKING_MAX_FORUM_BODY", ")", ":", "data", "[", "'truncated'", "]", "=", "True", "else", ":", "data", "[", "'truncated'", "]", "=", "False", "data", "[", "'body'", "]", "=", "obj", ".", "body", "[", ":", "TRACKING_MAX_FORUM_BODY", "]", "track_forum_event", "(", "request", ",", "event_name", ",", "course", ",", "obj", ",", "data", ")" ]
send analytics event for a newly created thread .
train
false
43,361
def updateStimText(stim, text=None):
    """Redraw a text stim, optionally updating its text first.

    Returns the elapsed time in milliseconds for the set/draw/flush cycle.
    """
    start_ms = core.getTime() * 1000.0
    if text:
        stim.setText(text)
    stim.draw()
    # Block until the GL pipeline finishes so the timing is meaningful.
    gl.glFinish()
    end_ms = core.getTime() * 1000.0
    return end_ms - start_ms
[ "def", "updateStimText", "(", "stim", ",", "text", "=", "None", ")", ":", "stime", "=", "(", "core", ".", "getTime", "(", ")", "*", "1000.0", ")", "if", "text", ":", "stim", ".", "setText", "(", "text", ")", "stim", ".", "draw", "(", ")", "gl", ".", "glFinish", "(", ")", "etime", "=", "(", "core", ".", "getTime", "(", ")", "*", "1000.0", ")", "return", "(", "etime", "-", "stime", ")" ]
function used by all text stim types for redrawing the stim .
train
false
43,362
def _sed_esc(string, escape_all=False): special_chars = '^.[$()|*+?{' string = string.replace("'", '\'"\'"\'').replace('/', '\\/') if (escape_all is True): for char in special_chars: string = string.replace(char, ('\\' + char)) return string
[ "def", "_sed_esc", "(", "string", ",", "escape_all", "=", "False", ")", ":", "special_chars", "=", "'^.[$()|*+?{'", "string", "=", "string", ".", "replace", "(", "\"'\"", ",", "'\\'\"\\'\"\\''", ")", ".", "replace", "(", "'/'", ",", "'\\\\/'", ")", "if", "(", "escape_all", "is", "True", ")", ":", "for", "char", "in", "special_chars", ":", "string", "=", "string", ".", "replace", "(", "char", ",", "(", "'\\\\'", "+", "char", ")", ")", "return", "string" ]
escape single quotes and forward slashes .
train
true
43,363
def getWord(form, pos='noun'):
    """Return a word with the given lexical form and part of speech."""
    dictionary = _dictionaryFor(pos)
    return dictionary.getWord(form)
[ "def", "getWord", "(", "form", ",", "pos", "=", "'noun'", ")", ":", "return", "_dictionaryFor", "(", "pos", ")", ".", "getWord", "(", "form", ")" ]
return a word with the given lexical form and pos .
train
false
43,365
def getPointsFromFile(numPoints, file):
    """Read numPoints little-endian (x, y) float pairs as complex points."""
    points = []
    for _ in xrange(numPoints):
        real_part = getLittleEndianFloatGivenFile(file)
        imag_part = getLittleEndianFloatGivenFile(file)
        points.append(complex(real_part, imag_part))
    return points
[ "def", "getPointsFromFile", "(", "numPoints", ",", "file", ")", ":", "points", "=", "[", "]", "for", "pointIndex", "in", "xrange", "(", "numPoints", ")", ":", "x", "=", "getLittleEndianFloatGivenFile", "(", "file", ")", "y", "=", "getLittleEndianFloatGivenFile", "(", "file", ")", "points", ".", "append", "(", "complex", "(", "x", ",", "y", ")", ")", "return", "points" ]
process the vertex points for a given boundary .
train
false
43,368
def get_tile(filename, coord):
    """Retrieve the mime-type and raw content of a tile by coordinate."""
    db = _connect(filename)
    db.text_factory = bytes
    mime_types = {'png': 'image/png', 'jpg': 'image/jpeg',
                  'json': 'application/json', None: None}
    row = db.execute("SELECT value FROM metadata WHERE name='format'").fetchone()
    format = (row and row[0]) or None
    mime_type = mime_types[format]
    # Tile rows are stored with a flipped y axis relative to the coordinate.
    tile_row = (2 ** coord.zoom) - 1 - coord.row
    content_row = db.execute(
        'SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?',
        (coord.zoom, coord.column, tile_row)).fetchone()
    content = (content_row and content_row[0]) or None
    return (mime_type, content)
[ "def", "get_tile", "(", "filename", ",", "coord", ")", ":", "db", "=", "_connect", "(", "filename", ")", "db", ".", "text_factory", "=", "bytes", "formats", "=", "{", "'png'", ":", "'image/png'", ",", "'jpg'", ":", "'image/jpeg'", ",", "'json'", ":", "'application/json'", ",", "None", ":", "None", "}", "format", "=", "db", ".", "execute", "(", "\"SELECT value FROM metadata WHERE name='format'\"", ")", ".", "fetchone", "(", ")", "format", "=", "(", "(", "format", "and", "format", "[", "0", "]", ")", "or", "None", ")", "mime_type", "=", "formats", "[", "format", "]", "tile_row", "=", "(", "(", "(", "2", "**", "coord", ".", "zoom", ")", "-", "1", ")", "-", "coord", ".", "row", ")", "q", "=", "'SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?'", "content", "=", "db", ".", "execute", "(", "q", ",", "(", "coord", ".", "zoom", ",", "coord", ".", "column", ",", "tile_row", ")", ")", ".", "fetchone", "(", ")", "content", "=", "(", "(", "content", "and", "content", "[", "0", "]", ")", "or", "None", ")", "return", "(", "mime_type", ",", "content", ")" ]
retrieve the mime-type and raw content of a tile by coordinate .
train
false
43,369
def sdram_freq_config_set(kodi_setting, all_settings):
    """Check if the frequency setting equals the default Pi setting.

    Returns the sentinel 'remove_this_line' when the value matches the
    board's default, otherwise the setting unchanged.
    """
    try:
        version = PiVersion()
    except IOError:
        version = 'PiB'
    # Per-board default SDRAM frequencies.
    defaults = {'PiB': 400, 'Pi2': 450}
    default = defaults.get(version)
    if default is not None and int(kodi_setting) == default:
        return 'remove_this_line'
    return kodi_setting
[ "def", "sdram_freq_config_set", "(", "kodi_setting", ",", "all_settings", ")", ":", "try", ":", "version", "=", "PiVersion", "(", ")", "except", "IOError", ":", "version", "=", "'PiB'", "if", "(", "version", "==", "'PiB'", ")", ":", "if", "(", "int", "(", "kodi_setting", ")", "==", "400", ")", ":", "return", "'remove_this_line'", "elif", "(", "version", "==", "'Pi2'", ")", ":", "if", "(", "int", "(", "kodi_setting", ")", "==", "450", ")", ":", "return", "'remove_this_line'", "return", "kodi_setting" ]
checks if the frequency setting is the same as the default pi setting .
train
false
43,370
def js_to_url_function(converter):
    """Get the JavaScript converter function from a rule converter."""
    if hasattr(converter, 'js_to_url_function'):
        body = converter.js_to_url_function()
        return '(function(value) { %s })' % body
    # Fall back to a conversion registered for the converter's class
    # (searched in MRO order); default to plain URI encoding otherwise.
    for cls in getmro(type(converter)):
        if cls in js_to_url_functions:
            body = js_to_url_functions[cls](converter)
            return '(function(value) { %s })' % body
    return 'encodeURIComponent'
[ "def", "js_to_url_function", "(", "converter", ")", ":", "if", "hasattr", "(", "converter", ",", "'js_to_url_function'", ")", ":", "data", "=", "converter", ".", "js_to_url_function", "(", ")", "else", ":", "for", "cls", "in", "getmro", "(", "type", "(", "converter", ")", ")", ":", "if", "(", "cls", "in", "js_to_url_functions", ")", ":", "data", "=", "js_to_url_functions", "[", "cls", "]", "(", "converter", ")", "break", "else", ":", "return", "'encodeURIComponent'", "return", "(", "'(function(value) { %s })'", "%", "data", ")" ]
get the javascript converter function from a rule .
train
true
43,371
@pytest.mark.parametrize(u'text', [u'We were scared', u'We Were Scared'])
def test_issue744(en_tokenizer, text):
    """'were'/'Were' must not be split by tokenizer exceptions."""
    doc = en_tokenizer(text)
    assert len(doc) == 3
    assert doc[1].text.lower() == u'were'
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'text'", ",", "[", "u'We were scared'", ",", "u'We Were Scared'", "]", ")", "def", "test_issue744", "(", "en_tokenizer", ",", "text", ")", ":", "tokens", "=", "en_tokenizer", "(", "text", ")", "assert", "(", "len", "(", "tokens", ")", "==", "3", ")", "assert", "(", "tokens", "[", "1", "]", ".", "text", ".", "lower", "(", ")", "==", "u'were'", ")" ]
test that were and were are excluded from the contractions generated by the english tokenizer exceptions .
train
false
43,372
def acovf_explicit(ar, ma, nobs):
    """Compute autocovariances explicitly from the MA representation."""
    ir = arma_impulse_response(ar, ma)
    # NOTE(review): only the first 10 lags are returned regardless of nobs;
    # nobs only bounds the impulse-response window — confirm intended.
    return [np.dot(ir[:nobs - lag], ir[lag:nobs]) for lag in range(10)]
[ "def", "acovf_explicit", "(", "ar", ",", "ma", ",", "nobs", ")", ":", "ir", "=", "arma_impulse_response", "(", "ar", ",", "ma", ")", "acovfexpl", "=", "[", "np", ".", "dot", "(", "ir", "[", ":", "(", "nobs", "-", "t", ")", "]", ",", "ir", "[", "t", ":", "nobs", "]", ")", "for", "t", "in", "range", "(", "10", ")", "]", "return", "acovfexpl" ]
add correlation of ma representation explicitely .
train
false
43,374
def require_registration(resource_name):
    """Decorator factory: require device registration to access a resource.

    An unregistered device that is online is warned and redirected to the
    registration page; otherwise the wrapped handler runs normally.
    """
    def real_decorator_wrapper(handler):
        def real_decorator_wrapper_fn(request, *args, **kwargs):
            registered = Device.get_own_device().is_registered()
            offline = not am_i_online(settings.CENTRAL_SERVER_URL)
            if registered or offline:
                return handler(request, *args, **kwargs)
            messages.warning(request, _(
                'In order to access %(resource_name)s, you must register your device first.'
                % {'resource_name': unicode(resource_name)}))
            return HttpResponseRedirect(set_query_params(
                reverse('register_public_key'), {'next': request.path}))
        return real_decorator_wrapper_fn
    return real_decorator_wrapper
[ "def", "require_registration", "(", "resource_name", ")", ":", "def", "real_decorator_wrapper", "(", "handler", ")", ":", "def", "real_decorator_wrapper_fn", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "Device", ".", "get_own_device", "(", ")", ".", "is_registered", "(", ")", "or", "(", "not", "am_i_online", "(", "settings", ".", "CENTRAL_SERVER_URL", ")", ")", ")", ":", "return", "handler", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "messages", ".", "warning", "(", "request", ",", "_", "(", "(", "'In order to access %(resource_name)s, you must register your device first.'", "%", "{", "'resource_name'", ":", "unicode", "(", "resource_name", ")", "}", ")", ")", ")", "return", "HttpResponseRedirect", "(", "set_query_params", "(", "reverse", "(", "'register_public_key'", ")", ",", "{", "'next'", ":", "request", ".", "path", "}", ")", ")", "return", "real_decorator_wrapper_fn", "return", "real_decorator_wrapper" ]
decorator that requires the device to be registered before the requested resource can be accessed .
train
false
43,375
def _depth_limited_walk(top, max_depth=None): for (root, dirs, files) in os.walk(top): if (max_depth is not None): rel_depth = (root.count(os.sep) - top.count(os.sep)) if (rel_depth >= max_depth): del dirs[:] (yield (str(root), list(dirs), list(files)))
[ "def", "_depth_limited_walk", "(", "top", ",", "max_depth", "=", "None", ")", ":", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "top", ")", ":", "if", "(", "max_depth", "is", "not", "None", ")", ":", "rel_depth", "=", "(", "root", ".", "count", "(", "os", ".", "sep", ")", "-", "top", ".", "count", "(", "os", ".", "sep", ")", ")", "if", "(", "rel_depth", ">=", "max_depth", ")", ":", "del", "dirs", "[", ":", "]", "(", "yield", "(", "str", "(", "root", ")", ",", "list", "(", "dirs", ")", ",", "list", "(", "files", ")", ")", ")" ]
walk the directory tree under root up till reaching max_depth .
train
false
43,377
def get_group_dict(user=None, include_default=True):
    """Return a dict mapping each of the user's group names to its gid."""
    if HAS_GRP is False or HAS_PWD is False:
        # Platform lacks the grp/pwd modules; nothing to report.
        return {}
    names = get_group_list(user, include_default=include_default)
    return dict((name, grp.getgrnam(name).gr_gid) for name in names)
[ "def", "get_group_dict", "(", "user", "=", "None", ",", "include_default", "=", "True", ")", ":", "if", "(", "(", "HAS_GRP", "is", "False", ")", "or", "(", "HAS_PWD", "is", "False", ")", ")", ":", "return", "{", "}", "group_dict", "=", "{", "}", "group_names", "=", "get_group_list", "(", "user", ",", "include_default", "=", "include_default", ")", "for", "group", "in", "group_names", ":", "group_dict", ".", "update", "(", "{", "group", ":", "grp", ".", "getgrnam", "(", "group", ")", ".", "gr_gid", "}", ")", "return", "group_dict" ]
returns a dict of all of the system groups as keys .
train
true
43,378
def get_zonecode():
    """Get the current timezone code.

    NOTE(review): stub — always returns False; presumably this lookup is
    not implemented for this platform. Confirm against sibling modules.
    """
    return False
[ "def", "get_zonecode", "(", ")", ":", "return", "False" ]
get current timezone cli example: .
train
false
43,379
def continued_fraction_convergents(cf):
    """Yield the convergents p/q of a continued fraction.

    Uses the standard recurrence p_k = a_k*p_{k-1} + p_{k-2} (and the
    same for q), seeded with (0, 1) and (1, 0).
    """
    p_prev, q_prev = Integer(0), Integer(1)
    p_cur, q_cur = Integer(1), Integer(0)
    for a in cf:
        p_cur, p_prev = a * p_cur + p_prev, p_cur
        q_cur, q_prev = a * q_cur + q_prev, q_cur
        yield p_cur / q_cur
[ "def", "continued_fraction_convergents", "(", "cf", ")", ":", "(", "p_2", ",", "q_2", ")", "=", "(", "Integer", "(", "0", ")", ",", "Integer", "(", "1", ")", ")", "(", "p_1", ",", "q_1", ")", "=", "(", "Integer", "(", "1", ")", ",", "Integer", "(", "0", ")", ")", "for", "a", "in", "cf", ":", "(", "p", ",", "q", ")", "=", "(", "(", "(", "a", "*", "p_1", ")", "+", "p_2", ")", ",", "(", "(", "a", "*", "q_1", ")", "+", "q_2", ")", ")", "(", "p_2", ",", "q_2", ")", "=", "(", "p_1", ",", "q_1", ")", "(", "p_1", ",", "q_1", ")", "=", "(", "p", ",", "q", ")", "(", "yield", "(", "p", "/", "q", ")", ")" ]
return an iterator over the convergents of a continued fraction .
train
false
43,380
def create_ports_tree():
    """Create the poudriere ports tree.

    Not working; need to run portfetch non-interactively.
    """
    _check_config_exists()
    return __salt__['cmd.run']('poudriere ports -c')
[ "def", "create_ports_tree", "(", ")", ":", "_check_config_exists", "(", ")", "cmd", "=", "'poudriere ports -c'", "ret", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", "return", "ret" ]
not working need to run portfetch non interactive .
train
false
43,381
def _kname(obj): if isinstance(obj, dict): return [obj.get('metadata', {}).get('name', '')] elif isinstance(obj, (list, tuple)): names = [] for i in obj: names.append(i.get('metadata', {}).get('name', '')) return names else: return 'Unknown type'
[ "def", "_kname", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "[", "obj", ".", "get", "(", "'metadata'", ",", "{", "}", ")", ".", "get", "(", "'name'", ",", "''", ")", "]", "elif", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "names", "=", "[", "]", "for", "i", "in", "obj", ":", "names", ".", "append", "(", "i", ".", "get", "(", "'metadata'", ",", "{", "}", ")", ".", "get", "(", "'name'", ",", "''", ")", ")", "return", "names", "else", ":", "return", "'Unknown type'" ]
get name or names out of json result from api server .
train
true
43,383
def isSegmentAround(aroundSegmentsDictionary, aroundSegmentsDictionaryKey, segment):
    """Determine if there is another segment around in x for the given key."""
    if aroundSegmentsDictionaryKey not in aroundSegmentsDictionary:
        return False
    candidates = aroundSegmentsDictionary[aroundSegmentsDictionaryKey]
    # A candidate counts when its first endpoint's x span overlaps segment.
    return any(
        isSegmentInX(segment, candidate[0].point.real,
                     candidate[0].otherEndpoint.point.real)
        for candidate in candidates)
[ "def", "isSegmentAround", "(", "aroundSegmentsDictionary", ",", "aroundSegmentsDictionaryKey", ",", "segment", ")", ":", "if", "(", "aroundSegmentsDictionaryKey", "not", "in", "aroundSegmentsDictionary", ")", ":", "return", "False", "for", "aroundSegment", "in", "aroundSegmentsDictionary", "[", "aroundSegmentsDictionaryKey", "]", ":", "endpoint", "=", "aroundSegment", "[", "0", "]", "if", "isSegmentInX", "(", "segment", ",", "endpoint", ".", "point", ".", "real", ",", "endpoint", ".", "otherEndpoint", ".", "point", ".", "real", ")", ":", "return", "True", "return", "False" ]
determine if there is another segment around .
train
false
43,384
def _getMessageStructure(message):
    """Construct the appropriate message-structure object for message."""
    main, subtype, attrs = _getContentType(message)
    if main is not None:
        main = main.lower()
    if subtype is not None:
        subtype = subtype.lower()
    # Dispatch on the normalized content type.
    if main == 'multipart':
        return _MultipartMessageStructure(message, subtype, attrs)
    if (main, subtype) == ('message', 'rfc822'):
        return _RFC822MessageStructure(message, main, subtype, attrs)
    if main == 'text':
        return _TextMessageStructure(message, main, subtype, attrs)
    return _SinglepartMessageStructure(message, main, subtype, attrs)
[ "def", "_getMessageStructure", "(", "message", ")", ":", "(", "main", ",", "subtype", ",", "attrs", ")", "=", "_getContentType", "(", "message", ")", "if", "(", "main", "is", "not", "None", ")", ":", "main", "=", "main", ".", "lower", "(", ")", "if", "(", "subtype", "is", "not", "None", ")", ":", "subtype", "=", "subtype", ".", "lower", "(", ")", "if", "(", "main", "==", "'multipart'", ")", ":", "return", "_MultipartMessageStructure", "(", "message", ",", "subtype", ",", "attrs", ")", "elif", "(", "(", "main", ",", "subtype", ")", "==", "(", "'message'", ",", "'rfc822'", ")", ")", ":", "return", "_RFC822MessageStructure", "(", "message", ",", "main", ",", "subtype", ",", "attrs", ")", "elif", "(", "main", "==", "'text'", ")", ":", "return", "_TextMessageStructure", "(", "message", ",", "main", ",", "subtype", ",", "attrs", ")", "else", ":", "return", "_SinglepartMessageStructure", "(", "message", ",", "main", ",", "subtype", ",", "attrs", ")" ]
construct an appropriate type of message structure object for the given message object .
train
false
43,385
def RemovePrefix(a, prefix):
    """Return a without prefix if it starts with prefix, else a unchanged."""
    if a.startswith(prefix):
        return a[len(prefix):]
    return a
[ "def", "RemovePrefix", "(", "a", ",", "prefix", ")", ":", "return", "(", "a", "[", "len", "(", "prefix", ")", ":", "]", "if", "a", ".", "startswith", "(", "prefix", ")", "else", "a", ")" ]
returns a without prefix if it starts with prefix .
train
false
43,386
def BBANDS(ds, count, timeperiod=(- (2 ** 31)), nbdevup=(-4e+37), nbdevdn=(-4e+37), matype=0):
    """Bollinger Bands.

    Returns the talib result tuple, or (None, None, None) when the call
    produced nothing. NOTE(review): the extreme defaults are presumably
    TA-Lib's "use library default" sentinels — confirm.
    """
    bands = call_talib_with_ds(ds, count, talib.BBANDS, timeperiod,
                               nbdevup, nbdevdn, matype)
    return bands if bands is not None else (None, None, None)
[ "def", "BBANDS", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "nbdevup", "=", "(", "-", "4e+37", ")", ",", "nbdevdn", "=", "(", "-", "4e+37", ")", ",", "matype", "=", "0", ")", ":", "ret", "=", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "BBANDS", ",", "timeperiod", ",", "nbdevup", ",", "nbdevdn", ",", "matype", ")", "if", "(", "ret", "is", "None", ")", ":", "ret", "=", "(", "None", ",", "None", ",", "None", ")", "return", "ret" ]
bollinger bands .
train
false
43,387
def test_grouping():
    """Grouping a table with mixin columns must raise NotImplementedError."""
    table = QTable(MIXIN_COLS)
    table['index'] = ['a', 'b', 'b', 'c']
    with pytest.raises(NotImplementedError):
        table.group_by('index')
[ "def", "test_grouping", "(", ")", ":", "t", "=", "QTable", "(", "MIXIN_COLS", ")", "t", "[", "'index'", "]", "=", "[", "'a'", ",", "'b'", ",", "'b'", ",", "'c'", "]", "with", "pytest", ".", "raises", "(", "NotImplementedError", ")", ":", "t", ".", "group_by", "(", "'index'", ")" ]
test grouping with mixin columns .
train
false
43,390
def vpc_exists(module, vpc, name, cidr_block, multi):
    """Return the single VPC matching name and CIDR block, else None.

    Fails the module on lookup errors, or when several VPCs match and
    duplicates were not explicitly allowed.
    """
    try:
        matches = vpc.get_all_vpcs(
            filters={'tag:Name': name, 'cidr-block': cidr_block})
    except Exception as e:
        module.fail_json(msg=boto_exception(e))
    if len(matches) == 1:
        return matches[0]
    if len(matches) > 1 and multi:
        module.fail_json(msg=('Currently there are %d VPCs that have the same name and CIDR block you specified. If you would like to create the VPC anyway please pass True to the multi_ok param.' % len(matches)))
    return None
[ "def", "vpc_exists", "(", "module", ",", "vpc", ",", "name", ",", "cidr_block", ",", "multi", ")", ":", "matched_vpc", "=", "None", "try", ":", "matching_vpcs", "=", "vpc", ".", "get_all_vpcs", "(", "filters", "=", "{", "'tag:Name'", ":", "name", ",", "'cidr-block'", ":", "cidr_block", "}", ")", "except", "Exception", "as", "e", ":", "e_msg", "=", "boto_exception", "(", "e", ")", "module", ".", "fail_json", "(", "msg", "=", "e_msg", ")", "if", "(", "len", "(", "matching_vpcs", ")", "==", "1", ")", ":", "matched_vpc", "=", "matching_vpcs", "[", "0", "]", "elif", "(", "len", "(", "matching_vpcs", ")", ">", "1", ")", ":", "if", "multi", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'Currently there are %d VPCs that have the same name and CIDR block you specified. If you would like to create the VPC anyway please pass True to the multi_ok param.'", "%", "len", "(", "matching_vpcs", ")", ")", ")", "return", "matched_vpc" ]
returns true or false in regards to the existence of a vpc .
train
false
43,392
def generateApiKey():
    """Return a new randomized 32-character hex API key."""
    logger.log(u'Generating New API key')
    # BUG FIX: hashlib requires bytes on Python 3; the original fed str,
    # which raises TypeError there. Encoding works on Python 2 as well.
    secure_hash = hashlib.sha512(str(time.time()).encode('utf-8'))
    # Mix in cryptographically strong randomness.
    secure_hash.update(str(random.SystemRandom().getrandbits(4096)).encode('utf-8'))
    return secure_hash.hexdigest()[:32]
[ "def", "generateApiKey", "(", ")", ":", "logger", ".", "log", "(", "u'Generating New API key'", ")", "secure_hash", "=", "hashlib", ".", "sha512", "(", "str", "(", "time", ".", "time", "(", ")", ")", ")", "secure_hash", ".", "update", "(", "str", "(", "random", ".", "SystemRandom", "(", ")", ".", "getrandbits", "(", "4096", ")", ")", ")", "return", "secure_hash", ".", "hexdigest", "(", ")", "[", ":", "32", "]" ]
return a new randomized api_key .
train
false
43,393
def serve_application(application, prefix, port=None, host=None, max_children=None):
    """Serve the specified WSGI application via the SCGI proxy."""
    class SCGIAppHandler(SWAP, ):
        def __init__(self, *args, **kwargs):
            self.prefix = prefix
            self.app_obj = application
            SWAP.__init__(self, *args, **kwargs)

    server_kwargs = dict(handler_class=SCGIAppHandler)
    # Forward only the server options that were explicitly provided.
    for option, value in (('host', host), ('port', port),
                          ('max_children', max_children)):
        if value is not None:
            server_kwargs[option] = value
    scgi_server.SCGIServer(**server_kwargs).serve()
[ "def", "serve_application", "(", "application", ",", "prefix", ",", "port", "=", "None", ",", "host", "=", "None", ",", "max_children", "=", "None", ")", ":", "class", "SCGIAppHandler", "(", "SWAP", ",", ")", ":", "def", "__init__", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "self", ".", "prefix", "=", "prefix", "self", ".", "app_obj", "=", "application", "SWAP", ".", "__init__", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "kwargs", "=", "dict", "(", "handler_class", "=", "SCGIAppHandler", ")", "for", "kwarg", "in", "(", "'host'", ",", "'port'", ",", "'max_children'", ")", ":", "if", "(", "locals", "(", ")", "[", "kwarg", "]", "is", "not", "None", ")", ":", "kwargs", "[", "kwarg", "]", "=", "locals", "(", ")", "[", "kwarg", "]", "scgi_server", ".", "SCGIServer", "(", "**", "kwargs", ")", ".", "serve", "(", ")" ]
serve the specified wsgi application via scgi proxy .
train
false
43,394
def extra_host_filters(multiple_labels=()):
    """Generate SQL WHERE clauses matching hosts in an intersection of labels."""
    where_str = ('afe_hosts.id in (select host_id from afe_hosts_labels '
                 'where label_id=%s)')
    # One clause per label; params line up positionally with the clauses.
    return {
        'where': [where_str] * len(multiple_labels),
        'params': [models.Label.smart_get(label).id
                   for label in multiple_labels],
    }
[ "def", "extra_host_filters", "(", "multiple_labels", "=", "(", ")", ")", ":", "extra_args", "=", "{", "}", "where_str", "=", "'afe_hosts.id in (select host_id from afe_hosts_labels where label_id=%s)'", "extra_args", "[", "'where'", "]", "=", "(", "[", "where_str", "]", "*", "len", "(", "multiple_labels", ")", ")", "extra_args", "[", "'params'", "]", "=", "[", "models", ".", "Label", ".", "smart_get", "(", "label", ")", ".", "id", "for", "label", "in", "multiple_labels", "]", "return", "extra_args" ]
generate sql where clauses for matching hosts in an intersection of labels .
train
false
43,396
def load_shortcuts():
    """Load the shortcuts from QSettings into CUSTOM_SHORTCUTS."""
    global SHORTCUTS, CUSTOM_SHORTCUTS
    settings = QSettings(SETTINGS_PATH, QSettings.IniFormat)
    for action in SHORTCUTS:
        # Fall back to the built-in default when no override is stored.
        default_str = SHORTCUTS[action].toString()
        stored = settings.value(u'shortcuts/%s' % action, default_str)
        CUSTOM_SHORTCUTS[action] = QKeySequence(stored)
[ "def", "load_shortcuts", "(", ")", ":", "global", "SHORTCUTS", ",", "CUSTOM_SHORTCUTS", "settings", "=", "QSettings", "(", "SETTINGS_PATH", ",", "QSettings", ".", "IniFormat", ")", "for", "action", "in", "SHORTCUTS", ":", "default_action", "=", "SHORTCUTS", "[", "action", "]", ".", "toString", "(", ")", "shortcut_action", "=", "settings", ".", "value", "(", "(", "u'shortcuts/%s'", "%", "action", ")", ",", "default_action", ")", "CUSTOM_SHORTCUTS", "[", "action", "]", "=", "QKeySequence", "(", "shortcut_action", ")" ]
loads the shortcuts from qsettings .
train
false
43,397
def generate_index_page(index_links, index_fp, order=[_index_headers['run_summary']]): top_level_dir = split(split(index_fp)[0])[1] index_page_header = get_index_page_header() index_lines = [index_page_header] d = {} for e in index_links: try: d[e[2]].append((e[0], e[1])) except KeyError: d[e[2]] = [(e[0], e[1])] index_lines.append('<table border=1>\n') ordered_table_entries = (order + [k for k in d if (k not in order)]) for k in ordered_table_entries: v = d[k] index_lines.append(('<tr colspan=2 align=center bgcolor=#e8e8e8><td colspan=2 align=center>%s</td></tr>\n' % k)) for (description, path) in v: path = re.sub(('^.*%s\\/' % top_level_dir), './', path) index_lines.append(('<tr>%s</tr>\n' % format_index_link(description, path))) index_lines.append('</table>\n') index_page_footer = get_index_page_footer() index_lines.append(index_page_footer) open(index_fp, 'w').write(''.join(index_lines))
[ "def", "generate_index_page", "(", "index_links", ",", "index_fp", ",", "order", "=", "[", "_index_headers", "[", "'run_summary'", "]", "]", ")", ":", "top_level_dir", "=", "split", "(", "split", "(", "index_fp", ")", "[", "0", "]", ")", "[", "1", "]", "index_page_header", "=", "get_index_page_header", "(", ")", "index_lines", "=", "[", "index_page_header", "]", "d", "=", "{", "}", "for", "e", "in", "index_links", ":", "try", ":", "d", "[", "e", "[", "2", "]", "]", ".", "append", "(", "(", "e", "[", "0", "]", ",", "e", "[", "1", "]", ")", ")", "except", "KeyError", ":", "d", "[", "e", "[", "2", "]", "]", "=", "[", "(", "e", "[", "0", "]", ",", "e", "[", "1", "]", ")", "]", "index_lines", ".", "append", "(", "'<table border=1>\\n'", ")", "ordered_table_entries", "=", "(", "order", "+", "[", "k", "for", "k", "in", "d", "if", "(", "k", "not", "in", "order", ")", "]", ")", "for", "k", "in", "ordered_table_entries", ":", "v", "=", "d", "[", "k", "]", "index_lines", ".", "append", "(", "(", "'<tr colspan=2 align=center bgcolor=#e8e8e8><td colspan=2 align=center>%s</td></tr>\\n'", "%", "k", ")", ")", "for", "(", "description", ",", "path", ")", "in", "v", ":", "path", "=", "re", ".", "sub", "(", "(", "'^.*%s\\\\/'", "%", "top_level_dir", ")", ",", "'./'", ",", "path", ")", "index_lines", ".", "append", "(", "(", "'<tr>%s</tr>\\n'", "%", "format_index_link", "(", "description", ",", "path", ")", ")", ")", "index_lines", ".", "append", "(", "'</table>\\n'", ")", "index_page_footer", "=", "get_index_page_footer", "(", ")", "index_lines", ".", "append", "(", "index_page_footer", ")", "open", "(", "index_fp", ",", "'w'", ")", ".", "write", "(", "''", ".", "join", "(", "index_lines", ")", ")" ]
generate the top-level index page .
train
false
43,398
def TR12(rv, first=True): def f(rv): if (not (rv.func == tan)): return rv arg = rv.args[0] if arg.is_Add: if first: args = list(ordered(arg.args)) else: args = list(arg.args) a = args.pop() b = Add._from_args(args) if b.is_Add: tb = TR12(tan(b), first=False) else: tb = tan(b) return ((tan(a) + tb) / (1 - (tan(a) * tb))) return rv return bottom_up(rv, f)
[ "def", "TR12", "(", "rv", ",", "first", "=", "True", ")", ":", "def", "f", "(", "rv", ")", ":", "if", "(", "not", "(", "rv", ".", "func", "==", "tan", ")", ")", ":", "return", "rv", "arg", "=", "rv", ".", "args", "[", "0", "]", "if", "arg", ".", "is_Add", ":", "if", "first", ":", "args", "=", "list", "(", "ordered", "(", "arg", ".", "args", ")", ")", "else", ":", "args", "=", "list", "(", "arg", ".", "args", ")", "a", "=", "args", ".", "pop", "(", ")", "b", "=", "Add", ".", "_from_args", "(", "args", ")", "if", "b", ".", "is_Add", ":", "tb", "=", "TR12", "(", "tan", "(", "b", ")", ",", "first", "=", "False", ")", "else", ":", "tb", "=", "tan", "(", "b", ")", "return", "(", "(", "tan", "(", "a", ")", "+", "tb", ")", "/", "(", "1", "-", "(", "tan", "(", "a", ")", "*", "tb", ")", ")", ")", "return", "rv", "return", "bottom_up", "(", "rv", ",", "f", ")" ]
separate sums in tan .
train
false
43,399
def generate_template(template_name, **context): context.update(href=href, format_datetime=format_datetime) return template_loader.load(template_name).generate(**context)
[ "def", "generate_template", "(", "template_name", ",", "**", "context", ")", ":", "context", ".", "update", "(", "href", "=", "href", ",", "format_datetime", "=", "format_datetime", ")", "return", "template_loader", ".", "load", "(", "template_name", ")", ".", "generate", "(", "**", "context", ")" ]
load and generate a template .
train
true
43,401
def show_in_pager(self, data, start, screen_lines): raise TryNext
[ "def", "show_in_pager", "(", "self", ",", "data", ",", "start", ",", "screen_lines", ")", ":", "raise", "TryNext" ]
run a string through pager .
train
false
43,404
def validate_path(path, element_validator=validate_path_element_default): parts = path.split('/') for p in parts: if (not element_validator(p)): return False else: return True
[ "def", "validate_path", "(", "path", ",", "element_validator", "=", "validate_path_element_default", ")", ":", "parts", "=", "path", ".", "split", "(", "'/'", ")", "for", "p", "in", "parts", ":", "if", "(", "not", "element_validator", "(", "p", ")", ")", ":", "return", "False", "else", ":", "return", "True" ]
default path validator that just checks for .
train
false
43,405
def _extract_key_val(kv, delimiter='='): pieces = kv.split(delimiter) key = pieces[0] val = delimiter.join(pieces[1:]) return (key, val)
[ "def", "_extract_key_val", "(", "kv", ",", "delimiter", "=", "'='", ")", ":", "pieces", "=", "kv", ".", "split", "(", "delimiter", ")", "key", "=", "pieces", "[", "0", "]", "val", "=", "delimiter", ".", "join", "(", "pieces", "[", "1", ":", "]", ")", "return", "(", "key", ",", "val", ")" ]
extract key and value from key=val string .
train
true
43,408
def logAttrib(obj, log, attrib, value=None): if (log or ((log is None) and obj.autoLog)): if (value is None): value = getattr(obj, attrib) message = ('%s: %s = %s' % (obj.name, attrib, value.__repr__())) try: obj.win.logOnFlip(message, level=logging.EXP, obj=obj) except AttributeError: logging.log(message, level=logging.EXP, obj=obj)
[ "def", "logAttrib", "(", "obj", ",", "log", ",", "attrib", ",", "value", "=", "None", ")", ":", "if", "(", "log", "or", "(", "(", "log", "is", "None", ")", "and", "obj", ".", "autoLog", ")", ")", ":", "if", "(", "value", "is", "None", ")", ":", "value", "=", "getattr", "(", "obj", ",", "attrib", ")", "message", "=", "(", "'%s: %s = %s'", "%", "(", "obj", ".", "name", ",", "attrib", ",", "value", ".", "__repr__", "(", ")", ")", ")", "try", ":", "obj", ".", "win", ".", "logOnFlip", "(", "message", ",", "level", "=", "logging", ".", "EXP", ",", "obj", "=", "obj", ")", "except", "AttributeError", ":", "logging", ".", "log", "(", "message", ",", "level", "=", "logging", ".", "EXP", ",", "obj", "=", "obj", ")" ]
logs a change of a visual attribute on the next window .
train
false
43,409
def mac_str_to_bytes(mac_str): if (len(mac_str) == 12): pass elif (len(mac_str) == 17): sep = mac_str[2] mac_str = mac_str.replace(sep, '') else: raise ValueError('Invalid MAC address') if six.PY3: mac_bytes = bytes((int(mac_str[s:(s + 2)], 16) for s in range(0, 12, 2))) else: mac_bytes = ''.join((chr(int(mac_str[s:(s + 2)], 16)) for s in range(0, 12, 2))) return mac_bytes
[ "def", "mac_str_to_bytes", "(", "mac_str", ")", ":", "if", "(", "len", "(", "mac_str", ")", "==", "12", ")", ":", "pass", "elif", "(", "len", "(", "mac_str", ")", "==", "17", ")", ":", "sep", "=", "mac_str", "[", "2", "]", "mac_str", "=", "mac_str", ".", "replace", "(", "sep", ",", "''", ")", "else", ":", "raise", "ValueError", "(", "'Invalid MAC address'", ")", "if", "six", ".", "PY3", ":", "mac_bytes", "=", "bytes", "(", "(", "int", "(", "mac_str", "[", "s", ":", "(", "s", "+", "2", ")", "]", ",", "16", ")", "for", "s", "in", "range", "(", "0", ",", "12", ",", "2", ")", ")", ")", "else", ":", "mac_bytes", "=", "''", ".", "join", "(", "(", "chr", "(", "int", "(", "mac_str", "[", "s", ":", "(", "s", "+", "2", ")", "]", ",", "16", ")", ")", "for", "s", "in", "range", "(", "0", ",", "12", ",", "2", ")", ")", ")", "return", "mac_bytes" ]
convert a mac address string into bytes .
train
false
43,410
def add_xsl_link(resourcebase): urlpath = reverse('prefix_xsl_line', args=[resourcebase.id]) url = '{}{}'.format(settings.SITEURL, urlpath) (link, created) = Link.objects.get_or_create(resource=resourcebase, url=url, defaults=dict(name=ISO_XSL_NAME, extension='xml', mime='text/xml', link_type='metadata')) return created
[ "def", "add_xsl_link", "(", "resourcebase", ")", ":", "urlpath", "=", "reverse", "(", "'prefix_xsl_line'", ",", "args", "=", "[", "resourcebase", ".", "id", "]", ")", "url", "=", "'{}{}'", ".", "format", "(", "settings", ".", "SITEURL", ",", "urlpath", ")", "(", "link", ",", "created", ")", "=", "Link", ".", "objects", ".", "get_or_create", "(", "resource", "=", "resourcebase", ",", "url", "=", "url", ",", "defaults", "=", "dict", "(", "name", "=", "ISO_XSL_NAME", ",", "extension", "=", "'xml'", ",", "mime", "=", "'text/xml'", ",", "link_type", "=", "'metadata'", ")", ")", "return", "created" ]
add a link to the enriched iso metadata .
train
false
43,413
def define_total(name, description, manager=counters): counter = _TotalCounter(name, description) manager.register(counter) return counter
[ "def", "define_total", "(", "name", ",", "description", ",", "manager", "=", "counters", ")", ":", "counter", "=", "_TotalCounter", "(", "name", ",", "description", ")", "manager", ".", "register", "(", "counter", ")", "return", "counter" ]
creates a performance counter which tracks some cumulative value over the course of the program .
train
false
43,414
def shutdown_hook(self): return
[ "def", "shutdown_hook", "(", "self", ")", ":", "return" ]
default shutdown hook typically .
train
false
43,415
def group_has_volumes_filter(attached_or_with_snapshots=False): return IMPL.group_has_volumes_filter(attached_or_with_snapshots)
[ "def", "group_has_volumes_filter", "(", "attached_or_with_snapshots", "=", "False", ")", ":", "return", "IMPL", ".", "group_has_volumes_filter", "(", "attached_or_with_snapshots", ")" ]
return a filter to check if a group has volumes .
train
false
43,416
def register_archive_format(name, function, extra_args=None, description=''): if (extra_args is None): extra_args = [] if (not isinstance(function, collections.Callable)): raise TypeError(('The %s object is not callable' % function)) if (not isinstance(extra_args, (tuple, list))): raise TypeError('extra_args needs to be a sequence') for element in extra_args: if ((not isinstance(element, (tuple, list))) or (len(element) != 2)): raise TypeError('extra_args elements are : (arg_name, value)') _ARCHIVE_FORMATS[name] = (function, extra_args, description)
[ "def", "register_archive_format", "(", "name", ",", "function", ",", "extra_args", "=", "None", ",", "description", "=", "''", ")", ":", "if", "(", "extra_args", "is", "None", ")", ":", "extra_args", "=", "[", "]", "if", "(", "not", "isinstance", "(", "function", ",", "collections", ".", "Callable", ")", ")", ":", "raise", "TypeError", "(", "(", "'The %s object is not callable'", "%", "function", ")", ")", "if", "(", "not", "isinstance", "(", "extra_args", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "raise", "TypeError", "(", "'extra_args needs to be a sequence'", ")", "for", "element", "in", "extra_args", ":", "if", "(", "(", "not", "isinstance", "(", "element", ",", "(", "tuple", ",", "list", ")", ")", ")", "or", "(", "len", "(", "element", ")", "!=", "2", ")", ")", ":", "raise", "TypeError", "(", "'extra_args elements are : (arg_name, value)'", ")", "_ARCHIVE_FORMATS", "[", "name", "]", "=", "(", "function", ",", "extra_args", ",", "description", ")" ]
registers an archive format .
train
true
43,417
def Disassemble(data, start, bits='32bit', stoponret=False): if (not has_distorm3): raise StopIteration if (bits == '32bit'): mode = distorm3.Decode32Bits else: mode = distorm3.Decode64Bits for (o, _, i, h) in distorm3.DecodeGenerator(start, data, mode): if (stoponret and i.startswith('RET')): raise StopIteration (yield (o, i, h))
[ "def", "Disassemble", "(", "data", ",", "start", ",", "bits", "=", "'32bit'", ",", "stoponret", "=", "False", ")", ":", "if", "(", "not", "has_distorm3", ")", ":", "raise", "StopIteration", "if", "(", "bits", "==", "'32bit'", ")", ":", "mode", "=", "distorm3", ".", "Decode32Bits", "else", ":", "mode", "=", "distorm3", ".", "Decode64Bits", "for", "(", "o", ",", "_", ",", "i", ",", "h", ")", "in", "distorm3", ".", "DecodeGenerator", "(", "start", ",", "data", ",", "mode", ")", ":", "if", "(", "stoponret", "and", "i", ".", "startswith", "(", "'RET'", ")", ")", ":", "raise", "StopIteration", "(", "yield", "(", "o", ",", "i", ",", "h", ")", ")" ]
dissassemble code with distorm3 .
train
false
43,418
def _long2bytesBigEndian(n, blocksize=0): s = '' pack = struct.pack while (n > 0): s = (pack('>I', (n & 4294967295L)) + s) n = (n >> 32) for i in range(len(s)): if (s[i] != '\x00'): break else: s = '\x00' i = 0 s = s[i:] if ((blocksize > 0) and (len(s) % blocksize)): s = (((blocksize - (len(s) % blocksize)) * '\x00') + s) return s
[ "def", "_long2bytesBigEndian", "(", "n", ",", "blocksize", "=", "0", ")", ":", "s", "=", "''", "pack", "=", "struct", ".", "pack", "while", "(", "n", ">", "0", ")", ":", "s", "=", "(", "pack", "(", "'>I'", ",", "(", "n", "&", "4294967295", "L", ")", ")", "+", "s", ")", "n", "=", "(", "n", ">>", "32", ")", "for", "i", "in", "range", "(", "len", "(", "s", ")", ")", ":", "if", "(", "s", "[", "i", "]", "!=", "'\\x00'", ")", ":", "break", "else", ":", "s", "=", "'\\x00'", "i", "=", "0", "s", "=", "s", "[", "i", ":", "]", "if", "(", "(", "blocksize", ">", "0", ")", "and", "(", "len", "(", "s", ")", "%", "blocksize", ")", ")", ":", "s", "=", "(", "(", "(", "blocksize", "-", "(", "len", "(", "s", ")", "%", "blocksize", ")", ")", "*", "'\\x00'", ")", "+", "s", ")", "return", "s" ]
convert a long integer to a byte string .
train
true
43,419
@pytest.fixture def data_tmpdir(monkeypatch, tmpdir): datadir = (tmpdir / 'data') path = str(datadir) os.mkdir(path) monkeypatch.setattr('qutebrowser.utils.standarddir.data', (lambda : path)) return datadir
[ "@", "pytest", ".", "fixture", "def", "data_tmpdir", "(", "monkeypatch", ",", "tmpdir", ")", ":", "datadir", "=", "(", "tmpdir", "/", "'data'", ")", "path", "=", "str", "(", "datadir", ")", "os", ".", "mkdir", "(", "path", ")", "monkeypatch", ".", "setattr", "(", "'qutebrowser.utils.standarddir.data'", ",", "(", "lambda", ":", "path", ")", ")", "return", "datadir" ]
set tmpdir/data as the datadir .
train
false
43,420
def convert_to_datetime(input): if isinstance(input, datetime): return input elif isinstance(input, date): return datetime.fromordinal(input.toordinal()) elif isinstance(input, str): m = _DATE_REGEX.match(input) if (not m): raise ValueError('Invalid date string') values = [(k, int((v or 0))) for (k, v) in m.groupdict().items()] values = dict(values) return datetime(**values) raise TypeError(('Unsupported input type: %s' % type(input)))
[ "def", "convert_to_datetime", "(", "input", ")", ":", "if", "isinstance", "(", "input", ",", "datetime", ")", ":", "return", "input", "elif", "isinstance", "(", "input", ",", "date", ")", ":", "return", "datetime", ".", "fromordinal", "(", "input", ".", "toordinal", "(", ")", ")", "elif", "isinstance", "(", "input", ",", "str", ")", ":", "m", "=", "_DATE_REGEX", ".", "match", "(", "input", ")", "if", "(", "not", "m", ")", ":", "raise", "ValueError", "(", "'Invalid date string'", ")", "values", "=", "[", "(", "k", ",", "int", "(", "(", "v", "or", "0", ")", ")", ")", "for", "(", "k", ",", "v", ")", "in", "m", ".", "groupdict", "(", ")", ".", "items", "(", ")", "]", "values", "=", "dict", "(", "values", ")", "return", "datetime", "(", "**", "values", ")", "raise", "TypeError", "(", "(", "'Unsupported input type: %s'", "%", "type", "(", "input", ")", ")", ")" ]
converts the given object to a datetime object .
train
false
43,421
def xml_to_string(elem, pretty=False): if pretty: elem = pretty_print_xml(elem) try: return ElementTree.tostring(elem) except TypeError as e: if hasattr(elem, 'text'): return ('<!-- %s -->\n' % elem.text) else: raise e
[ "def", "xml_to_string", "(", "elem", ",", "pretty", "=", "False", ")", ":", "if", "pretty", ":", "elem", "=", "pretty_print_xml", "(", "elem", ")", "try", ":", "return", "ElementTree", ".", "tostring", "(", "elem", ")", "except", "TypeError", "as", "e", ":", "if", "hasattr", "(", "elem", ",", "'text'", ")", ":", "return", "(", "'<!-- %s -->\\n'", "%", "elem", ".", "text", ")", "else", ":", "raise", "e" ]
returns a string from an xml tree .
train
false
43,422
@pytest.fixture def evil_member(): return _require_user('evil_member', 'Evil member')
[ "@", "pytest", ".", "fixture", "def", "evil_member", "(", ")", ":", "return", "_require_user", "(", "'evil_member'", ",", "'Evil member'", ")" ]
require a evil_member user .
train
false
43,423
def test_random_sample_prob_range(): a = db.from_sequence(range(50), npartitions=5) with pytest.raises(ValueError): a.random_sample((-1)) with pytest.raises(ValueError): a.random_sample(1.1)
[ "def", "test_random_sample_prob_range", "(", ")", ":", "a", "=", "db", ".", "from_sequence", "(", "range", "(", "50", ")", ",", "npartitions", "=", "5", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "a", ".", "random_sample", "(", "(", "-", "1", ")", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "a", ".", "random_sample", "(", "1.1", ")" ]
specifying probabilities outside the range [0 .
train
false
43,424
def upath(path): return path
[ "def", "upath", "(", "path", ")", ":", "return", "path" ]
always return a unicode path .
train
false
43,425
def _write_cert_to_database(ca_name, cert, cacert_path=None, status='V'): set_ca_path(cacert_path) ca_dir = '{0}/{1}'.format(cert_base_path(), ca_name) (index_file, expire_date, serial_number, subject) = _get_basic_info(ca_name, cert, ca_dir) index_data = '{0} DCTB {1} DCTB DCTB {2} DCTB unknown DCTB {3}'.format(status, expire_date, serial_number, subject) with salt.utils.fopen(index_file, 'a+') as ofile: ofile.write(index_data)
[ "def", "_write_cert_to_database", "(", "ca_name", ",", "cert", ",", "cacert_path", "=", "None", ",", "status", "=", "'V'", ")", ":", "set_ca_path", "(", "cacert_path", ")", "ca_dir", "=", "'{0}/{1}'", ".", "format", "(", "cert_base_path", "(", ")", ",", "ca_name", ")", "(", "index_file", ",", "expire_date", ",", "serial_number", ",", "subject", ")", "=", "_get_basic_info", "(", "ca_name", ",", "cert", ",", "ca_dir", ")", "index_data", "=", "'{0} DCTB {1} DCTB DCTB {2} DCTB unknown DCTB {3}'", ".", "format", "(", "status", ",", "expire_date", ",", "serial_number", ",", "subject", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "index_file", ",", "'a+'", ")", "as", "ofile", ":", "ofile", ".", "write", "(", "index_data", ")" ]
write out the index .
train
true
43,426
def test_system_threading_tasks(): temp_list = range(10000) temp_list_output = [] Parallel.For.Overloads[(int, int, System.Action[int])](0, len(temp_list), (lambda x: temp_list_output.append(x))) AreEqual(len(temp_list), len(temp_list_output)) temp_list_output.sort() AreEqual(temp_list, temp_list_output) temp_list = range(10000) temp_list_output = [] Parallel.ForEach(xrange(10000), (lambda x: temp_list_output.append(x))) AreEqual(len(temp_list), len(temp_list_output)) temp_list_output.sort() AreEqual(temp_list, temp_list_output)
[ "def", "test_system_threading_tasks", "(", ")", ":", "temp_list", "=", "range", "(", "10000", ")", "temp_list_output", "=", "[", "]", "Parallel", ".", "For", ".", "Overloads", "[", "(", "int", ",", "int", ",", "System", ".", "Action", "[", "int", "]", ")", "]", "(", "0", ",", "len", "(", "temp_list", ")", ",", "(", "lambda", "x", ":", "temp_list_output", ".", "append", "(", "x", ")", ")", ")", "AreEqual", "(", "len", "(", "temp_list", ")", ",", "len", "(", "temp_list_output", ")", ")", "temp_list_output", ".", "sort", "(", ")", "AreEqual", "(", "temp_list", ",", "temp_list_output", ")", "temp_list", "=", "range", "(", "10000", ")", "temp_list_output", "=", "[", "]", "Parallel", ".", "ForEach", "(", "xrange", "(", "10000", ")", ",", "(", "lambda", "x", ":", "temp_list_output", ".", "append", "(", "x", ")", ")", ")", "AreEqual", "(", "len", "(", "temp_list", ")", ",", "len", "(", "temp_list_output", ")", ")", "temp_list_output", ".", "sort", "(", ")", "AreEqual", "(", "temp_list", ",", "temp_list_output", ")" ]
URL URL shouldnt be necessary to test this .
train
false
43,427
def searchObfuscatedFunctions(jsCode, function): obfuscatedFunctionsInfo = [] if (jsCode != None): match = re.findall((('\\W(' + function) + '\\s{0,5}?\\((.*?)\\)\\s{0,5}?;)'), jsCode, re.DOTALL) if (match != []): for m in match: if (re.findall('return', m[1], re.IGNORECASE) != []): obfuscatedFunctionsInfo.append([function, m, True]) else: obfuscatedFunctionsInfo.append([function, m, False]) obfuscatedFunctions = re.findall((('\\s*?((\\w*?)\\s*?=\\s*?' + function) + ')\\s*?;'), jsCode, re.DOTALL) for obfuscatedFunction in obfuscatedFunctions: obfuscatedElement = obfuscatedFunction[1] obfuscatedFunctionsInfo += searchObfuscatedFunctions(jsCode, obfuscatedElement) return obfuscatedFunctionsInfo
[ "def", "searchObfuscatedFunctions", "(", "jsCode", ",", "function", ")", ":", "obfuscatedFunctionsInfo", "=", "[", "]", "if", "(", "jsCode", "!=", "None", ")", ":", "match", "=", "re", ".", "findall", "(", "(", "(", "'\\\\W('", "+", "function", ")", "+", "'\\\\s{0,5}?\\\\((.*?)\\\\)\\\\s{0,5}?;)'", ")", ",", "jsCode", ",", "re", ".", "DOTALL", ")", "if", "(", "match", "!=", "[", "]", ")", ":", "for", "m", "in", "match", ":", "if", "(", "re", ".", "findall", "(", "'return'", ",", "m", "[", "1", "]", ",", "re", ".", "IGNORECASE", ")", "!=", "[", "]", ")", ":", "obfuscatedFunctionsInfo", ".", "append", "(", "[", "function", ",", "m", ",", "True", "]", ")", "else", ":", "obfuscatedFunctionsInfo", ".", "append", "(", "[", "function", ",", "m", ",", "False", "]", ")", "obfuscatedFunctions", "=", "re", ".", "findall", "(", "(", "(", "'\\\\s*?((\\\\w*?)\\\\s*?=\\\\s*?'", "+", "function", ")", "+", "')\\\\s*?;'", ")", ",", "jsCode", ",", "re", ".", "DOTALL", ")", "for", "obfuscatedFunction", "in", "obfuscatedFunctions", ":", "obfuscatedElement", "=", "obfuscatedFunction", "[", "1", "]", "obfuscatedFunctionsInfo", "+=", "searchObfuscatedFunctions", "(", "jsCode", ",", "obfuscatedElement", ")", "return", "obfuscatedFunctionsInfo" ]
search for obfuscated functions in the javascript code .
train
false
43,428
def minify(pelican): for (dirpath, _, filenames) in os.walk(pelican.settings['OUTPUT_PATH']): for name in filenames: if (os.path.splitext(name)[1] in ('.css', '.js')): filepath = os.path.join(dirpath, name) logger.info('minifiy %s', filepath) verbose = ('-v' if SHOW_OUTPUT else '') call('yuicompressor {} --charset utf-8 {} -o {}'.format(verbose, filepath, filepath), shell=True)
[ "def", "minify", "(", "pelican", ")", ":", "for", "(", "dirpath", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "pelican", ".", "settings", "[", "'OUTPUT_PATH'", "]", ")", ":", "for", "name", "in", "filenames", ":", "if", "(", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "1", "]", "in", "(", "'.css'", ",", "'.js'", ")", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "name", ")", "logger", ".", "info", "(", "'minifiy %s'", ",", "filepath", ")", "verbose", "=", "(", "'-v'", "if", "SHOW_OUTPUT", "else", "''", ")", "call", "(", "'yuicompressor {} --charset utf-8 {} -o {}'", ".", "format", "(", "verbose", ",", "filepath", ",", "filepath", ")", ",", "shell", "=", "True", ")" ]
minify css and js with yui compressor .
train
false
43,429
def transmit(msg): lane_stack.transmit(msg, remote_yard.uid)
[ "def", "transmit", "(", "msg", ")", ":", "lane_stack", ".", "transmit", "(", "msg", ",", "remote_yard", ".", "uid", ")" ]
sends msg to remote_yard .
train
false
43,430
def get_bookmarks(user, course_key=None, fields=None, serialized=True): bookmarks_queryset = Bookmark.objects.filter(user=user) if course_key: bookmarks_queryset = bookmarks_queryset.filter(course_key=course_key) if (len((set((fields or [])) & set(OPTIONAL_FIELDS))) > 0): bookmarks_queryset = bookmarks_queryset.select_related('user', 'xblock_cache') else: bookmarks_queryset = bookmarks_queryset.select_related('user') bookmarks_queryset = bookmarks_queryset.order_by('-created') if serialized: return BookmarkSerializer(bookmarks_queryset, context={'fields': fields}, many=True).data return bookmarks_queryset
[ "def", "get_bookmarks", "(", "user", ",", "course_key", "=", "None", ",", "fields", "=", "None", ",", "serialized", "=", "True", ")", ":", "bookmarks_queryset", "=", "Bookmark", ".", "objects", ".", "filter", "(", "user", "=", "user", ")", "if", "course_key", ":", "bookmarks_queryset", "=", "bookmarks_queryset", ".", "filter", "(", "course_key", "=", "course_key", ")", "if", "(", "len", "(", "(", "set", "(", "(", "fields", "or", "[", "]", ")", ")", "&", "set", "(", "OPTIONAL_FIELDS", ")", ")", ")", ">", "0", ")", ":", "bookmarks_queryset", "=", "bookmarks_queryset", ".", "select_related", "(", "'user'", ",", "'xblock_cache'", ")", "else", ":", "bookmarks_queryset", "=", "bookmarks_queryset", ".", "select_related", "(", "'user'", ")", "bookmarks_queryset", "=", "bookmarks_queryset", ".", "order_by", "(", "'-created'", ")", "if", "serialized", ":", "return", "BookmarkSerializer", "(", "bookmarks_queryset", ",", "context", "=", "{", "'fields'", ":", "fields", "}", ",", "many", "=", "True", ")", ".", "data", "return", "bookmarks_queryset" ]
return data for bookmarks of a user .
train
false
43,431
@permission_required([('AccountLookup', 'View')]) def app_activity(request, addon_id): app = get_object_or_404(Webapp.with_deleted, pk=addon_id) user_items = ActivityLog.objects.for_apps([app]).exclude(action__in=mkt.LOG_HIDE_DEVELOPER) admin_items = ActivityLog.objects.for_apps([app]).filter(action__in=mkt.LOG_HIDE_DEVELOPER) user_items = paginate(request, user_items, per_page=20) admin_items = paginate(request, admin_items, per_page=20) return render(request, 'lookup/app_activity.html', {'admin_items': admin_items, 'app': app, 'user_items': user_items})
[ "@", "permission_required", "(", "[", "(", "'AccountLookup'", ",", "'View'", ")", "]", ")", "def", "app_activity", "(", "request", ",", "addon_id", ")", ":", "app", "=", "get_object_or_404", "(", "Webapp", ".", "with_deleted", ",", "pk", "=", "addon_id", ")", "user_items", "=", "ActivityLog", ".", "objects", ".", "for_apps", "(", "[", "app", "]", ")", ".", "exclude", "(", "action__in", "=", "mkt", ".", "LOG_HIDE_DEVELOPER", ")", "admin_items", "=", "ActivityLog", ".", "objects", ".", "for_apps", "(", "[", "app", "]", ")", ".", "filter", "(", "action__in", "=", "mkt", ".", "LOG_HIDE_DEVELOPER", ")", "user_items", "=", "paginate", "(", "request", ",", "user_items", ",", "per_page", "=", "20", ")", "admin_items", "=", "paginate", "(", "request", ",", "admin_items", ",", "per_page", "=", "20", ")", "return", "render", "(", "request", ",", "'lookup/app_activity.html'", ",", "{", "'admin_items'", ":", "admin_items", ",", "'app'", ":", "app", ",", "'user_items'", ":", "user_items", "}", ")" ]
shows the app activity age for single app .
train
false
43,433
def pportD7(state): global dataReg if (state == 0): dataReg = (dataReg & (~ 128)) else: dataReg = (dataReg | 128) port.DlPortWritePortUchar(baseAddress, dataReg)
[ "def", "pportD7", "(", "state", ")", ":", "global", "dataReg", "if", "(", "state", "==", "0", ")", ":", "dataReg", "=", "(", "dataReg", "&", "(", "~", "128", ")", ")", "else", ":", "dataReg", "=", "(", "dataReg", "|", "128", ")", "port", ".", "DlPortWritePortUchar", "(", "baseAddress", ",", "dataReg", ")" ]
toggle data register d7 bit .
train
false
43,434
def run_with(*drivers): def decorator(test): if (isinstance(test, type) and issubclass(test, TestBase)): for attr in dir(test): value = getattr(test, attr) if (callable(value) and attr.startswith('test_')): if six.PY3: value._run_with = drivers else: value.__func__._run_with = drivers else: test._run_with = drivers return test return decorator
[ "def", "run_with", "(", "*", "drivers", ")", ":", "def", "decorator", "(", "test", ")", ":", "if", "(", "isinstance", "(", "test", ",", "type", ")", "and", "issubclass", "(", "test", ",", "TestBase", ")", ")", ":", "for", "attr", "in", "dir", "(", "test", ")", ":", "value", "=", "getattr", "(", "test", ",", "attr", ")", "if", "(", "callable", "(", "value", ")", "and", "attr", ".", "startswith", "(", "'test_'", ")", ")", ":", "if", "six", ".", "PY3", ":", "value", ".", "_run_with", "=", "drivers", "else", ":", "value", ".", "__func__", ".", "_run_with", "=", "drivers", "else", ":", "test", ".", "_run_with", "=", "drivers", "return", "test", "return", "decorator" ]
used to mark tests that are only applicable for certain db driver .
train
false
43,435
def reset_token_store(): auth.reset_default_token_store()
[ "def", "reset_token_store", "(", ")", ":", "auth", ".", "reset_default_token_store", "(", ")" ]
deletes the default token store .
train
false
43,436
def admin_media_prefix(): try: from django.conf import settings except ImportError: return '' return settings.ADMIN_MEDIA_PREFIX
[ "def", "admin_media_prefix", "(", ")", ":", "try", ":", "from", "django", ".", "conf", "import", "settings", "except", "ImportError", ":", "return", "''", "return", "settings", ".", "ADMIN_MEDIA_PREFIX" ]
returns the string contained in the setting admin_media_prefix .
train
false
43,438
def rst_invalid(text): invalid_rst = False try: rst_to_html(text) except Exception as e: invalid_rst = str(e) return invalid_rst
[ "def", "rst_invalid", "(", "text", ")", ":", "invalid_rst", "=", "False", "try", ":", "rst_to_html", "(", "text", ")", "except", "Exception", "as", "e", ":", "invalid_rst", "=", "str", "(", "e", ")", "return", "invalid_rst" ]
predicate to determine if text is invalid restructuredtext .
train
false
43,439
def _parse_proplist(data): out = {} for line in data.split('\n'): line = re.split('\\s+', line, 1) if (len(line) == 2): out[line[0]] = line[1] return out
[ "def", "_parse_proplist", "(", "data", ")", ":", "out", "=", "{", "}", "for", "line", "in", "data", ".", "split", "(", "'\\n'", ")", ":", "line", "=", "re", ".", "split", "(", "'\\\\s+'", ",", "line", ",", "1", ")", "if", "(", "len", "(", "line", ")", "==", "2", ")", ":", "out", "[", "line", "[", "0", "]", "]", "=", "line", "[", "1", "]", "return", "out" ]
parse properties list .
train
true
43,440
@utils.service_type('monitor') def do_rate_limits(cs, args): limits = cs.limits.get().rate columns = ['Verb', 'URI', 'Value', 'Remain', 'Unit', 'Next_Available'] utils.print_list(limits, columns)
[ "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_rate_limits", "(", "cs", ",", "args", ")", ":", "limits", "=", "cs", ".", "limits", ".", "get", "(", ")", ".", "rate", "columns", "=", "[", "'Verb'", ",", "'URI'", ",", "'Value'", ",", "'Remain'", ",", "'Unit'", ",", "'Next_Available'", "]", "utils", ".", "print_list", "(", "limits", ",", "columns", ")" ]
print a list of rate limits for a user .
train
false
43,441
@LocalContext def wait_for_device(kick=False): with log.waitfor('Waiting for device to come online') as w: with AdbClient() as c: if kick: try: c.reconnect() except Exception: pass serial = '' if context.device: serial = str(context.device) with AdbClient() as c: c.wait_for_device(serial) for device in devices(): if (context.device == device): return device if (not serial): break else: log.error('Could not find any devices') with context.local(device=device): w.success(('%s (%s %s %s)' % (device, product(), build(), _build_date()))) return context.device
[ "@", "LocalContext", "def", "wait_for_device", "(", "kick", "=", "False", ")", ":", "with", "log", ".", "waitfor", "(", "'Waiting for device to come online'", ")", "as", "w", ":", "with", "AdbClient", "(", ")", "as", "c", ":", "if", "kick", ":", "try", ":", "c", ".", "reconnect", "(", ")", "except", "Exception", ":", "pass", "serial", "=", "''", "if", "context", ".", "device", ":", "serial", "=", "str", "(", "context", ".", "device", ")", "with", "AdbClient", "(", ")", "as", "c", ":", "c", ".", "wait_for_device", "(", "serial", ")", "for", "device", "in", "devices", "(", ")", ":", "if", "(", "context", ".", "device", "==", "device", ")", ":", "return", "device", "if", "(", "not", "serial", ")", ":", "break", "else", ":", "log", ".", "error", "(", "'Could not find any devices'", ")", "with", "context", ".", "local", "(", "device", "=", "device", ")", ":", "w", ".", "success", "(", "(", "'%s (%s %s %s)'", "%", "(", "device", ",", "product", "(", ")", ",", "build", "(", ")", ",", "_build_date", "(", ")", ")", ")", ")", "return", "context", ".", "device" ]
waits for a device to be connected .
train
false
43,443
def _record_exists(arg_dict): cmd = ['xenstore-exists', ('/local/domain/%(dom_id)s/%(path)s' % arg_dict)] try: (ret, result) = _run_command(cmd) except XenstoreError as e: if (e.stderr == ''): return False raise return True
[ "def", "_record_exists", "(", "arg_dict", ")", ":", "cmd", "=", "[", "'xenstore-exists'", ",", "(", "'/local/domain/%(dom_id)s/%(path)s'", "%", "arg_dict", ")", "]", "try", ":", "(", "ret", ",", "result", ")", "=", "_run_command", "(", "cmd", ")", "except", "XenstoreError", "as", "e", ":", "if", "(", "e", ".", "stderr", "==", "''", ")", ":", "return", "False", "raise", "return", "True" ]
returns whether or not the given record exists .
train
false
43,444
def warning_priority_color(color_code): if (not color_code): return current.messages['NONE'] return DIV(_style=('width:%(size)s;height:%(size)s;background-color:#%(color)s;' % dict(size='2em', color=color_code)))
[ "def", "warning_priority_color", "(", "color_code", ")", ":", "if", "(", "not", "color_code", ")", ":", "return", "current", ".", "messages", "[", "'NONE'", "]", "return", "DIV", "(", "_style", "=", "(", "'width:%(size)s;height:%(size)s;background-color:#%(color)s;'", "%", "dict", "(", "size", "=", "'2em'", ",", "color", "=", "color_code", ")", ")", ")" ]
shows actual color for hex color code .
train
false
43,445
def test_nm2_fit(): ratio = 'auto' nm2 = NearMiss(ratio=ratio, random_state=RND_SEED, version=VERSION_NEARMISS) nm2.fit(X, Y) assert_equal(nm2.min_c_, 0) assert_equal(nm2.maj_c_, 2) assert_equal(nm2.stats_c_[0], 3) assert_equal(nm2.stats_c_[1], 5) assert_equal(nm2.stats_c_[2], 7)
[ "def", "test_nm2_fit", "(", ")", ":", "ratio", "=", "'auto'", "nm2", "=", "NearMiss", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ",", "version", "=", "VERSION_NEARMISS", ")", "nm2", ".", "fit", "(", "X", ",", "Y", ")", "assert_equal", "(", "nm2", ".", "min_c_", ",", "0", ")", "assert_equal", "(", "nm2", ".", "maj_c_", ",", "2", ")", "assert_equal", "(", "nm2", ".", "stats_c_", "[", "0", "]", ",", "3", ")", "assert_equal", "(", "nm2", ".", "stats_c_", "[", "1", "]", ",", "5", ")", "assert_equal", "(", "nm2", ".", "stats_c_", "[", "2", "]", ",", "7", ")" ]
test the fitting method .
train
false
43,446
def test_wraps_exclude_names(): class TestClass(object, ): def method(self, a, b, c=1, d=2, **kwargs): return (a, b, c, d, kwargs) test = TestClass() @wraps(test.method, exclude_args=(u'self',)) def func(*args, **kwargs): return test.method(*args, **kwargs) if six.PY2: argspec = inspect.getargspec(func) else: argspec = inspect.getfullargspec(func) assert (argspec.args == [u'a', u'b', u'c', u'd']) assert (func(u'a', u'b', e=3) == (u'a', u'b', 1, 2, {u'e': 3}))
[ "def", "test_wraps_exclude_names", "(", ")", ":", "class", "TestClass", "(", "object", ",", ")", ":", "def", "method", "(", "self", ",", "a", ",", "b", ",", "c", "=", "1", ",", "d", "=", "2", ",", "**", "kwargs", ")", ":", "return", "(", "a", ",", "b", ",", "c", ",", "d", ",", "kwargs", ")", "test", "=", "TestClass", "(", ")", "@", "wraps", "(", "test", ".", "method", ",", "exclude_args", "=", "(", "u'self'", ",", ")", ")", "def", "func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "test", ".", "method", "(", "*", "args", ",", "**", "kwargs", ")", "if", "six", ".", "PY2", ":", "argspec", "=", "inspect", ".", "getargspec", "(", "func", ")", "else", ":", "argspec", "=", "inspect", ".", "getfullargspec", "(", "func", ")", "assert", "(", "argspec", ".", "args", "==", "[", "u'a'", ",", "u'b'", ",", "u'c'", ",", "u'd'", "]", ")", "assert", "(", "func", "(", "u'a'", ",", "u'b'", ",", "e", "=", "3", ")", "==", "(", "u'a'", ",", "u'b'", ",", "1", ",", "2", ",", "{", "u'e'", ":", "3", "}", ")", ")" ]
test the optional exclude_names argument to the wraps decorator .
train
false
43,447
def _ensure_requested_network_ordering(accessor, unordered, preferred): if preferred: unordered.sort(key=(lambda i: preferred.index(accessor(i))))
[ "def", "_ensure_requested_network_ordering", "(", "accessor", ",", "unordered", ",", "preferred", ")", ":", "if", "preferred", ":", "unordered", ".", "sort", "(", "key", "=", "(", "lambda", "i", ":", "preferred", ".", "index", "(", "accessor", "(", "i", ")", ")", ")", ")" ]
sort a list with respect to the preferred network ordering .
train
false
43,448
def fedit(data, title=u'', comment=u'', icon=None, parent=None, apply=None): if QtWidgets.QApplication.startingUp(): _app = QtWidgets.QApplication([]) dialog = FormDialog(data, title, comment, icon, parent, apply) if dialog.exec_(): return dialog.get()
[ "def", "fedit", "(", "data", ",", "title", "=", "u''", ",", "comment", "=", "u''", ",", "icon", "=", "None", ",", "parent", "=", "None", ",", "apply", "=", "None", ")", ":", "if", "QtWidgets", ".", "QApplication", ".", "startingUp", "(", ")", ":", "_app", "=", "QtWidgets", ".", "QApplication", "(", "[", "]", ")", "dialog", "=", "FormDialog", "(", "data", ",", "title", ",", "comment", ",", "icon", ",", "parent", ",", "apply", ")", "if", "dialog", ".", "exec_", "(", ")", ":", "return", "dialog", ".", "get", "(", ")" ]
create form dialog and return result data: datalist .
train
false
43,449
def send_feedback_message_email(recipient_id, feedback_messages): email_subject = ("You've received %s new message%s on your explorations" % (len(feedback_messages), ('s' if (len(feedback_messages) > 1) else ''))) email_body_template = 'Hi %s,<br><br>You\'ve received %s new message%s on your Oppia explorations:<br><ul>%s</ul>You can view and reply to your messages from your <a href="https://www.oppia.org/dashboard">dashboard</a>.<br>Thanks, and happy teaching!<br><br>Best wishes,<br>The Oppia Team<br><br>%s' if (not feconf.CAN_SEND_EMAILS): log_new_error('This app cannot send emails to users.') return if (not feconf.CAN_SEND_FEEDBACK_MESSAGE_EMAILS): log_new_error('This app cannot send feedback message emails to users.') return if (not feedback_messages): return recipient_user_settings = user_services.get_user_settings(recipient_id) messages_html = '' for (_, reference) in feedback_messages.iteritems(): for message in reference['messages']: messages_html += ('<li>%s: %s<br></li>' % (reference['title'], message)) email_body = (email_body_template % (recipient_user_settings.username, len(feedback_messages), ('s' if (len(feedback_messages) > 1) else ''), messages_html, EMAIL_FOOTER.value)) _send_email(recipient_id, feconf.SYSTEM_COMMITTER_ID, feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION, email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)
[ "def", "send_feedback_message_email", "(", "recipient_id", ",", "feedback_messages", ")", ":", "email_subject", "=", "(", "\"You've received %s new message%s on your explorations\"", "%", "(", "len", "(", "feedback_messages", ")", ",", "(", "'s'", "if", "(", "len", "(", "feedback_messages", ")", ">", "1", ")", "else", "''", ")", ")", ")", "email_body_template", "=", "'Hi %s,<br><br>You\\'ve received %s new message%s on your Oppia explorations:<br><ul>%s</ul>You can view and reply to your messages from your <a href=\"https://www.oppia.org/dashboard\">dashboard</a>.<br>Thanks, and happy teaching!<br><br>Best wishes,<br>The Oppia Team<br><br>%s'", "if", "(", "not", "feconf", ".", "CAN_SEND_EMAILS", ")", ":", "log_new_error", "(", "'This app cannot send emails to users.'", ")", "return", "if", "(", "not", "feconf", ".", "CAN_SEND_FEEDBACK_MESSAGE_EMAILS", ")", ":", "log_new_error", "(", "'This app cannot send feedback message emails to users.'", ")", "return", "if", "(", "not", "feedback_messages", ")", ":", "return", "recipient_user_settings", "=", "user_services", ".", "get_user_settings", "(", "recipient_id", ")", "messages_html", "=", "''", "for", "(", "_", ",", "reference", ")", "in", "feedback_messages", ".", "iteritems", "(", ")", ":", "for", "message", "in", "reference", "[", "'messages'", "]", ":", "messages_html", "+=", "(", "'<li>%s: %s<br></li>'", "%", "(", "reference", "[", "'title'", "]", ",", "message", ")", ")", "email_body", "=", "(", "email_body_template", "%", "(", "recipient_user_settings", ".", "username", ",", "len", "(", "feedback_messages", ")", ",", "(", "'s'", "if", "(", "len", "(", "feedback_messages", ")", ">", "1", ")", "else", "''", ")", ",", "messages_html", ",", "EMAIL_FOOTER", ".", "value", ")", ")", "_send_email", "(", "recipient_id", ",", "feconf", ".", "SYSTEM_COMMITTER_ID", ",", "feconf", ".", "EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION", ",", "email_subject", ",", "email_body", ",", "feconf", ".", "NOREPLY_EMAIL_ADDRESS", ")" ]
sends an email when creator receives feedback message to an exploration .
train
false
43,450
def _expand_item(item): ret = {} ret.update(item.__dict__) return ret
[ "def", "_expand_item", "(", "item", ")", ":", "ret", "=", "{", "}", "ret", ".", "update", "(", "item", ".", "__dict__", ")", "return", "ret" ]
convert the libcloud object into something more serializable .
train
false
43,451
def resource_path_tuple(resource, *elements): return tuple(_resource_path_list(resource, *elements))
[ "def", "resource_path_tuple", "(", "resource", ",", "*", "elements", ")", ":", "return", "tuple", "(", "_resource_path_list", "(", "resource", ",", "*", "elements", ")", ")" ]
return a tuple representing the absolute physical path of the resource object based on its position in a resource tree .
train
false
43,452
def twiny(ax=None): if (ax is None): ax = gca() ax1 = ax.twiny() draw_if_interactive() return ax1
[ "def", "twiny", "(", "ax", "=", "None", ")", ":", "if", "(", "ax", "is", "None", ")", ":", "ax", "=", "gca", "(", ")", "ax1", "=", "ax", ".", "twiny", "(", ")", "draw_if_interactive", "(", ")", "return", "ax1" ]
make a second axes overlay *ax* sharing the yaxis .
train
false
43,453
def create_job(name, priority, control_file, control_type, hosts=[], profiles=[], meta_hosts=[], meta_host_profiles=[], one_time_hosts=[], atomic_group_name=None, synch_count=None, is_template=False, timeout=None, max_runtime_hrs=None, run_verify=True, email_list='', dependencies=(), reboot_before=None, reboot_after=None, parse_failed_repair=None, hostless=False, keyvals=None, drone_set=None, reserve_hosts=False): return rpc_utils.create_job_common(**rpc_utils.get_create_job_common_args(locals()))
[ "def", "create_job", "(", "name", ",", "priority", ",", "control_file", ",", "control_type", ",", "hosts", "=", "[", "]", ",", "profiles", "=", "[", "]", ",", "meta_hosts", "=", "[", "]", ",", "meta_host_profiles", "=", "[", "]", ",", "one_time_hosts", "=", "[", "]", ",", "atomic_group_name", "=", "None", ",", "synch_count", "=", "None", ",", "is_template", "=", "False", ",", "timeout", "=", "None", ",", "max_runtime_hrs", "=", "None", ",", "run_verify", "=", "True", ",", "email_list", "=", "''", ",", "dependencies", "=", "(", ")", ",", "reboot_before", "=", "None", ",", "reboot_after", "=", "None", ",", "parse_failed_repair", "=", "None", ",", "hostless", "=", "False", ",", "keyvals", "=", "None", ",", "drone_set", "=", "None", ",", "reserve_hosts", "=", "False", ")", ":", "return", "rpc_utils", ".", "create_job_common", "(", "**", "rpc_utils", ".", "get_create_job_common_args", "(", "locals", "(", ")", ")", ")" ]
create and enqueue a job .
train
false
43,454
def _getinfos_dns(spec): infos = {} fullinfos = {} fields = {'domain': 'value', 'domaintarget': 'targetval'} for field in fields: try: if (fields[field] not in spec): continue infos[field] = [] fullinfos[field] = [] for domain in utils.get_domains(spec.get(('full' + fields[field]), spec[fields[field]])): infos[field].append(domain[:utils.MAXVALLEN]) if (len(domain) > utils.MAXVALLEN): fullinfos[field].append(domain) if (not infos[field]): del infos[field] if (not fullinfos[field]): del fullinfos[field] except Exception: pass res = {} if infos: res['infos'] = infos if fullinfos: res['fullinfos'] = fullinfos return res
[ "def", "_getinfos_dns", "(", "spec", ")", ":", "infos", "=", "{", "}", "fullinfos", "=", "{", "}", "fields", "=", "{", "'domain'", ":", "'value'", ",", "'domaintarget'", ":", "'targetval'", "}", "for", "field", "in", "fields", ":", "try", ":", "if", "(", "fields", "[", "field", "]", "not", "in", "spec", ")", ":", "continue", "infos", "[", "field", "]", "=", "[", "]", "fullinfos", "[", "field", "]", "=", "[", "]", "for", "domain", "in", "utils", ".", "get_domains", "(", "spec", ".", "get", "(", "(", "'full'", "+", "fields", "[", "field", "]", ")", ",", "spec", "[", "fields", "[", "field", "]", "]", ")", ")", ":", "infos", "[", "field", "]", ".", "append", "(", "domain", "[", ":", "utils", ".", "MAXVALLEN", "]", ")", "if", "(", "len", "(", "domain", ")", ">", "utils", ".", "MAXVALLEN", ")", ":", "fullinfos", "[", "field", "]", ".", "append", "(", "domain", ")", "if", "(", "not", "infos", "[", "field", "]", ")", ":", "del", "infos", "[", "field", "]", "if", "(", "not", "fullinfos", "[", "field", "]", ")", ":", "del", "fullinfos", "[", "field", "]", "except", "Exception", ":", "pass", "res", "=", "{", "}", "if", "infos", ":", "res", "[", "'infos'", "]", "=", "infos", "if", "fullinfos", ":", "res", "[", "'fullinfos'", "]", "=", "fullinfos", "return", "res" ]
extract domain names in an handy-to-index-and-query form .
train
false
43,455
def get_user_permission_codename(perm): return get_user_permission_full_codename(perm).split(u'.')[1]
[ "def", "get_user_permission_codename", "(", "perm", ")", ":", "return", "get_user_permission_full_codename", "(", "perm", ")", ".", "split", "(", "u'.'", ")", "[", "1", "]" ]
returns <perm>_<usermodulename> .
train
false
43,456
def django_orm_maxlength_truncate(backend, details, user=None, is_new=False, *args, **kwargs): if (user is None): return out = {} names = user._meta.get_all_field_names() for (name, value) in six.iteritems(details): if ((name in names) and (not _ignore_field(name, is_new))): max_length = user._meta.get_field(name).max_length try: if (max_length and (len(value) > max_length)): value = value[:max_length] except TypeError: pass out[name] = value return {'details': out}
[ "def", "django_orm_maxlength_truncate", "(", "backend", ",", "details", ",", "user", "=", "None", ",", "is_new", "=", "False", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "user", "is", "None", ")", ":", "return", "out", "=", "{", "}", "names", "=", "user", ".", "_meta", ".", "get_all_field_names", "(", ")", "for", "(", "name", ",", "value", ")", "in", "six", ".", "iteritems", "(", "details", ")", ":", "if", "(", "(", "name", "in", "names", ")", "and", "(", "not", "_ignore_field", "(", "name", ",", "is_new", ")", ")", ")", ":", "max_length", "=", "user", ".", "_meta", ".", "get_field", "(", "name", ")", ".", "max_length", "try", ":", "if", "(", "max_length", "and", "(", "len", "(", "value", ")", ">", "max_length", ")", ")", ":", "value", "=", "value", "[", ":", "max_length", "]", "except", "TypeError", ":", "pass", "out", "[", "name", "]", "=", "value", "return", "{", "'details'", ":", "out", "}" ]
truncate any value in details that corresponds with a field in the user model .
train
false
43,459
def decode_table(encoded, offset): result = {} tablesize = struct.unpack_from('>I', encoded, offset)[0] offset += 4 limit = (offset + tablesize) while (offset < limit): (key, offset) = decode_short_string(encoded, offset) (value, offset) = decode_value(encoded, offset) result[key] = value return (result, offset)
[ "def", "decode_table", "(", "encoded", ",", "offset", ")", ":", "result", "=", "{", "}", "tablesize", "=", "struct", ".", "unpack_from", "(", "'>I'", ",", "encoded", ",", "offset", ")", "[", "0", "]", "offset", "+=", "4", "limit", "=", "(", "offset", "+", "tablesize", ")", "while", "(", "offset", "<", "limit", ")", ":", "(", "key", ",", "offset", ")", "=", "decode_short_string", "(", "encoded", ",", "offset", ")", "(", "value", ",", "offset", ")", "=", "decode_value", "(", "encoded", ",", "offset", ")", "result", "[", "key", "]", "=", "value", "return", "(", "result", ",", "offset", ")" ]
decode the amqp table passed in from the encoded value returning the decoded result and the number of bytes read plus the offset .
train
false
43,461
def safe_bed_file(infile): fix_pat = re.compile('^(track|browser)') (fd, fname) = tempfile.mkstemp() in_handle = open(infile) out_handle = open(fname, 'w') for line in in_handle: if fix_pat.match(line): line = ('#' + line) out_handle.write(line) in_handle.close() out_handle.close() return fname
[ "def", "safe_bed_file", "(", "infile", ")", ":", "fix_pat", "=", "re", ".", "compile", "(", "'^(track|browser)'", ")", "(", "fd", ",", "fname", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "in_handle", "=", "open", "(", "infile", ")", "out_handle", "=", "open", "(", "fname", ",", "'w'", ")", "for", "line", "in", "in_handle", ":", "if", "fix_pat", ".", "match", "(", "line", ")", ":", "line", "=", "(", "'#'", "+", "line", ")", "out_handle", ".", "write", "(", "line", ")", "in_handle", ".", "close", "(", ")", "out_handle", ".", "close", "(", ")", "return", "fname" ]
make a bed file with track and browser lines ready for liftover .
train
false
43,462
def string_decode(string): if (PY_MAJOR_VERSION > 2): return bytes(string, 'utf-8').decode('unicode_escape') else: return string.decode('string_escape')
[ "def", "string_decode", "(", "string", ")", ":", "if", "(", "PY_MAJOR_VERSION", ">", "2", ")", ":", "return", "bytes", "(", "string", ",", "'utf-8'", ")", ".", "decode", "(", "'unicode_escape'", ")", "else", ":", "return", "string", ".", "decode", "(", "'string_escape'", ")" ]
for cross compatibility between python 2 and python 3 strings .
train
false
43,464
def print_binutils_instructions(util, context): instructions = 'https://docs.pwntools.com/en/stable/install/binutils.html' binutils_arch = {'amd64': 'x86_64', 'arm': 'armeabi', 'thumb': 'armeabi'}.get(context.arch, context.arch) packages = dpkg_search_for_binutils(binutils_arch, util) if packages: instructions = ('$ sudo apt-get install %s' % packages[0]) log.error(('\nCould not find %(util)r installed for %(context)s\nTry installing binutils for this architecture:\n%(instructions)s\n'.strip() % locals()))
[ "def", "print_binutils_instructions", "(", "util", ",", "context", ")", ":", "instructions", "=", "'https://docs.pwntools.com/en/stable/install/binutils.html'", "binutils_arch", "=", "{", "'amd64'", ":", "'x86_64'", ",", "'arm'", ":", "'armeabi'", ",", "'thumb'", ":", "'armeabi'", "}", ".", "get", "(", "context", ".", "arch", ",", "context", ".", "arch", ")", "packages", "=", "dpkg_search_for_binutils", "(", "binutils_arch", ",", "util", ")", "if", "packages", ":", "instructions", "=", "(", "'$ sudo apt-get install %s'", "%", "packages", "[", "0", "]", ")", "log", ".", "error", "(", "(", "'\\nCould not find %(util)r installed for %(context)s\\nTry installing binutils for this architecture:\\n%(instructions)s\\n'", ".", "strip", "(", ")", "%", "locals", "(", ")", ")", ")" ]
on failure to find a binutils utility .
train
false
43,465
def convert_newlines(fname, in_place=True, tmp_dir=None, tmp_prefix=None): (fd, temp_name) = tempfile.mkstemp(prefix=tmp_prefix, dir=tmp_dir) fp = os.fdopen(fd, 'wt') i = None for (i, line) in enumerate(open(fname, 'U')): fp.write(('%s\n' % line.rstrip('\r\n'))) fp.close() if (i is None): i = 0 else: i += 1 if in_place: shutil.move(temp_name, fname) return (i, None) else: return (i, temp_name)
[ "def", "convert_newlines", "(", "fname", ",", "in_place", "=", "True", ",", "tmp_dir", "=", "None", ",", "tmp_prefix", "=", "None", ")", ":", "(", "fd", ",", "temp_name", ")", "=", "tempfile", ".", "mkstemp", "(", "prefix", "=", "tmp_prefix", ",", "dir", "=", "tmp_dir", ")", "fp", "=", "os", ".", "fdopen", "(", "fd", ",", "'wt'", ")", "i", "=", "None", "for", "(", "i", ",", "line", ")", "in", "enumerate", "(", "open", "(", "fname", ",", "'U'", ")", ")", ":", "fp", ".", "write", "(", "(", "'%s\\n'", "%", "line", ".", "rstrip", "(", "'\\r\\n'", ")", ")", ")", "fp", ".", "close", "(", ")", "if", "(", "i", "is", "None", ")", ":", "i", "=", "0", "else", ":", "i", "+=", "1", "if", "in_place", ":", "shutil", ".", "move", "(", "temp_name", ",", "fname", ")", "return", "(", "i", ",", "None", ")", "else", ":", "return", "(", "i", ",", "temp_name", ")" ]
converts in place a file from universal line endings to posix line endings .
train
false
43,466
def fix_type_error(exc_info, callable, varargs, kwargs): if (exc_info is None): exc_info = sys.exc_info() if ((exc_info[0] != TypeError) or (str(exc_info[1]).find('arguments') == (-1)) or getattr(exc_info[1], '_type_error_fixed', False)): return exc_info exc_info[1]._type_error_fixed = True argspec = inspect.formatargspec(*inspect.getargspec(callable)) args = ', '.join(map(_short_repr, varargs)) if (kwargs and args): args += ', ' if kwargs: kwargs = sorted(kwargs.keys()) args += ', '.join((('%s=...' % n) for n in kwargs)) gotspec = ('(%s)' % args) msg = ('%s; got %s, wanted %s' % (exc_info[1], gotspec, argspec)) exc_info[1].args = (msg,) return exc_info
[ "def", "fix_type_error", "(", "exc_info", ",", "callable", ",", "varargs", ",", "kwargs", ")", ":", "if", "(", "exc_info", "is", "None", ")", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "(", "(", "exc_info", "[", "0", "]", "!=", "TypeError", ")", "or", "(", "str", "(", "exc_info", "[", "1", "]", ")", ".", "find", "(", "'arguments'", ")", "==", "(", "-", "1", ")", ")", "or", "getattr", "(", "exc_info", "[", "1", "]", ",", "'_type_error_fixed'", ",", "False", ")", ")", ":", "return", "exc_info", "exc_info", "[", "1", "]", ".", "_type_error_fixed", "=", "True", "argspec", "=", "inspect", ".", "formatargspec", "(", "*", "inspect", ".", "getargspec", "(", "callable", ")", ")", "args", "=", "', '", ".", "join", "(", "map", "(", "_short_repr", ",", "varargs", ")", ")", "if", "(", "kwargs", "and", "args", ")", ":", "args", "+=", "', '", "if", "kwargs", ":", "kwargs", "=", "sorted", "(", "kwargs", ".", "keys", "(", ")", ")", "args", "+=", "', '", ".", "join", "(", "(", "(", "'%s=...'", "%", "n", ")", "for", "n", "in", "kwargs", ")", ")", "gotspec", "=", "(", "'(%s)'", "%", "args", ")", "msg", "=", "(", "'%s; got %s, wanted %s'", "%", "(", "exc_info", "[", "1", "]", ",", "gotspec", ",", "argspec", ")", ")", "exc_info", "[", "1", "]", ".", "args", "=", "(", "msg", ",", ")", "return", "exc_info" ]
given an exception .
train
true
43,468
def extractField(field, event): keyFlattener = KeyFlattener() [[literalText, fieldName, formatSpec, conversion]] = aFormatter.parse((('{' + field) + '}')) key = keyFlattener.flatKey(fieldName, formatSpec, conversion) if ('log_flattened' not in event): flattenEvent(event) return event['log_flattened'][key]
[ "def", "extractField", "(", "field", ",", "event", ")", ":", "keyFlattener", "=", "KeyFlattener", "(", ")", "[", "[", "literalText", ",", "fieldName", ",", "formatSpec", ",", "conversion", "]", "]", "=", "aFormatter", ".", "parse", "(", "(", "(", "'{'", "+", "field", ")", "+", "'}'", ")", ")", "key", "=", "keyFlattener", ".", "flatKey", "(", "fieldName", ",", "formatSpec", ",", "conversion", ")", "if", "(", "'log_flattened'", "not", "in", "event", ")", ":", "flattenEvent", "(", "event", ")", "return", "event", "[", "'log_flattened'", "]", "[", "key", "]" ]
extract a given format field from the given event .
train
false
43,469
def record_messages(connection, topic, output): def process_event(body): print ('%s: %s' % (body.get('timestamp'), body.get('event_type', 'unknown event'))) pickle.dump(body, output) connection.declare_topic_consumer(topic, process_event) try: connection.consume() except KeyboardInterrupt: pass
[ "def", "record_messages", "(", "connection", ",", "topic", ",", "output", ")", ":", "def", "process_event", "(", "body", ")", ":", "print", "(", "'%s: %s'", "%", "(", "body", ".", "get", "(", "'timestamp'", ")", ",", "body", ".", "get", "(", "'event_type'", ",", "'unknown event'", ")", ")", ")", "pickle", ".", "dump", "(", "body", ",", "output", ")", "connection", ".", "declare_topic_consumer", "(", "topic", ",", "process_event", ")", "try", ":", "connection", ".", "consume", "(", ")", "except", "KeyboardInterrupt", ":", "pass" ]
listen to notification .
train
false
43,470
def fsjoin(*args): return encode(os.path.join(*args))
[ "def", "fsjoin", "(", "*", "args", ")", ":", "return", "encode", "(", "os", ".", "path", ".", "join", "(", "*", "args", ")", ")" ]
like os .
train
false
43,471
def id_chooser(query, ident): return ['north_america', 'asia', 'europe', 'south_america']
[ "def", "id_chooser", "(", "query", ",", "ident", ")", ":", "return", "[", "'north_america'", ",", "'asia'", ",", "'europe'", ",", "'south_america'", "]" ]
id chooser .
train
false
43,472
def intermediate_points(start, end, graph_data): newdata = [] newdata.append((start, (graph_data[0][0] + ((graph_data[1][0] - graph_data[0][0]) / 2.0)), graph_data[0][1])) for index in range(1, (len(graph_data) - 1)): (lastxval, lastyval) = graph_data[(index - 1)] (xval, yval) = graph_data[index] (nextxval, nextyval) = graph_data[(index + 1)] newdata.append(((lastxval + ((xval - lastxval) / 2.0)), (xval + ((nextxval - xval) / 2.0)), yval)) newdata.append(((xval + ((nextxval - xval) / 2.0)), end, graph_data[(-1)][1])) return newdata
[ "def", "intermediate_points", "(", "start", ",", "end", ",", "graph_data", ")", ":", "newdata", "=", "[", "]", "newdata", ".", "append", "(", "(", "start", ",", "(", "graph_data", "[", "0", "]", "[", "0", "]", "+", "(", "(", "graph_data", "[", "1", "]", "[", "0", "]", "-", "graph_data", "[", "0", "]", "[", "0", "]", ")", "/", "2.0", ")", ")", ",", "graph_data", "[", "0", "]", "[", "1", "]", ")", ")", "for", "index", "in", "range", "(", "1", ",", "(", "len", "(", "graph_data", ")", "-", "1", ")", ")", ":", "(", "lastxval", ",", "lastyval", ")", "=", "graph_data", "[", "(", "index", "-", "1", ")", "]", "(", "xval", ",", "yval", ")", "=", "graph_data", "[", "index", "]", "(", "nextxval", ",", "nextyval", ")", "=", "graph_data", "[", "(", "index", "+", "1", ")", "]", "newdata", ".", "append", "(", "(", "(", "lastxval", "+", "(", "(", "xval", "-", "lastxval", ")", "/", "2.0", ")", ")", ",", "(", "xval", "+", "(", "(", "nextxval", "-", "xval", ")", "/", "2.0", ")", ")", ",", "yval", ")", ")", "newdata", ".", "append", "(", "(", "(", "xval", "+", "(", "(", "nextxval", "-", "xval", ")", "/", "2.0", ")", ")", ",", "end", ",", "graph_data", "[", "(", "-", "1", ")", "]", "[", "1", "]", ")", ")", "return", "newdata" ]
generate intermediate points describing provided graph data .
train
false
43,473
def apply_view_middleware(request): urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF) urlresolvers.set_urlconf(urlconf) resolver = urlresolvers.RegexURLResolver('^/', urlconf) resolver_match = resolver.resolve(request.path_info) (callback, callback_args, callback_kwargs) = resolver_match request.resolver_match = resolver_match for middleware_path in settings.MIDDLEWARE_CLASSES: mw_class = import_string(middleware_path) try: mw_instance = mw_class() except MiddlewareNotUsed: continue if hasattr(mw_instance, 'process_view'): mw_instance.process_view(request, callback, callback_args, callback_kwargs) return request
[ "def", "apply_view_middleware", "(", "request", ")", ":", "urlconf", "=", "getattr", "(", "request", ",", "'urlconf'", ",", "settings", ".", "ROOT_URLCONF", ")", "urlresolvers", ".", "set_urlconf", "(", "urlconf", ")", "resolver", "=", "urlresolvers", ".", "RegexURLResolver", "(", "'^/'", ",", "urlconf", ")", "resolver_match", "=", "resolver", ".", "resolve", "(", "request", ".", "path_info", ")", "(", "callback", ",", "callback_args", ",", "callback_kwargs", ")", "=", "resolver_match", "request", ".", "resolver_match", "=", "resolver_match", "for", "middleware_path", "in", "settings", ".", "MIDDLEWARE_CLASSES", ":", "mw_class", "=", "import_string", "(", "middleware_path", ")", "try", ":", "mw_instance", "=", "mw_class", "(", ")", "except", "MiddlewareNotUsed", ":", "continue", "if", "hasattr", "(", "mw_instance", ",", "'process_view'", ")", ":", "mw_instance", ".", "process_view", "(", "request", ",", "callback", ",", "callback_args", ",", "callback_kwargs", ")", "return", "request" ]
apply all the process_view capable middleware configured into the given request .
train
false
43,476
def ps(cmd): if (not LINUX): cmd = cmd.replace(' --no-headers ', ' ') if SUNOS: cmd = cmd.replace('-o command', '-o comm') cmd = cmd.replace('-o start', '-o stime') p = subprocess.Popen(cmd, shell=1, stdout=subprocess.PIPE) output = p.communicate()[0].strip() if PY3: output = str(output, sys.stdout.encoding) if (not LINUX): output = output.split('\n')[1].strip() try: return int(output) except ValueError: return output
[ "def", "ps", "(", "cmd", ")", ":", "if", "(", "not", "LINUX", ")", ":", "cmd", "=", "cmd", ".", "replace", "(", "' --no-headers '", ",", "' '", ")", "if", "SUNOS", ":", "cmd", "=", "cmd", ".", "replace", "(", "'-o command'", ",", "'-o comm'", ")", "cmd", "=", "cmd", ".", "replace", "(", "'-o start'", ",", "'-o stime'", ")", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "1", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "output", "=", "p", ".", "communicate", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "PY3", ":", "output", "=", "str", "(", "output", ",", "sys", ".", "stdout", ".", "encoding", ")", "if", "(", "not", "LINUX", ")", ":", "output", "=", "output", ".", "split", "(", "'\\n'", ")", "[", "1", "]", ".", "strip", "(", ")", "try", ":", "return", "int", "(", "output", ")", "except", "ValueError", ":", "return", "output" ]
expects a ps command with a -o argument and parse the result returning only the value of interest .
train
false
43,479
def define_ua_schema(session): uaserver_tables = [{'name': dbconstants.APPS_TABLE, 'schema': dbconstants.APPS_SCHEMA}, {'name': dbconstants.USERS_TABLE, 'schema': dbconstants.USERS_SCHEMA}] for table in uaserver_tables: key = bytearray('/'.join([dbconstants.SCHEMA_TABLE, table['name']])) columns = bytearray(':'.join(table['schema'])) define_schema = '\n INSERT INTO "{table}" ({key}, {column}, {value})\n VALUES (%(key)s, %(column)s, %(value)s)\n '.format(table=dbconstants.SCHEMA_TABLE, key=ThriftColumn.KEY, column=ThriftColumn.COLUMN_NAME, value=ThriftColumn.VALUE) values = {'key': key, 'column': dbconstants.SCHEMA_TABLE_SCHEMA[0], 'value': columns} session.execute(define_schema, values)
[ "def", "define_ua_schema", "(", "session", ")", ":", "uaserver_tables", "=", "[", "{", "'name'", ":", "dbconstants", ".", "APPS_TABLE", ",", "'schema'", ":", "dbconstants", ".", "APPS_SCHEMA", "}", ",", "{", "'name'", ":", "dbconstants", ".", "USERS_TABLE", ",", "'schema'", ":", "dbconstants", ".", "USERS_SCHEMA", "}", "]", "for", "table", "in", "uaserver_tables", ":", "key", "=", "bytearray", "(", "'/'", ".", "join", "(", "[", "dbconstants", ".", "SCHEMA_TABLE", ",", "table", "[", "'name'", "]", "]", ")", ")", "columns", "=", "bytearray", "(", "':'", ".", "join", "(", "table", "[", "'schema'", "]", ")", ")", "define_schema", "=", "'\\n INSERT INTO \"{table}\" ({key}, {column}, {value})\\n VALUES (%(key)s, %(column)s, %(value)s)\\n '", ".", "format", "(", "table", "=", "dbconstants", ".", "SCHEMA_TABLE", ",", "key", "=", "ThriftColumn", ".", "KEY", ",", "column", "=", "ThriftColumn", ".", "COLUMN_NAME", ",", "value", "=", "ThriftColumn", ".", "VALUE", ")", "values", "=", "{", "'key'", ":", "key", ",", "'column'", ":", "dbconstants", ".", "SCHEMA_TABLE_SCHEMA", "[", "0", "]", ",", "'value'", ":", "columns", "}", "session", ".", "execute", "(", "define_schema", ",", "values", ")" ]
populate the schema table for the uaserver .
train
false
43,480
def nanargmin(values, axis=None, skipna=True): (values, mask, dtype, _) = _get_values(values, skipna, fill_value_typ='+inf', isfinite=True) result = values.argmin(axis) result = _maybe_arg_null_out(result, axis, mask, skipna) return result
[ "def", "nanargmin", "(", "values", ",", "axis", "=", "None", ",", "skipna", "=", "True", ")", ":", "(", "values", ",", "mask", ",", "dtype", ",", "_", ")", "=", "_get_values", "(", "values", ",", "skipna", ",", "fill_value_typ", "=", "'+inf'", ",", "isfinite", "=", "True", ")", "result", "=", "values", ".", "argmin", "(", "axis", ")", "result", "=", "_maybe_arg_null_out", "(", "result", ",", "axis", ",", "mask", ",", "skipna", ")", "return", "result" ]
returns -1 in the na case .
train
true
43,481
def requires_parallel(task): return ((state.env.parallel and (not getattr(task, 'serial', False))) or getattr(task, 'parallel', False))
[ "def", "requires_parallel", "(", "task", ")", ":", "return", "(", "(", "state", ".", "env", ".", "parallel", "and", "(", "not", "getattr", "(", "task", ",", "'serial'", ",", "False", ")", ")", ")", "or", "getattr", "(", "task", ",", "'parallel'", ",", "False", ")", ")" ]
returns true if given task should be run in parallel mode .
train
false