Column                 Type           Values
id_within_dataset      int64          1 to 55.5k
snippet                stringlengths  19 to 14.2k
tokens                 listlengths    6 to 1.63k
nl                     stringlengths  6 to 352
split_within_dataset   stringclasses  1 value
is_duplicated          bool           2 classes
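Each record below repeats these six fields in order: id_within_dataset, snippet, tokens, nl, split_within_dataset, is_duplicated. As a minimal sketch of how records with this schema might be consumed — assuming the corpus is hosted on the Hugging Face Hub; the dataset id used here is hypothetical, not the real one — iteration could look like:

# Minimal sketch, not the official loader; "user/code-nl-pairs" is a
# hypothetical dataset id standing in for wherever this corpus is hosted.
from datasets import load_dataset

ds = load_dataset("user/code-nl-pairs", split="train")  # split_within_dataset has a single value
for record in ds:
    if record["is_duplicated"]:     # bool column, 2 classes
        continue                    # e.g. drop duplicated snippets before training
    code = record["snippet"]        # raw source text, 19 to 14.2k chars
    summary = record["nl"]          # lowercased one-line docstring, 6 to 352 chars
    tokens = record["tokens"]       # pre-split lexer tokens, 6 to 1.63k per snippet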
48,824
def MkExFileWidget(w):
    msg = Tix.Message(w, relief=Tix.FLAT, width=240, anchor=Tix.N,
                      text='The Tix ExFileSelectBox widget is more user friendly than the Motif style FileSelectBox.')
    box = Tix.ExFileSelectBox(w, bd=2, relief=Tix.RAISED)
    msg.pack(side=Tix.TOP, expand=1, fill=Tix.BOTH, padx=3, pady=3)
    box.pack(side=Tix.TOP, padx=3, pady=3)
[ "def", "MkExFileWidget", "(", "w", ")", ":", "msg", "=", "Tix", ".", "Message", "(", "w", ",", "relief", "=", "Tix", ".", "FLAT", ",", "width", "=", "240", ",", "anchor", "=", "Tix", ".", "N", ",", "text", "=", "'The Tix ExFileSelectBox widget is more user friendly than the Motif style FileSelectBox.'", ")", "box", "=", "Tix", ".", "ExFileSelectBox", "(", "w", ",", "bd", "=", "2", ",", "relief", "=", "Tix", ".", "RAISED", ")", "msg", ".", "pack", "(", "side", "=", "Tix", ".", "TOP", ",", "expand", "=", "1", ",", "fill", "=", "Tix", ".", "BOTH", ",", "padx", "=", "3", ",", "pady", "=", "3", ")", "box", ".", "pack", "(", "side", "=", "Tix", ".", "TOP", ",", "padx", "=", "3", ",", "pady", "=", "3", ")" ]
the tixexfileselectbox widget is more user friendly than the motif style fileselectbox .
train
false
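The tokens column appears to be a lexer-level split of snippet that keeps string literals quoted, much like Python's standard tokenize module. A rough sketch of reproducing it (an assumption about the corpus pipeline, not its documented tooling; the Python 2 snippets below that use print statements will not parse under Python 3's tokenizer):

import io
import tokenize

def rough_tokens(snippet):
    # Drop layout-only tokens so only lexemes remain; for Python-3-parsable
    # snippets the result closely mirrors this dataset's "tokens" field.
    skip = (tokenize.NEWLINE, tokenize.NL, tokenize.INDENT,
            tokenize.DEDENT, tokenize.ENDMARKER, tokenize.COMMENT)
    return [tok.string
            for tok in tokenize.generate_tokens(io.StringIO(snippet).readline)
            if tok.type not in skip]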
48,826
def extract_id_from_snapshot_name(snap_name):
    regex = re.compile(CONF.snapshot_name_template.replace('%s', '(?P<uuid>.+)'))
    match = regex.match(snap_name)
    return (match.group('uuid') if match else None)
[ "def", "extract_id_from_snapshot_name", "(", "snap_name", ")", ":", "regex", "=", "re", ".", "compile", "(", "CONF", ".", "snapshot_name_template", ".", "replace", "(", "'%s'", ",", "'(?P<uuid>.+)'", ")", ")", "match", "=", "regex", ".", "match", "(", "snap_name", ")", "return", "(", "match", ".", "group", "(", "'uuid'", ")", "if", "match", "else", "None", ")" ]
return a snapshots id from its name on the backend .
train
false
48,828
def requireAnomalyModel(func):
    def _decorator(self, *args, **kwargs):
        if (not (self.getInferenceType() == InferenceType.TemporalAnomaly)):
            raise RuntimeError('Method required a TemporalAnomaly model.')
        if (self._getAnomalyClassifier() is None):
            raise RuntimeError('Model does not support this command. Model mustbe an active anomalyDetector model.')
        return func(self, *args, **kwargs)
    return _decorator
[ "def", "requireAnomalyModel", "(", "func", ")", ":", "def", "_decorator", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "(", "self", ".", "getInferenceType", "(", ")", "==", "InferenceType", ".", "TemporalAnomaly", ")", ")", ":", "raise", "RuntimeError", "(", "'Method required a TemporalAnomaly model.'", ")", "if", "(", "self", ".", "_getAnomalyClassifier", "(", ")", "is", "None", ")", ":", "raise", "RuntimeError", "(", "'Model does not support this command. Model mustbe an active anomalyDetector model.'", ")", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "_decorator" ]
decorator for functions that require anomaly models .
train
true
48,829
def header_decode(s):
    s = s.replace('_', ' ')
    return re.sub('=\\w{2}', _unquote_match, s)
[ "def", "header_decode", "(", "s", ")", ":", "s", "=", "s", ".", "replace", "(", "'_'", ",", "' '", ")", "return", "re", ".", "sub", "(", "'=\\\\w{2}'", ",", "_unquote_match", ",", "s", ")" ]
decode a string encoded with rfc 2045 mime header q encoding .
train
false
48,830
def gf_TC(f, K):
    if (not f):
        return K.zero
    else:
        return f[(-1)]
[ "def", "gf_TC", "(", "f", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "return", "K", ".", "zero", "else", ":", "return", "f", "[", "(", "-", "1", ")", "]" ]
return the trailing coefficient of f .
train
false
48,831
@contextlib.contextmanager
def current_ioloop(io_loop):
    orig_loop = tornado.ioloop.IOLoop.current()
    io_loop.make_current()
    try:
        (yield)
    finally:
        orig_loop.make_current()
[ "@", "contextlib", ".", "contextmanager", "def", "current_ioloop", "(", "io_loop", ")", ":", "orig_loop", "=", "tornado", ".", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "io_loop", ".", "make_current", "(", ")", "try", ":", "(", "yield", ")", "finally", ":", "orig_loop", ".", "make_current", "(", ")" ]
a context manager that will set the current ioloop to io_loop for the context .
train
true
48,832
def pytest_fixture_setup(fixturedef, request):
    kwargs = {}
    for argname in fixturedef.argnames:
        fixdef = request._get_active_fixturedef(argname)
        (result, arg_cache_key, exc) = fixdef.cached_result
        request._check_scope(argname, request.scope, fixdef.scope)
        kwargs[argname] = result
    fixturefunc = fixturedef.func
    if fixturedef.unittest:
        if (request.instance is not None):
            fixturefunc = fixturedef.func.__get__(request.instance)
    elif (request.instance is not None):
        fixturefunc = getimfunc(fixturedef.func)
        if (fixturefunc != fixturedef.func):
            fixturefunc = fixturefunc.__get__(request.instance)
    my_cache_key = request.param_index
    try:
        result = call_fixture_func(fixturefunc, request, kwargs)
    except Exception:
        fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
        raise
    fixturedef.cached_result = (result, my_cache_key, None)
    return result
[ "def", "pytest_fixture_setup", "(", "fixturedef", ",", "request", ")", ":", "kwargs", "=", "{", "}", "for", "argname", "in", "fixturedef", ".", "argnames", ":", "fixdef", "=", "request", ".", "_get_active_fixturedef", "(", "argname", ")", "(", "result", ",", "arg_cache_key", ",", "exc", ")", "=", "fixdef", ".", "cached_result", "request", ".", "_check_scope", "(", "argname", ",", "request", ".", "scope", ",", "fixdef", ".", "scope", ")", "kwargs", "[", "argname", "]", "=", "result", "fixturefunc", "=", "fixturedef", ".", "func", "if", "fixturedef", ".", "unittest", ":", "if", "(", "request", ".", "instance", "is", "not", "None", ")", ":", "fixturefunc", "=", "fixturedef", ".", "func", ".", "__get__", "(", "request", ".", "instance", ")", "elif", "(", "request", ".", "instance", "is", "not", "None", ")", ":", "fixturefunc", "=", "getimfunc", "(", "fixturedef", ".", "func", ")", "if", "(", "fixturefunc", "!=", "fixturedef", ".", "func", ")", ":", "fixturefunc", "=", "fixturefunc", ".", "__get__", "(", "request", ".", "instance", ")", "my_cache_key", "=", "request", ".", "param_index", "try", ":", "result", "=", "call_fixture_func", "(", "fixturefunc", ",", "request", ",", "kwargs", ")", "except", "Exception", ":", "fixturedef", ".", "cached_result", "=", "(", "None", ",", "my_cache_key", ",", "sys", ".", "exc_info", "(", ")", ")", "raise", "fixturedef", ".", "cached_result", "=", "(", "result", ",", "my_cache_key", ",", "None", ")", "return", "result" ]
execution of fixture setup .
train
false
48,836
def thisjustin(command=None, username=None):
    user = (username if username else 'Dave')
    if (command == 'open the pod bay doors'):
        print ("I'm sorry, %s. afraid I can't do that." % user)
    elif (command == 'sing a song'):
        print "Daisy, Daisy, give me your answer do. I'm half crazy all for the love of you. It won't be a stylish marriage, I can't afford a carriage. But you'll look sweet upon the seat of a bicycle built for two."
    elif (command == 'do you read me?'):
        print ('Affirmative, %s. I read you.' % user)
    elif (command is None):
        print ("Just what do you think you're doing, %s?" % user)
[ "def", "thisjustin", "(", "command", "=", "None", ",", "username", "=", "None", ")", ":", "user", "=", "(", "username", "if", "username", "else", "'Dave'", ")", "if", "(", "command", "==", "'open the pod bay doors'", ")", ":", "print", "(", "\"I'm sorry, %s. afraid I can't do that.\"", "%", "user", ")", "elif", "(", "command", "==", "'sing a song'", ")", ":", "print", "\"Daisy, Daisy, give me your answer do. I'm half crazy all for the love of you. It won't be a stylish marriage, I can't afford a carriage. But you'll look sweet upon the seat of a bicycle built for two.\"", "elif", "(", "command", "==", "'do you read me?'", ")", ":", "print", "(", "'Affirmative, %s. I read you.'", "%", "user", ")", "elif", "(", "command", "is", "None", ")", ":", "print", "(", "\"Just what do you think you're doing, %s?\"", "%", "user", ")" ]
responds to a few choice hal 9000 commands form 2001: a space odyssey .
train
false
48,837
def _check_broadcast_up_to(arr_from, shape_to, name):
    shape_from = arr_from.shape
    if (len(shape_to) >= len(shape_from)):
        for (t, f) in zip(shape_to[::(-1)], shape_from[::(-1)]):
            if ((f != 1) and (f != t)):
                break
        else:
            if ((arr_from.size != 1) and (arr_from.shape != shape_to)):
                arr_from = (np.ones(shape_to, arr_from.dtype) * arr_from)
            return arr_from.ravel()
    raise ValueError(('%s argument must be able to broadcast up to shape %s but had shape %s' % (name, shape_to, shape_from)))
[ "def", "_check_broadcast_up_to", "(", "arr_from", ",", "shape_to", ",", "name", ")", ":", "shape_from", "=", "arr_from", ".", "shape", "if", "(", "len", "(", "shape_to", ")", ">=", "len", "(", "shape_from", ")", ")", ":", "for", "(", "t", ",", "f", ")", "in", "zip", "(", "shape_to", "[", ":", ":", "(", "-", "1", ")", "]", ",", "shape_from", "[", ":", ":", "(", "-", "1", ")", "]", ")", ":", "if", "(", "(", "f", "!=", "1", ")", "and", "(", "f", "!=", "t", ")", ")", ":", "break", "else", ":", "if", "(", "(", "arr_from", ".", "size", "!=", "1", ")", "and", "(", "arr_from", ".", "shape", "!=", "shape_to", ")", ")", ":", "arr_from", "=", "(", "np", ".", "ones", "(", "shape_to", ",", "arr_from", ".", "dtype", ")", "*", "arr_from", ")", "return", "arr_from", ".", "ravel", "(", ")", "raise", "ValueError", "(", "(", "'%s argument must be able to broadcast up to shape %s but had shape %s'", "%", "(", "name", ",", "shape_to", ",", "shape_from", ")", ")", ")" ]
helper to check that arr_from broadcasts up to shape_to .
train
false
48,838
def format_allowed(allowed):
    return_value = []
    if (allowed.count(';') == 0):
        return [format_allowed_section(allowed)]
    else:
        sections = allowed.split(';')
        for section in sections:
            return_value.append(format_allowed_section(section))
    return return_value
[ "def", "format_allowed", "(", "allowed", ")", ":", "return_value", "=", "[", "]", "if", "(", "allowed", ".", "count", "(", "';'", ")", "==", "0", ")", ":", "return", "[", "format_allowed_section", "(", "allowed", ")", "]", "else", ":", "sections", "=", "allowed", ".", "split", "(", "';'", ")", "for", "section", "in", "sections", ":", "return_value", ".", "append", "(", "format_allowed_section", "(", "section", ")", ")", "return", "return_value" ]
format the allowed value so that it is gce compatible .
train
false
48,841
def _run_hook_from_repo_dir(repo_dir, hook_name, project_dir, context):
    with work_in(repo_dir):
        try:
            run_hook(hook_name, project_dir, context)
        except FailedHookException:
            rmtree(project_dir)
            logger.error(u"Stopping generation because {} hook script didn't exit successfully".format(hook_name))
            raise
[ "def", "_run_hook_from_repo_dir", "(", "repo_dir", ",", "hook_name", ",", "project_dir", ",", "context", ")", ":", "with", "work_in", "(", "repo_dir", ")", ":", "try", ":", "run_hook", "(", "hook_name", ",", "project_dir", ",", "context", ")", "except", "FailedHookException", ":", "rmtree", "(", "project_dir", ")", "logger", ".", "error", "(", "u\"Stopping generation because {} hook script didn't exit successfully\"", ".", "format", "(", "hook_name", ")", ")", "raise" ]
run hook from repo directory .
train
true
48,842
def collect_unioned_set_field(block_structure, transformer, merged_field_name, filter_by):
    for block_key in block_structure.topological_traversal():
        result_set = ({block_key} if filter_by(block_key) else set())
        for parent in block_structure.get_parents(block_key):
            result_set |= block_structure.get_transformer_block_field(parent, transformer, merged_field_name, set())
        block_structure.set_transformer_block_field(block_key, transformer, merged_field_name, result_set)
[ "def", "collect_unioned_set_field", "(", "block_structure", ",", "transformer", ",", "merged_field_name", ",", "filter_by", ")", ":", "for", "block_key", "in", "block_structure", ".", "topological_traversal", "(", ")", ":", "result_set", "=", "(", "{", "block_key", "}", "if", "filter_by", "(", "block_key", ")", "else", "set", "(", ")", ")", "for", "parent", "in", "block_structure", ".", "get_parents", "(", "block_key", ")", ":", "result_set", "|=", "block_structure", ".", "get_transformer_block_field", "(", "parent", ",", "transformer", ",", "merged_field_name", ",", "set", "(", ")", ")", "block_structure", ".", "set_transformer_block_field", "(", "block_key", ",", "transformer", ",", "merged_field_name", ",", "result_set", ")" ]
recursively union a set field on the block structure .
train
false
48,844
def copy_byte(src, dest):
    byte = src.read(1)
    dest.write(byte)
    (val,) = unpack('B', byte)
    return val
[ "def", "copy_byte", "(", "src", ",", "dest", ")", ":", "byte", "=", "src", ".", "read", "(", "1", ")", "dest", ".", "write", "(", "byte", ")", "(", "val", ",", ")", "=", "unpack", "(", "'B'", ",", "byte", ")", "return", "val" ]
copy an unsigned byte between files .
train
false
48,846
def is_import(node):
    return (node.type in (syms.import_name, syms.import_from))
[ "def", "is_import", "(", "node", ")", ":", "return", "(", "node", ".", "type", "in", "(", "syms", ".", "import_name", ",", "syms", ".", "import_from", ")", ")" ]
returns true if the node is an import statement .
train
false
48,847
def _get_words_from_dataset(dataset):
    def tokenize(words):
        if isinstance(words, basestring):
            return word_tokenize(words, include_punc=False)
        else:
            return words
    all_words = chain.from_iterable((tokenize(words) for (words, _) in dataset))
    return set(all_words)
[ "def", "_get_words_from_dataset", "(", "dataset", ")", ":", "def", "tokenize", "(", "words", ")", ":", "if", "isinstance", "(", "words", ",", "basestring", ")", ":", "return", "word_tokenize", "(", "words", ",", "include_punc", "=", "False", ")", "else", ":", "return", "words", "all_words", "=", "chain", ".", "from_iterable", "(", "(", "tokenize", "(", "words", ")", "for", "(", "words", ",", "_", ")", "in", "dataset", ")", ")", "return", "set", "(", "all_words", ")" ]
return a set of all words in a dataset .
train
true
48,848
def rm_handlers(app, handlers, key=None):
    for (handler_name, func) in handlers.iteritems():
        rm_handler(app, handler_name, func, key=key)
[ "def", "rm_handlers", "(", "app", ",", "handlers", ",", "key", "=", "None", ")", ":", "for", "(", "handler_name", ",", "func", ")", "in", "handlers", ".", "iteritems", "(", ")", ":", "rm_handler", "(", "app", ",", "handler_name", ",", "func", ",", "key", "=", "key", ")" ]
remove multiple handlers from an application .
train
false
48,850
def _emit_problem_submitted_event(kwargs):
    root_type = get_event_transaction_type()
    if (not root_type):
        root_id = get_event_transaction_id()
        if (not root_id):
            root_id = create_new_event_transaction_id()
        set_event_transaction_type(PROBLEM_SUBMITTED_EVENT_TYPE)
        tracker.emit(unicode(PROBLEM_SUBMITTED_EVENT_TYPE), {
            'user_id': unicode(kwargs['user_id']),
            'course_id': unicode(kwargs['course_id']),
            'problem_id': unicode(kwargs['usage_id']),
            'event_transaction_id': unicode(root_id),
            'event_transaction_type': unicode(PROBLEM_SUBMITTED_EVENT_TYPE),
            'weighted_earned': kwargs.get('weighted_earned'),
            'weighted_possible': kwargs.get('weighted_possible')})
[ "def", "_emit_problem_submitted_event", "(", "kwargs", ")", ":", "root_type", "=", "get_event_transaction_type", "(", ")", "if", "(", "not", "root_type", ")", ":", "root_id", "=", "get_event_transaction_id", "(", ")", "if", "(", "not", "root_id", ")", ":", "root_id", "=", "create_new_event_transaction_id", "(", ")", "set_event_transaction_type", "(", "PROBLEM_SUBMITTED_EVENT_TYPE", ")", "tracker", ".", "emit", "(", "unicode", "(", "PROBLEM_SUBMITTED_EVENT_TYPE", ")", ",", "{", "'user_id'", ":", "unicode", "(", "kwargs", "[", "'user_id'", "]", ")", ",", "'course_id'", ":", "unicode", "(", "kwargs", "[", "'course_id'", "]", ")", ",", "'problem_id'", ":", "unicode", "(", "kwargs", "[", "'usage_id'", "]", ")", ",", "'event_transaction_id'", ":", "unicode", "(", "root_id", ")", ",", "'event_transaction_type'", ":", "unicode", "(", "PROBLEM_SUBMITTED_EVENT_TYPE", ")", ",", "'weighted_earned'", ":", "kwargs", ".", "get", "(", "'weighted_earned'", ")", ",", "'weighted_possible'", ":", "kwargs", ".", "get", "(", "'weighted_possible'", ")", "}", ")" ]
emits a problem submitted event only if there is no current event transaction type .
train
false
48,851
def test_url_incorrect_case_no_index(script, data):
    result = script.pip('install', '--no-index', '-f', data.find_links, 'upper')
    egg_folder = ((script.site_packages / 'Upper-1.0-py%s.egg-info') % pyversion)
    assert (egg_folder not in result.files_created), str(result)
    egg_folder = ((script.site_packages / 'Upper-2.0-py%s.egg-info') % pyversion)
    assert (egg_folder in result.files_created), str(result)
[ "def", "test_url_incorrect_case_no_index", "(", "script", ",", "data", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--no-index'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'upper'", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'Upper-1.0-py%s.egg-info'", ")", "%", "pyversion", ")", "assert", "(", "egg_folder", "not", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'Upper-2.0-py%s.egg-info'", ")", "%", "pyversion", ")", "assert", "(", "egg_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")" ]
same as test_url_req_case_mismatch_no_index .
train
false
48,852
def get_fg_bg(name):
    if requires_special_home_display(name):
        return (Color.HOME_FG, Color.HOME_BG)
    return (Color.PATH_FG, Color.PATH_BG)
[ "def", "get_fg_bg", "(", "name", ")", ":", "if", "requires_special_home_display", "(", "name", ")", ":", "return", "(", "Color", ".", "HOME_FG", ",", "Color", ".", "HOME_BG", ")", "return", "(", "Color", ".", "PATH_FG", ",", "Color", ".", "PATH_BG", ")" ]
returns the foreground and background color to use for the given name .
train
false
48,853
def _import_status(data, item, repo_name, repo_tag):
    status = item['status']
    try:
        if ('Downloading from' in status):
            return
        elif all(((x in string.hexdigits) for x in status)):
            data['Image'] = '{0}:{1}'.format(repo_name, repo_tag)
            data['Id'] = status
    except (AttributeError, TypeError):
        pass
[ "def", "_import_status", "(", "data", ",", "item", ",", "repo_name", ",", "repo_tag", ")", ":", "status", "=", "item", "[", "'status'", "]", "try", ":", "if", "(", "'Downloading from'", "in", "status", ")", ":", "return", "elif", "all", "(", "(", "(", "x", "in", "string", ".", "hexdigits", ")", "for", "x", "in", "status", ")", ")", ":", "data", "[", "'Image'", "]", "=", "'{0}:{1}'", ".", "format", "(", "repo_name", ",", "repo_tag", ")", "data", "[", "'Id'", "]", "=", "status", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "pass" ]
process a status update from docker import .
train
true
48,855
def channelfromname(user):
    cached = userdata_cached(user)
    if cached:
        (user, channel_id) = cached
    else:
        qs = {'part': 'id,snippet', 'maxResults': 1, 'q': user, 'type': 'channel'}
        try:
            userinfo = pafy.call_gdata('search', qs)['items']
            if (len(userinfo) > 0):
                snippet = userinfo[0].get('snippet', {})
                channel_id = snippet.get('channelId', user)
                username = snippet.get('title', user)
                user = cache_userdata(user, username, channel_id)[0]
            else:
                g.message = 'User {} not found.'.format(((c.y + user) + c.w))
                return
        except pafy.GdataError as e:
            g.message = 'Could not retrieve information for user {}\n{}'.format(((c.y + user) + c.w), e)
            util.dbg('Error during channel request for user {}:\n{}'.format(user, e))
            return
    return (user, channel_id)
[ "def", "channelfromname", "(", "user", ")", ":", "cached", "=", "userdata_cached", "(", "user", ")", "if", "cached", ":", "(", "user", ",", "channel_id", ")", "=", "cached", "else", ":", "qs", "=", "{", "'part'", ":", "'id,snippet'", ",", "'maxResults'", ":", "1", ",", "'q'", ":", "user", ",", "'type'", ":", "'channel'", "}", "try", ":", "userinfo", "=", "pafy", ".", "call_gdata", "(", "'search'", ",", "qs", ")", "[", "'items'", "]", "if", "(", "len", "(", "userinfo", ")", ">", "0", ")", ":", "snippet", "=", "userinfo", "[", "0", "]", ".", "get", "(", "'snippet'", ",", "{", "}", ")", "channel_id", "=", "snippet", ".", "get", "(", "'channelId'", ",", "user", ")", "username", "=", "snippet", ".", "get", "(", "'title'", ",", "user", ")", "user", "=", "cache_userdata", "(", "user", ",", "username", ",", "channel_id", ")", "[", "0", "]", "else", ":", "g", ".", "message", "=", "'User {} not found.'", ".", "format", "(", "(", "(", "c", ".", "y", "+", "user", ")", "+", "c", ".", "w", ")", ")", "return", "except", "pafy", ".", "GdataError", "as", "e", ":", "g", ".", "message", "=", "'Could not retrieve information for user {}\\n{}'", ".", "format", "(", "(", "(", "c", ".", "y", "+", "user", ")", "+", "c", ".", "w", ")", ",", "e", ")", "util", ".", "dbg", "(", "'Error during channel request for user {}:\\n{}'", ".", "format", "(", "user", ",", "e", ")", ")", "return", "return", "(", "user", ",", "channel_id", ")" ]
query channel id from username .
train
false
48,856
def test_ncr_fit_single_class():
    ncr = NeighbourhoodCleaningRule(random_state=RND_SEED)
    y_single_class = np.zeros((X.shape[0],))
    assert_warns(UserWarning, ncr.fit, X, y_single_class)
[ "def", "test_ncr_fit_single_class", "(", ")", ":", "ncr", "=", "NeighbourhoodCleaningRule", "(", "random_state", "=", "RND_SEED", ")", "y_single_class", "=", "np", ".", "zeros", "(", "(", "X", ".", "shape", "[", "0", "]", ",", ")", ")", "assert_warns", "(", "UserWarning", ",", "ncr", ".", "fit", ",", "X", ",", "y_single_class", ")" ]
test either if an error when there is a single class .
train
false
48,857
def sqlite_version():
    return sqlite3.sqlite_version
[ "def", "sqlite_version", "(", ")", ":", "return", "sqlite3", ".", "sqlite_version" ]
return version of sqlite cli example: .
train
false
48,858
def draw_box(point1, point2, color=colors.lightgreen, border=None, colour=None, **kwargs):
    (x1, y1) = point1
    (x2, y2) = point2
    if (colour is not None):
        color = colour
        del colour
    (strokecolor, color) = _stroke_and_fill_colors(color, border)
    (x1, y1, x2, y2) = (min(x1, x2), min(y1, y2), max(x1, x2), max(y1, y2))
    return Polygon([x1, y1, x2, y1, x2, y2, x1, y2], strokeColor=strokecolor, fillColor=color, strokewidth=0, **kwargs)
[ "def", "draw_box", "(", "point1", ",", "point2", ",", "color", "=", "colors", ".", "lightgreen", ",", "border", "=", "None", ",", "colour", "=", "None", ",", "**", "kwargs", ")", ":", "(", "x1", ",", "y1", ")", "=", "point1", "(", "x2", ",", "y2", ")", "=", "point2", "if", "(", "colour", "is", "not", "None", ")", ":", "color", "=", "colour", "del", "colour", "(", "strokecolor", ",", "color", ")", "=", "_stroke_and_fill_colors", "(", "color", ",", "border", ")", "(", "x1", ",", "y1", ",", "x2", ",", "y2", ")", "=", "(", "min", "(", "x1", ",", "x2", ")", ",", "min", "(", "y1", ",", "y2", ")", ",", "max", "(", "x1", ",", "x2", ")", ",", "max", "(", "y1", ",", "y2", ")", ")", "return", "Polygon", "(", "[", "x1", ",", "y1", ",", "x2", ",", "y1", ",", "x2", ",", "y2", ",", "x1", ",", "y2", "]", ",", "strokeColor", "=", "strokecolor", ",", "fillColor", "=", "color", ",", "strokewidth", "=", "0", ",", "**", "kwargs", ")" ]
draw a box .
train
false
48,860
def _safely_castable_to_int(dt):
    int_size = np.dtype(int).itemsize
    safe = ((np.issubdtype(dt, int) and (dt.itemsize <= int_size)) or
            (np.issubdtype(dt, np.unsignedinteger) and (dt.itemsize < int_size)))
    return safe
[ "def", "_safely_castable_to_int", "(", "dt", ")", ":", "int_size", "=", "np", ".", "dtype", "(", "int", ")", ".", "itemsize", "safe", "=", "(", "(", "np", ".", "issubdtype", "(", "dt", ",", "int", ")", "and", "(", "dt", ".", "itemsize", "<=", "int_size", ")", ")", "or", "(", "np", ".", "issubdtype", "(", "dt", ",", "np", ".", "unsignedinteger", ")", "and", "(", "dt", ".", "itemsize", "<", "int_size", ")", ")", ")", "return", "safe" ]
test whether the numpy data type dt can be safely cast to an int .
train
false
48,864
def _check_ori(pick_ori):
    if ((pick_ori is not None) and (pick_ori != 'normal')):
        raise RuntimeError(('pick_ori must be None or "normal", not %s' % pick_ori))
    return pick_ori
[ "def", "_check_ori", "(", "pick_ori", ")", ":", "if", "(", "(", "pick_ori", "is", "not", "None", ")", "and", "(", "pick_ori", "!=", "'normal'", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'pick_ori must be None or \"normal\", not %s'", "%", "pick_ori", ")", ")", "return", "pick_ori" ]
check pick_ori .
train
false
48,867
def get_override_for_ccx(ccx, block, name, default=None):
    overrides = _get_overrides_for_ccx(ccx)
    clean_ccx_key = _clean_ccx_key(block.location)
    block_overrides = overrides.get(clean_ccx_key, {})
    block_overrides['course_edit_method'] = None
    if (name in block_overrides):
        try:
            return block.fields[name].from_json(block_overrides[name])
        except KeyError:
            return block_overrides[name]
    else:
        return default
[ "def", "get_override_for_ccx", "(", "ccx", ",", "block", ",", "name", ",", "default", "=", "None", ")", ":", "overrides", "=", "_get_overrides_for_ccx", "(", "ccx", ")", "clean_ccx_key", "=", "_clean_ccx_key", "(", "block", ".", "location", ")", "block_overrides", "=", "overrides", ".", "get", "(", "clean_ccx_key", ",", "{", "}", ")", "block_overrides", "[", "'course_edit_method'", "]", "=", "None", "if", "(", "name", "in", "block_overrides", ")", ":", "try", ":", "return", "block", ".", "fields", "[", "name", "]", ".", "from_json", "(", "block_overrides", "[", "name", "]", ")", "except", "KeyError", ":", "return", "block_overrides", "[", "name", "]", "else", ":", "return", "default" ]
gets the value of the overridden field for the ccx .
train
false
48,868
def instance_get_all_by_host_and_node(context, host, node, columns_to_join=None):
    return IMPL.instance_get_all_by_host_and_node(context, host, node,
                                                  columns_to_join=columns_to_join)
[ "def", "instance_get_all_by_host_and_node", "(", "context", ",", "host", ",", "node", ",", "columns_to_join", "=", "None", ")", ":", "return", "IMPL", ".", "instance_get_all_by_host_and_node", "(", "context", ",", "host", ",", "node", ",", "columns_to_join", "=", "columns_to_join", ")" ]
get all instances belonging to a node .
train
false
48,870
def TestFlowHelper(flow_urn_or_cls_name, client_mock=None, client_id=None,
                   check_flow_errors=True, token=None, notification_event=None,
                   sync=True, **kwargs):
    if (client_id or client_mock):
        client_mock = MockClient(client_id, client_mock, token=token)
    worker_mock = MockWorker(check_flow_errors=check_flow_errors, token=token)
    if isinstance(flow_urn_or_cls_name, rdfvalue.RDFURN):
        session_id = flow_urn_or_cls_name
    else:
        session_id = flow.GRRFlow.StartFlow(client_id=client_id,
                                            flow_name=flow_urn_or_cls_name,
                                            notification_event=notification_event,
                                            sync=sync, token=token, **kwargs)
    total_flows = set()
    total_flows.add(session_id)
    while True:
        if client_mock:
            client_processed = client_mock.Next()
        else:
            client_processed = 0
        flows_run = []
        for flow_run in worker_mock.Next():
            total_flows.add(flow_run)
            flows_run.append(flow_run)
        if ((client_processed == 0) and (not flows_run)):
            break
    (yield session_id)
    if check_flow_errors:
        CheckFlowErrors(total_flows, token=token)
[ "def", "TestFlowHelper", "(", "flow_urn_or_cls_name", ",", "client_mock", "=", "None", ",", "client_id", "=", "None", ",", "check_flow_errors", "=", "True", ",", "token", "=", "None", ",", "notification_event", "=", "None", ",", "sync", "=", "True", ",", "**", "kwargs", ")", ":", "if", "(", "client_id", "or", "client_mock", ")", ":", "client_mock", "=", "MockClient", "(", "client_id", ",", "client_mock", ",", "token", "=", "token", ")", "worker_mock", "=", "MockWorker", "(", "check_flow_errors", "=", "check_flow_errors", ",", "token", "=", "token", ")", "if", "isinstance", "(", "flow_urn_or_cls_name", ",", "rdfvalue", ".", "RDFURN", ")", ":", "session_id", "=", "flow_urn_or_cls_name", "else", ":", "session_id", "=", "flow", ".", "GRRFlow", ".", "StartFlow", "(", "client_id", "=", "client_id", ",", "flow_name", "=", "flow_urn_or_cls_name", ",", "notification_event", "=", "notification_event", ",", "sync", "=", "sync", ",", "token", "=", "token", ",", "**", "kwargs", ")", "total_flows", "=", "set", "(", ")", "total_flows", ".", "add", "(", "session_id", ")", "while", "True", ":", "if", "client_mock", ":", "client_processed", "=", "client_mock", ".", "Next", "(", ")", "else", ":", "client_processed", "=", "0", "flows_run", "=", "[", "]", "for", "flow_run", "in", "worker_mock", ".", "Next", "(", ")", ":", "total_flows", ".", "add", "(", "flow_run", ")", "flows_run", ".", "append", "(", "flow_run", ")", "if", "(", "(", "client_processed", "==", "0", ")", "and", "(", "not", "flows_run", ")", ")", ":", "break", "(", "yield", "session_id", ")", "if", "check_flow_errors", ":", "CheckFlowErrors", "(", "total_flows", ",", "token", "=", "token", ")" ]
build a full test harness: client - worker + start flow .
train
false
48,871
def isNumber(value):
    try:
        float(value)
    except:
        return False
    else:
        return True
[ "def", "isNumber", "(", "value", ")", ":", "try", ":", "float", "(", "value", ")", "except", ":", "return", "False", "else", ":", "return", "True" ]
returns true if the given value is a number-like object .
train
false
48,872
def mime_encode_header(line):
    newline = ''
    pos = 0
    while 1:
        res = mime_header.search(line, pos)
        if (res is None):
            break
        newline = ('%s%s%s=?%s?Q?%s?=' % (newline, line[pos:res.start(0)], res.group(1), CHARSET, mime_encode(res.group(2), 1)))
        pos = res.end(0)
    return (newline + line[pos:])
[ "def", "mime_encode_header", "(", "line", ")", ":", "newline", "=", "''", "pos", "=", "0", "while", "1", ":", "res", "=", "mime_header", ".", "search", "(", "line", ",", "pos", ")", "if", "(", "res", "is", "None", ")", ":", "break", "newline", "=", "(", "'%s%s%s=?%s?Q?%s?='", "%", "(", "newline", ",", "line", "[", "pos", ":", "res", ".", "start", "(", "0", ")", "]", ",", "res", ".", "group", "(", "1", ")", ",", "CHARSET", ",", "mime_encode", "(", "res", ".", "group", "(", "2", ")", ",", "1", ")", ")", ")", "pos", "=", "res", ".", "end", "(", "0", ")", "return", "(", "newline", "+", "line", "[", "pos", ":", "]", ")" ]
code a single header line as quoted-printable .
train
false
48,875
def get_vm_boot_spec(client_factory, device):
    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    boot_disk = client_factory.create('ns0:VirtualMachineBootOptionsBootableDiskDevice')
    boot_disk.deviceKey = device.key
    boot_options = client_factory.create('ns0:VirtualMachineBootOptions')
    boot_options.bootOrder = [boot_disk]
    config_spec.bootOptions = boot_options
    return config_spec
[ "def", "get_vm_boot_spec", "(", "client_factory", ",", "device", ")", ":", "config_spec", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineConfigSpec'", ")", "boot_disk", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineBootOptionsBootableDiskDevice'", ")", "boot_disk", ".", "deviceKey", "=", "device", ".", "key", "boot_options", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineBootOptions'", ")", "boot_options", ".", "bootOrder", "=", "[", "boot_disk", "]", "config_spec", ".", "bootOptions", "=", "boot_options", "return", "config_spec" ]
returns updated boot settings for the instance .
train
false
48,876
def get_cls_by_name(name, aliases={}, imp=None, package=None, **kwargs):
    if (imp is None):
        imp = importlib.import_module
    if (not isinstance(name, basestring)):
        return name
    name = (aliases.get(name) or name)
    (module_name, _, cls_name) = name.rpartition('.')
    if ((not module_name) and package):
        module_name = package
    try:
        module = imp(module_name, package=package, **kwargs)
    except ValueError as exc:
        raise ValueError(("Couldn't import %r: %s" % (name, exc)))
    return getattr(module, cls_name)
[ "def", "get_cls_by_name", "(", "name", ",", "aliases", "=", "{", "}", ",", "imp", "=", "None", ",", "package", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "imp", "is", "None", ")", ":", "imp", "=", "importlib", ".", "import_module", "if", "(", "not", "isinstance", "(", "name", ",", "basestring", ")", ")", ":", "return", "name", "name", "=", "(", "aliases", ".", "get", "(", "name", ")", "or", "name", ")", "(", "module_name", ",", "_", ",", "cls_name", ")", "=", "name", ".", "rpartition", "(", "'.'", ")", "if", "(", "(", "not", "module_name", ")", "and", "package", ")", ":", "module_name", "=", "package", "try", ":", "module", "=", "imp", "(", "module_name", ",", "package", "=", "package", ",", "**", "kwargs", ")", "except", "ValueError", "as", "exc", ":", "raise", "ValueError", "(", "(", "\"Couldn't import %r: %s\"", "%", "(", "name", ",", "exc", ")", ")", ")", "return", "getattr", "(", "module", ",", "cls_name", ")" ]
get class by name .
train
false
48,879
def pformat(temp, **fmt):
    formatter = Formatter()
    mapping = _FormatDict(fmt)
    return formatter.vformat(temp, (), mapping)
[ "def", "pformat", "(", "temp", ",", "**", "fmt", ")", ":", "formatter", "=", "Formatter", "(", ")", "mapping", "=", "_FormatDict", "(", "fmt", ")", "return", "formatter", ".", "vformat", "(", "temp", ",", "(", ")", ",", "mapping", ")" ]
partially format a template string .
train
false
48,880
def write_error(request, code, message):
    error = {'error': {'code': code, 'message': message}}
    request.set_status(code)
    request.write(json.dumps(error))
[ "def", "write_error", "(", "request", ",", "code", ",", "message", ")", ":", "error", "=", "{", "'error'", ":", "{", "'code'", ":", "code", ",", "'message'", ":", "message", "}", "}", "request", ".", "set_status", "(", "code", ")", "request", ".", "write", "(", "json", ".", "dumps", "(", "error", ")", ")" ]
sets the response headers and body for error messages .
train
false
48,882
def symbol_string(symbol):
    if (symbol < (1 << 32)):
        return _key_names.get(symbol, str(symbol))
    else:
        return ('user_key(%x)' % (symbol >> 32))
[ "def", "symbol_string", "(", "symbol", ")", ":", "if", "(", "symbol", "<", "(", "1", "<<", "32", ")", ")", ":", "return", "_key_names", ".", "get", "(", "symbol", ",", "str", "(", "symbol", ")", ")", "else", ":", "return", "(", "'user_key(%x)'", "%", "(", "symbol", ">>", "32", ")", ")" ]
return a string describing a key symbol .
train
false
48,883
def iter_importers(fullname=''):
    if fullname.startswith('.'):
        raise ImportError('Relative module names not supported')
    if ('.' in fullname):
        pkg = '.'.join(fullname.split('.')[:(-1)])
        if (pkg not in sys.modules):
            __import__(pkg)
        path = (getattr(sys.modules[pkg], '__path__', None) or [])
    else:
        for importer in sys.meta_path:
            (yield importer)
        path = sys.path
    for item in path:
        (yield get_importer(item))
    if ('.' not in fullname):
        (yield ImpImporter())
[ "def", "iter_importers", "(", "fullname", "=", "''", ")", ":", "if", "fullname", ".", "startswith", "(", "'.'", ")", ":", "raise", "ImportError", "(", "'Relative module names not supported'", ")", "if", "(", "'.'", "in", "fullname", ")", ":", "pkg", "=", "'.'", ".", "join", "(", "fullname", ".", "split", "(", "'.'", ")", "[", ":", "(", "-", "1", ")", "]", ")", "if", "(", "pkg", "not", "in", "sys", ".", "modules", ")", ":", "__import__", "(", "pkg", ")", "path", "=", "(", "getattr", "(", "sys", ".", "modules", "[", "pkg", "]", ",", "'__path__'", ",", "None", ")", "or", "[", "]", ")", "else", ":", "for", "importer", "in", "sys", ".", "meta_path", ":", "(", "yield", "importer", ")", "path", "=", "sys", ".", "path", "for", "item", "in", "path", ":", "(", "yield", "get_importer", "(", "item", ")", ")", "if", "(", "'.'", "not", "in", "fullname", ")", ":", "(", "yield", "ImpImporter", "(", ")", ")" ]
yield pep 302 importers for the given module name if fullname contains a .
train
true
48,884
def is_aperiodic(G):
    if (not G.is_directed()):
        raise nx.NetworkXError('is_aperiodic not defined for undirected graphs')
    s = arbitrary_element(G)
    levels = {s: 0}
    this_level = [s]
    g = 0
    l = 1
    while this_level:
        next_level = []
        for u in this_level:
            for v in G[u]:
                if (v in levels):
                    g = gcd(g, ((levels[u] - levels[v]) + 1))
                else:
                    next_level.append(v)
                    levels[v] = l
        this_level = next_level
        l += 1
    if (len(levels) == len(G)):
        return (g == 1)
    else:
        return ((g == 1) and nx.is_aperiodic(G.subgraph((set(G) - set(levels)))))
[ "def", "is_aperiodic", "(", "G", ")", ":", "if", "(", "not", "G", ".", "is_directed", "(", ")", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'is_aperiodic not defined for undirected graphs'", ")", "s", "=", "arbitrary_element", "(", "G", ")", "levels", "=", "{", "s", ":", "0", "}", "this_level", "=", "[", "s", "]", "g", "=", "0", "l", "=", "1", "while", "this_level", ":", "next_level", "=", "[", "]", "for", "u", "in", "this_level", ":", "for", "v", "in", "G", "[", "u", "]", ":", "if", "(", "v", "in", "levels", ")", ":", "g", "=", "gcd", "(", "g", ",", "(", "(", "levels", "[", "u", "]", "-", "levels", "[", "v", "]", ")", "+", "1", ")", ")", "else", ":", "next_level", ".", "append", "(", "v", ")", "levels", "[", "v", "]", "=", "l", "this_level", "=", "next_level", "l", "+=", "1", "if", "(", "len", "(", "levels", ")", "==", "len", "(", "G", ")", ")", ":", "return", "(", "g", "==", "1", ")", "else", ":", "return", "(", "(", "g", "==", "1", ")", "and", "nx", ".", "is_aperiodic", "(", "G", ".", "subgraph", "(", "(", "set", "(", "G", ")", "-", "set", "(", "levels", ")", ")", ")", ")", ")" ]
return true if g is aperiodic .
train
false
48,885
def _get_critical_checks_snippet(request, unit):
    if (not unit.has_critical_checks()):
        return None
    can_review = check_user_permission(request.user, 'review', unit.store.parent)
    ctx = {'canreview': can_review, 'unit': unit}
    template = loader.get_template('editor/units/xhr_checks.html')
    return template.render(context=ctx, request=request)
[ "def", "_get_critical_checks_snippet", "(", "request", ",", "unit", ")", ":", "if", "(", "not", "unit", ".", "has_critical_checks", "(", ")", ")", ":", "return", "None", "can_review", "=", "check_user_permission", "(", "request", ".", "user", ",", "'review'", ",", "unit", ".", "store", ".", "parent", ")", "ctx", "=", "{", "'canreview'", ":", "can_review", ",", "'unit'", ":", "unit", "}", "template", "=", "loader", ".", "get_template", "(", "'editor/units/xhr_checks.html'", ")", "return", "template", ".", "render", "(", "context", "=", "ctx", ",", "request", "=", "request", ")" ]
retrieves the critical checks snippet .
train
false
48,886
def delete_orphaned_document_files():
    documents_path = os.path.join(settings.MEDIA_ROOT, 'documents')
    for filename in os.listdir(documents_path):
        fn = os.path.join(documents_path, filename)
        if (Document.objects.filter(doc_file__contains=filename).count() == 0):
            print ('Removing orphan document %s' % fn)
            try:
                os.remove(fn)
            except OSError:
                print ('Could not delete file %s' % fn)
[ "def", "delete_orphaned_document_files", "(", ")", ":", "documents_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "'documents'", ")", "for", "filename", "in", "os", ".", "listdir", "(", "documents_path", ")", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "documents_path", ",", "filename", ")", "if", "(", "Document", ".", "objects", ".", "filter", "(", "doc_file__contains", "=", "filename", ")", ".", "count", "(", ")", "==", "0", ")", ":", "print", "(", "'Removing orphan document %s'", "%", "fn", ")", "try", ":", "os", ".", "remove", "(", "fn", ")", "except", "OSError", ":", "print", "(", "'Could not delete file %s'", "%", "fn", ")" ]
deletes orphaned files of deleted documents .
train
false
48,887
@treeio_login_required
def widget_welcome(request, response_format='html'):
    trial = False
    if (getattr(settings, 'HARDTREE_SUBSCRIPTION_USER_LIMIT') == 3):
        trial = True
    customization = getattr(settings, 'HARDTREE_SUBSCRIPTION_CUSTOMIZATION', True)
    return render_to_response('core/widgets/welcome', {'trial': trial, 'customization': customization}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "widget_welcome", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "trial", "=", "False", "if", "(", "getattr", "(", "settings", ",", "'HARDTREE_SUBSCRIPTION_USER_LIMIT'", ")", "==", "3", ")", ":", "trial", "=", "True", "customization", "=", "getattr", "(", "settings", ",", "'HARDTREE_SUBSCRIPTION_CUSTOMIZATION'", ",", "True", ")", "return", "render_to_response", "(", "'core/widgets/welcome'", ",", "{", "'trial'", ":", "trial", ",", "'customization'", ":", "customization", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
quick start widget .
train
false
48,888
def preDeploy(site):
    pass
[ "def", "preDeploy", "(", "site", ")", ":", "pass" ]
called prior to deploying the site a good time to configure custom headers .
train
false
48,889
def make_createblockdevicedataset_mixin(profiled_api):
    class Mixin(CreateBlockDeviceDatasetImplementationMixin, TestCase):
        def setUp(self):
            super(Mixin, self).setUp()
            if profiled_api:
                self.api = fakeprofiledloopbackblockdeviceapi_for_test(self, allocation_unit=LOOPBACK_ALLOCATION_UNIT)
            else:
                self.api = loopbackblockdeviceapi_for_test(self, allocation_unit=LOOPBACK_ALLOCATION_UNIT)
            self.mountroot = mountroot_for_test(self)
            self.deployer = BlockDeviceDeployer(node_uuid=uuid4(), hostname=u'192.0.2.10', block_device_api=self.api, mountroot=self.mountroot)
    return Mixin
[ "def", "make_createblockdevicedataset_mixin", "(", "profiled_api", ")", ":", "class", "Mixin", "(", "CreateBlockDeviceDatasetImplementationMixin", ",", "TestCase", ",", ")", ":", "def", "setUp", "(", "self", ")", ":", "super", "(", "Mixin", ",", "self", ")", ".", "setUp", "(", ")", "if", "profiled_api", ":", "self", ".", "api", "=", "fakeprofiledloopbackblockdeviceapi_for_test", "(", "self", ",", "allocation_unit", "=", "LOOPBACK_ALLOCATION_UNIT", ")", "else", ":", "self", ".", "api", "=", "loopbackblockdeviceapi_for_test", "(", "self", ",", "allocation_unit", "=", "LOOPBACK_ALLOCATION_UNIT", ")", "self", ".", "mountroot", "=", "mountroot_for_test", "(", "self", ")", "self", ".", "deployer", "=", "BlockDeviceDeployer", "(", "node_uuid", "=", "uuid4", "(", ")", ",", "hostname", "=", "u'192.0.2.10'", ",", "block_device_api", "=", "self", ".", "api", ",", "mountroot", "=", "self", ".", "mountroot", ")", "return", "Mixin" ]
constructs a base class for tests that verify the implementation of createblockdevicedataset .
train
false
48,891
def auto_sign_file(file_, is_beta=False):
    addon = file_.version.addon
    if file_.is_experiment:
        amo.log(amo.LOG.EXPERIMENT_SIGNED, file_)
        sign_file(file_, settings.SIGNING_SERVER)
    elif is_beta:
        if file_.validation.passed_auto_validation:
            amo.log(amo.LOG.BETA_SIGNED_VALIDATION_PASSED, file_)
        else:
            amo.log(amo.LOG.BETA_SIGNED_VALIDATION_FAILED, file_)
        sign_file(file_, settings.SIGNING_SERVER)
    elif (file_.version.channel == amo.RELEASE_CHANNEL_UNLISTED):
        helper = ReviewHelper(request=None, addon=addon, version=file_.version)
        helper.set_data({'addon_files': [file_], 'comments': 'automatic validation'})
        helper.handler.process_public()
        if file_.validation.passed_auto_validation:
            amo.log(amo.LOG.UNLISTED_SIGNED_VALIDATION_PASSED, file_)
        else:
            amo.log(amo.LOG.UNLISTED_SIGNED_VALIDATION_FAILED, file_)
[ "def", "auto_sign_file", "(", "file_", ",", "is_beta", "=", "False", ")", ":", "addon", "=", "file_", ".", "version", ".", "addon", "if", "file_", ".", "is_experiment", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "EXPERIMENT_SIGNED", ",", "file_", ")", "sign_file", "(", "file_", ",", "settings", ".", "SIGNING_SERVER", ")", "elif", "is_beta", ":", "if", "file_", ".", "validation", ".", "passed_auto_validation", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "BETA_SIGNED_VALIDATION_PASSED", ",", "file_", ")", "else", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "BETA_SIGNED_VALIDATION_FAILED", ",", "file_", ")", "sign_file", "(", "file_", ",", "settings", ".", "SIGNING_SERVER", ")", "elif", "(", "file_", ".", "version", ".", "channel", "==", "amo", ".", "RELEASE_CHANNEL_UNLISTED", ")", ":", "helper", "=", "ReviewHelper", "(", "request", "=", "None", ",", "addon", "=", "addon", ",", "version", "=", "file_", ".", "version", ")", "helper", ".", "set_data", "(", "{", "'addon_files'", ":", "[", "file_", "]", ",", "'comments'", ":", "'automatic validation'", "}", ")", "helper", ".", "handler", ".", "process_public", "(", ")", "if", "file_", ".", "validation", ".", "passed_auto_validation", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "UNLISTED_SIGNED_VALIDATION_PASSED", ",", "file_", ")", "else", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "UNLISTED_SIGNED_VALIDATION_FAILED", ",", "file_", ")" ]
if the file should be automatically reviewed and signed .
train
false
48,892
def as_tuple(x, N, t=None):
    try:
        X = tuple(x)
    except TypeError:
        X = ((x,) * N)
    if ((t is not None) and (not all((isinstance(v, t) for v in X)))):
        raise TypeError('expected a single value or an iterable of {0}, got {1} instead'.format(t.__name__, x))
    if (len(X) != N):
        raise ValueError('expected a single value or an iterable with length {0}, got {1} instead'.format(N, x))
    return X
[ "def", "as_tuple", "(", "x", ",", "N", ",", "t", "=", "None", ")", ":", "try", ":", "X", "=", "tuple", "(", "x", ")", "except", "TypeError", ":", "X", "=", "(", "(", "x", ",", ")", "*", "N", ")", "if", "(", "(", "t", "is", "not", "None", ")", "and", "(", "not", "all", "(", "(", "isinstance", "(", "v", ",", "t", ")", "for", "v", "in", "X", ")", ")", ")", ")", ":", "raise", "TypeError", "(", "'expected a single value or an iterable of {0}, got {1} instead'", ".", "format", "(", "t", ".", "__name__", ",", "x", ")", ")", "if", "(", "len", "(", "X", ")", "!=", "N", ")", ":", "raise", "ValueError", "(", "'expected a single value or an iterable with length {0}, got {1} instead'", ".", "format", "(", "N", ",", "x", ")", ")", "return", "X" ]
coerce a value to a tuple of given length .
train
false
48,893
def log_exc(logger):
    (t, v, tb) = sys.exc_info()
    logger.info(('Exception occured: %s' % t))
    logger.info(('Exception value: %s' % v))
    logger.info(('Exception Info:\n%s' % string.join(traceback.format_list(traceback.extract_tb(tb)))))
[ "def", "log_exc", "(", "logger", ")", ":", "(", "t", ",", "v", ",", "tb", ")", "=", "sys", ".", "exc_info", "(", ")", "logger", ".", "info", "(", "(", "'Exception occured: %s'", "%", "t", ")", ")", "logger", ".", "info", "(", "(", "'Exception value: %s'", "%", "v", ")", ")", "logger", ".", "info", "(", "(", "'Exception Info:\\n%s'", "%", "string", ".", "join", "(", "traceback", ".", "format_list", "(", "traceback", ".", "extract_tb", "(", "tb", ")", ")", ")", ")", ")" ]
log an exception .
train
false
48,895
def get_char_description(char):
    if (char in CHAR_NAMES):
        return CHAR_NAMES[char]
    else:
        return _(u'Insert character {0}').format(char)
[ "def", "get_char_description", "(", "char", ")", ":", "if", "(", "char", "in", "CHAR_NAMES", ")", ":", "return", "CHAR_NAMES", "[", "char", "]", "else", ":", "return", "_", "(", "u'Insert character {0}'", ")", ".", "format", "(", "char", ")" ]
returns verbose description of a character .
train
false
48,898
def bin2float(min_, max_, nbits):
    def wrap(function):
        def wrapped_function(individual, *args, **kargs):
            nelem = (len(individual) / nbits)
            decoded = ([0] * nelem)
            for i in xrange(nelem):
                gene = int(''.join(map(str, individual[(i * nbits):((i * nbits) + nbits)])), 2)
                div = ((2 ** nbits) - 1)
                temp = (float(gene) / float(div))
                decoded[i] = (min_ + (temp * (max_ - min_)))
            return function(decoded, *args, **kargs)
        return wrapped_function
    return wrap
[ "def", "bin2float", "(", "min_", ",", "max_", ",", "nbits", ")", ":", "def", "wrap", "(", "function", ")", ":", "def", "wrapped_function", "(", "individual", ",", "*", "args", ",", "**", "kargs", ")", ":", "nelem", "=", "(", "len", "(", "individual", ")", "/", "nbits", ")", "decoded", "=", "(", "[", "0", "]", "*", "nelem", ")", "for", "i", "in", "xrange", "(", "nelem", ")", ":", "gene", "=", "int", "(", "''", ".", "join", "(", "map", "(", "str", ",", "individual", "[", "(", "i", "*", "nbits", ")", ":", "(", "(", "i", "*", "nbits", ")", "+", "nbits", ")", "]", ")", ")", ",", "2", ")", "div", "=", "(", "(", "2", "**", "nbits", ")", "-", "1", ")", "temp", "=", "(", "float", "(", "gene", ")", "/", "float", "(", "div", ")", ")", "decoded", "[", "i", "]", "=", "(", "min_", "+", "(", "temp", "*", "(", "max_", "-", "min_", ")", ")", ")", "return", "function", "(", "decoded", ",", "*", "args", ",", "**", "kargs", ")", "return", "wrapped_function", "return", "wrap" ]
convert a binary array into an array of float where each float is composed of *nbits* and is between *min_* and *max_* and return the result of the decorated function .
train
false
48,899
def _timedatectl():
    ret = __salt__['cmd.run_all'](['timedatectl'], python_shell=False)
    if (ret['retcode'] != 0):
        msg = 'timedatectl failed: {0}'.format(ret['stderr'])
        raise CommandExecutionError(msg)
    return ret
[ "def", "_timedatectl", "(", ")", ":", "ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "[", "'timedatectl'", "]", ",", "python_shell", "=", "False", ")", "if", "(", "ret", "[", "'retcode'", "]", "!=", "0", ")", ":", "msg", "=", "'timedatectl failed: {0}'", ".", "format", "(", "ret", "[", "'stderr'", "]", ")", "raise", "CommandExecutionError", "(", "msg", ")", "return", "ret" ]
get the output of timedatectl .
train
true
48,900
def __invoke(trans, workflow, workflow_run_config, workflow_invocation=None,
             populate_state=False):
    if populate_state:
        modules.populate_module_and_state(trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections)
    invoker = WorkflowInvoker(trans, workflow, workflow_run_config, workflow_invocation=workflow_invocation)
    try:
        outputs = invoker.invoke()
    except modules.CancelWorkflowEvaluation:
        if workflow_invocation:
            if workflow_invocation.cancel():
                trans.sa_session.add(workflow_invocation)
        outputs = []
    except Exception:
        log.exception('Failed to execute scheduled workflow.')
        if workflow_invocation:
            workflow_invocation.fail()
            trans.sa_session.add(workflow_invocation)
        else:
            raise
        outputs = []
    if workflow_invocation:
        trans.sa_session.flush()
    return (outputs, invoker.workflow_invocation)
[ "def", "__invoke", "(", "trans", ",", "workflow", ",", "workflow_run_config", ",", "workflow_invocation", "=", "None", ",", "populate_state", "=", "False", ")", ":", "if", "populate_state", ":", "modules", ".", "populate_module_and_state", "(", "trans", ",", "workflow", ",", "workflow_run_config", ".", "param_map", ",", "allow_tool_state_corrections", "=", "workflow_run_config", ".", "allow_tool_state_corrections", ")", "invoker", "=", "WorkflowInvoker", "(", "trans", ",", "workflow", ",", "workflow_run_config", ",", "workflow_invocation", "=", "workflow_invocation", ")", "try", ":", "outputs", "=", "invoker", ".", "invoke", "(", ")", "except", "modules", ".", "CancelWorkflowEvaluation", ":", "if", "workflow_invocation", ":", "if", "workflow_invocation", ".", "cancel", "(", ")", ":", "trans", ".", "sa_session", ".", "add", "(", "workflow_invocation", ")", "outputs", "=", "[", "]", "except", "Exception", ":", "log", ".", "exception", "(", "'Failed to execute scheduled workflow.'", ")", "if", "workflow_invocation", ":", "workflow_invocation", ".", "fail", "(", ")", "trans", ".", "sa_session", ".", "add", "(", "workflow_invocation", ")", "else", ":", "raise", "outputs", "=", "[", "]", "if", "workflow_invocation", ":", "trans", ".", "sa_session", ".", "flush", "(", ")", "return", "(", "outputs", ",", "invoker", ".", "workflow_invocation", ")" ]
run the supplied workflow in the supplied target_history .
train
false
48,903
def set_vif_host_backend_vhostuser_config(conf, mode, path):
    conf.net_type = 'vhostuser'
    conf.vhostuser_type = 'unix'
    conf.vhostuser_mode = mode
    conf.vhostuser_path = path
[ "def", "set_vif_host_backend_vhostuser_config", "(", "conf", ",", "mode", ",", "path", ")", ":", "conf", ".", "net_type", "=", "'vhostuser'", "conf", ".", "vhostuser_type", "=", "'unix'", "conf", ".", "vhostuser_mode", "=", "mode", "conf", ".", "vhostuser_path", "=", "path" ]
populate a libvirtconfigguestinterface instance with host backend details for vhostuser socket .
train
false
48,904
def parse_block_scalar_indent(TokenClass):
    def callback(lexer, match, context):
        text = match.group()
        if (context.block_scalar_indent is None):
            if (len(text) <= max(context.indent, 0)):
                context.stack.pop()
                context.stack.pop()
                return
            context.block_scalar_indent = len(text)
        elif (len(text) < context.block_scalar_indent):
            context.stack.pop()
            context.stack.pop()
            return
        if text:
            (yield (match.start(), TokenClass, text))
        context.pos = match.end()
    return callback
[ "def", "parse_block_scalar_indent", "(", "TokenClass", ")", ":", "def", "callback", "(", "lexer", ",", "match", ",", "context", ")", ":", "text", "=", "match", ".", "group", "(", ")", "if", "(", "context", ".", "block_scalar_indent", "is", "None", ")", ":", "if", "(", "len", "(", "text", ")", "<=", "max", "(", "context", ".", "indent", ",", "0", ")", ")", ":", "context", ".", "stack", ".", "pop", "(", ")", "context", ".", "stack", ".", "pop", "(", ")", "return", "context", ".", "block_scalar_indent", "=", "len", "(", "text", ")", "elif", "(", "len", "(", "text", ")", "<", "context", ".", "block_scalar_indent", ")", ":", "context", ".", "stack", ".", "pop", "(", ")", "context", ".", "stack", ".", "pop", "(", ")", "return", "if", "text", ":", "(", "yield", "(", "match", ".", "start", "(", ")", ",", "TokenClass", ",", "text", ")", ")", "context", ".", "pos", "=", "match", ".", "end", "(", ")", "return", "callback" ]
process indentation spaces in a block scalar .
train
true
48,905
def filter_foreign_columns(base_table, columns):
    return filter_list((lambda c: (c.table == base_table)), columns)
[ "def", "filter_foreign_columns", "(", "base_table", ",", "columns", ")", ":", "return", "filter_list", "(", "(", "lambda", "c", ":", "(", "c", ".", "table", "==", "base_table", ")", ")", ",", "columns", ")" ]
return list of columns that belong to passed table .
train
false
48,907
def stripList(listObj):
    return ' '.join((str(i) for i in listObj))
[ "def", "stripList", "(", "listObj", ")", ":", "return", "' '", ".", "join", "(", "(", "str", "(", "i", ")", "for", "i", "in", "listObj", ")", ")" ]
convert a list of numbers to a string of space-separated numbers .
train
false
48,908
def task_install_control_certificates(ca_cert, control_cert, control_key):
    return sequence([
        run('mkdir -p /etc/flocker'),
        run('chmod u=rwX,g=,o= /etc/flocker'),
        put(path='/etc/flocker/cluster.crt', content=ca_cert.getContent()),
        put(path='/etc/flocker/control-service.crt', content=control_cert.getContent()),
        put(path='/etc/flocker/control-service.key', content=control_key.getContent(), log_content_filter=_remove_private_key)])
[ "def", "task_install_control_certificates", "(", "ca_cert", ",", "control_cert", ",", "control_key", ")", ":", "return", "sequence", "(", "[", "run", "(", "'mkdir -p /etc/flocker'", ")", ",", "run", "(", "'chmod u=rwX,g=,o= /etc/flocker'", ")", ",", "put", "(", "path", "=", "'/etc/flocker/cluster.crt'", ",", "content", "=", "ca_cert", ".", "getContent", "(", ")", ")", ",", "put", "(", "path", "=", "'/etc/flocker/control-service.crt'", ",", "content", "=", "control_cert", ".", "getContent", "(", ")", ")", ",", "put", "(", "path", "=", "'/etc/flocker/control-service.key'", ",", "content", "=", "control_key", ".", "getContent", "(", ")", ",", "log_content_filter", "=", "_remove_private_key", ")", "]", ")" ]
install certificates and private key required by the control service .
train
false
48,909
def get_all_primitive_params(key):
    params = [key]
    for datatype in PRIMITIVE_DATATYPES:
        if ((key == 1) and (datatype == 'ascii')):
            params.append('')
        else:
            params.append(get_sample(datatype))
    return params
[ "def", "get_all_primitive_params", "(", "key", ")", ":", "params", "=", "[", "key", "]", "for", "datatype", "in", "PRIMITIVE_DATATYPES", ":", "if", "(", "(", "key", "==", "1", ")", "and", "(", "datatype", "==", "'ascii'", ")", ")", ":", "params", ".", "append", "(", "''", ")", "else", ":", "params", ".", "append", "(", "get_sample", "(", "datatype", ")", ")", "return", "params" ]
simple utility method used to give back a list of all possible primitive data sample types .
train
false
48,910
def propertyx(function):
    keys = ('fget', 'fset', 'fdel')
    func_locals = {'doc': function.__doc__}
    def probe_func(frame, event, arg):
        if (event == 'return'):
            locals = frame.f_locals
            func_locals.update(dict(((k, locals.get(k)) for k in keys)))
            sys.settrace(None)
        return probe_func
    sys.settrace(probe_func)
    function()
    return property(**func_locals)
[ "def", "propertyx", "(", "function", ")", ":", "keys", "=", "(", "'fget'", ",", "'fset'", ",", "'fdel'", ")", "func_locals", "=", "{", "'doc'", ":", "function", ".", "__doc__", "}", "def", "probe_func", "(", "frame", ",", "event", ",", "arg", ")", ":", "if", "(", "event", "==", "'return'", ")", ":", "locals", "=", "frame", ".", "f_locals", "func_locals", ".", "update", "(", "dict", "(", "(", "(", "k", ",", "locals", ".", "get", "(", "k", ")", ")", "for", "k", "in", "keys", ")", ")", ")", "sys", ".", "settrace", "(", "None", ")", "return", "probe_func", "sys", ".", "settrace", "(", "probe_func", ")", "function", "(", ")", "return", "property", "(", "**", "func_locals", ")" ]
decorator to easily create properties in classes .
train
true
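A minimal sketch of how the propertyx decorator above might be used; the Angle class and its fields are assumptions for illustration. The decorated function only has to define fget/fset/fdel as locals, and the settrace probe captures them when the function returns:
class Angle(object):
    # hypothetical class used only to illustrate propertyx
    def __init__(self):
        self._deg = 0.0

    @propertyx
    def degrees():
        """angle in degrees"""  # becomes the property's docstring
        def fget(self):
            return self._deg
        def fset(self, value):
            self._deg = value % 360  # normalize on assignment

a = Angle()
a.degrees = 400
a.degrees   # -> 40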
48,911
def string_range(last): for k in range(len(last)): for x in product(string.ascii_lowercase, repeat=(k + 1)): result = ''.join(x) (yield result) if (result == last): return
[ "def", "string_range", "(", "last", ")", ":", "for", "k", "in", "range", "(", "len", "(", "last", ")", ")", ":", "for", "x", "in", "product", "(", "string", ".", "ascii_lowercase", ",", "repeat", "=", "(", "k", "+", "1", ")", ")", ":", "result", "=", "''", ".", "join", "(", "x", ")", "(", "yield", "result", ")", "if", "(", "result", "==", "last", ")", ":", "return" ]
compute the range of strings between "a" and last .
train
true
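A minimal sketch of string_range above, assuming itertools.product is imported as in the snippet's context; it enumerates base-26 strings in order until last is produced:
list(string_range('c'))    # -> ['a', 'b', 'c']
list(string_range('ab'))   # -> ['a', 'b', ..., 'z', 'aa', 'ab']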
48,912
def _safe_copy_vdi(session, sr_ref, instance, vdi_to_copy_ref): with _dummy_vm(session, instance, vdi_to_copy_ref) as vm_ref: label = 'snapshot' with snapshot_attached_here(session, instance, vm_ref, label) as vdi_uuids: imported_vhds = session.call_plugin_serialized('workarounds', 'safe_copy_vdis', sr_path=get_sr_path(session), vdi_uuids=vdi_uuids, uuid_stack=_make_uuid_stack()) root_uuid = imported_vhds['root']['uuid'] scan_default_sr(session) vdi_ref = session.call_xenapi('VDI.get_by_uuid', root_uuid) return vdi_ref
[ "def", "_safe_copy_vdi", "(", "session", ",", "sr_ref", ",", "instance", ",", "vdi_to_copy_ref", ")", ":", "with", "_dummy_vm", "(", "session", ",", "instance", ",", "vdi_to_copy_ref", ")", "as", "vm_ref", ":", "label", "=", "'snapshot'", "with", "snapshot_attached_here", "(", "session", ",", "instance", ",", "vm_ref", ",", "label", ")", "as", "vdi_uuids", ":", "imported_vhds", "=", "session", ".", "call_plugin_serialized", "(", "'workarounds'", ",", "'safe_copy_vdis'", ",", "sr_path", "=", "get_sr_path", "(", "session", ")", ",", "vdi_uuids", "=", "vdi_uuids", ",", "uuid_stack", "=", "_make_uuid_stack", "(", ")", ")", "root_uuid", "=", "imported_vhds", "[", "'root'", "]", "[", "'uuid'", "]", "scan_default_sr", "(", "session", ")", "vdi_ref", "=", "session", ".", "call_xenapi", "(", "'VDI.get_by_uuid'", ",", "root_uuid", ")", "return", "vdi_ref" ]
copy a vdi and return the new vdi's reference .
train
false
48,914
@loader_option() def defaultload(loadopt, attr): return loadopt.set_relationship_strategy(attr, None)
[ "@", "loader_option", "(", ")", "def", "defaultload", "(", "loadopt", ",", "attr", ")", ":", "return", "loadopt", ".", "set_relationship_strategy", "(", "attr", ",", "None", ")" ]
indicate an attribute should load using its default loader style .
train
false
48,915
def blacklist(*field_list): return Role(Role.blacklist, field_list)
[ "def", "blacklist", "(", "*", "field_list", ")", ":", "return", "Role", "(", "Role", ".", "blacklist", ",", "field_list", ")" ]
returns a function that operates as a blacklist for the provided list of fields .
train
false
48,916
def dirscan(): logging.info('Scheduled or manual watched folder scan') DirScanner.do.scan()
[ "def", "dirscan", "(", ")", ":", "logging", ".", "info", "(", "'Scheduled or manual watched folder scan'", ")", "DirScanner", ".", "do", ".", "scan", "(", ")" ]
wrapper required for scheduler .
train
false
48,917
def _get_xblock(usage_key, user): store = modulestore() with store.bulk_operations(usage_key.course_key): try: return store.get_item(usage_key, depth=None) except ItemNotFoundError: if (usage_key.category in CREATE_IF_NOT_FOUND): return store.create_item(user.id, usage_key.course_key, usage_key.block_type, block_id=usage_key.block_id) else: raise except InvalidLocationError: log.error("Can't find item by location.") return JsonResponse({'error': ("Can't find item by location: " + unicode(usage_key))}, 404)
[ "def", "_get_xblock", "(", "usage_key", ",", "user", ")", ":", "store", "=", "modulestore", "(", ")", "with", "store", ".", "bulk_operations", "(", "usage_key", ".", "course_key", ")", ":", "try", ":", "return", "store", ".", "get_item", "(", "usage_key", ",", "depth", "=", "None", ")", "except", "ItemNotFoundError", ":", "if", "(", "usage_key", ".", "category", "in", "CREATE_IF_NOT_FOUND", ")", ":", "return", "store", ".", "create_item", "(", "user", ".", "id", ",", "usage_key", ".", "course_key", ",", "usage_key", ".", "block_type", ",", "block_id", "=", "usage_key", ".", "block_id", ")", "else", ":", "raise", "except", "InvalidLocationError", ":", "log", ".", "error", "(", "\"Can't find item by location.\"", ")", "return", "JsonResponse", "(", "{", "'error'", ":", "(", "\"Can't find item by location: \"", "+", "unicode", "(", "usage_key", ")", ")", "}", ",", "404", ")" ]
returns the xblock for the specified usage key .
train
false
48,918
def diagsvd(s, M, N): part = diag(s) typ = part.dtype.char MorN = len(s) if (MorN == M): return r_[('-1', part, zeros((M, (N - M)), typ))] elif (MorN == N): return r_[(part, zeros(((M - N), N), typ))] else: raise ValueError('Length of s must be M or N.')
[ "def", "diagsvd", "(", "s", ",", "M", ",", "N", ")", ":", "part", "=", "diag", "(", "s", ")", "typ", "=", "part", ".", "dtype", ".", "char", "MorN", "=", "len", "(", "s", ")", "if", "(", "MorN", "==", "M", ")", ":", "return", "r_", "[", "(", "'-1'", ",", "part", ",", "zeros", "(", "(", "M", ",", "(", "N", "-", "M", ")", ")", ",", "typ", ")", ")", "]", "elif", "(", "MorN", "==", "N", ")", ":", "return", "r_", "[", "(", "part", ",", "zeros", "(", "(", "(", "M", "-", "N", ")", ",", "N", ")", ",", "typ", ")", ")", "]", "else", ":", "raise", "ValueError", "(", "'Length of s must be M or N.'", ")" ]
construct the sigma matrix in svd from the singular values and the sizes m and n .
train
false
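A minimal sketch reconstructing a matrix from its SVD with the diagsvd snippet above; numpy is assumed for the decomposition, and diag/zeros/r_ are assumed imported from numpy as in the snippet's context:
import numpy as np
A = np.array([[1., 0.], [0., 2.], [3., 0.]])
U, s, Vh = np.linalg.svd(A)
Sigma = diagsvd(s, 3, 2)       # 3x2 matrix with s on its diagonal
assert np.allclose(U.dot(Sigma).dot(Vh), A)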
48,920
def align_texts(source_blocks, target_blocks, params=LanguageIndependent): if (len(source_blocks) != len(target_blocks)): raise ValueError('Source and target texts do not have the same number of blocks.') return [align_blocks(source_block, target_block, params) for (source_block, target_block) in zip(source_blocks, target_blocks)]
[ "def", "align_texts", "(", "source_blocks", ",", "target_blocks", ",", "params", "=", "LanguageIndependent", ")", ":", "if", "(", "len", "(", "source_blocks", ")", "!=", "len", "(", "target_blocks", ")", ")", ":", "raise", "ValueError", "(", "'Source and target texts do not have the same number of blocks.'", ")", "return", "[", "align_blocks", "(", "source_block", ",", "target_block", ",", "params", ")", "for", "(", "source_block", ",", "target_block", ")", "in", "zip", "(", "source_blocks", ",", "target_blocks", ")", "]" ]
creates the sentence alignment of two texts .
train
true
48,921
def fastprint(text, show_prefix=False, end='', flush=True): return puts(text=text, show_prefix=show_prefix, end=end, flush=flush)
[ "def", "fastprint", "(", "text", ",", "show_prefix", "=", "False", ",", "end", "=", "''", ",", "flush", "=", "True", ")", ":", "return", "puts", "(", "text", "=", "text", ",", "show_prefix", "=", "show_prefix", ",", "end", "=", "end", ",", "flush", "=", "flush", ")" ]
print text immediately .
train
false
48,922
def _get_firstbday(wkday): first = 1 if (wkday == 5): first = 3 elif (wkday == 6): first = 2 return first
[ "def", "_get_firstbday", "(", "wkday", ")", ":", "first", "=", "1", "if", "(", "wkday", "==", "5", ")", ":", "first", "=", "3", "elif", "(", "wkday", "==", "6", ")", ":", "first", "=", "2", "return", "first" ]
wkday is the weekday of the first day of the month as returned by monthrange ; if it is a saturday or sunday , return the first business day instead .
train
false
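A minimal sketch of _get_firstbday above, fed from calendar.monthrange; July 2017 starts on a Saturday:
import calendar
wkday, days = calendar.monthrange(2017, 7)   # wkday == 5 (Saturday)
_get_firstbday(wkday)                        # -> 3, i.e. Monday the 3rd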
48,923
def _get_cpu_thread_policy_constraints(flavor, image_meta): (flavor_policy, image_policy) = _get_flavor_image_meta('cpu_thread_policy', flavor, image_meta) if (flavor_policy in [None, fields.CPUThreadAllocationPolicy.PREFER]): policy = (flavor_policy or image_policy) elif (image_policy and (image_policy != flavor_policy)): raise exception.ImageCPUThreadPolicyForbidden() else: policy = flavor_policy return policy
[ "def", "_get_cpu_thread_policy_constraints", "(", "flavor", ",", "image_meta", ")", ":", "(", "flavor_policy", ",", "image_policy", ")", "=", "_get_flavor_image_meta", "(", "'cpu_thread_policy'", ",", "flavor", ",", "image_meta", ")", "if", "(", "flavor_policy", "in", "[", "None", ",", "fields", ".", "CPUThreadAllocationPolicy", ".", "PREFER", "]", ")", ":", "policy", "=", "(", "flavor_policy", "or", "image_policy", ")", "elif", "(", "image_policy", "and", "(", "image_policy", "!=", "flavor_policy", ")", ")", ":", "raise", "exception", ".", "ImageCPUThreadPolicyForbidden", "(", ")", "else", ":", "policy", "=", "flavor_policy", "return", "policy" ]
validate and return the requested cpu thread policy .
train
false
48,924
def _get_xml_iter(xml_source): if (not hasattr(xml_source, 'read')): try: xml_source = xml_source.encode('utf-8') except (AttributeError, UnicodeDecodeError): pass return BytesIO(xml_source) else: try: xml_source.seek(0) except: pass return xml_source
[ "def", "_get_xml_iter", "(", "xml_source", ")", ":", "if", "(", "not", "hasattr", "(", "xml_source", ",", "'read'", ")", ")", ":", "try", ":", "xml_source", "=", "xml_source", ".", "encode", "(", "'utf-8'", ")", "except", "(", "AttributeError", ",", "UnicodeDecodeError", ")", ":", "pass", "return", "BytesIO", "(", "xml_source", ")", "else", ":", "try", ":", "xml_source", ".", "seek", "(", "0", ")", "except", ":", "pass", "return", "xml_source" ]
possible inputs: strings and file-like objects ; strings are encoded and wrapped in a bytesio , while file-like objects are rewound to the start .
train
true
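A minimal sketch of _get_xml_iter above, showing both input paths:
_get_xml_iter('<root/>').read()       # b'<root/>' (str wrapped in BytesIO)
from io import BytesIO
buf = BytesIO(b'<root/>')
buf.read()                            # exhaust the stream
_get_xml_iter(buf).read()             # b'<root/>' again, after seek(0)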
48,925
def create_baseline(tag='baseline', config='root'): return __salt__['snapper.create_snapshot'](config=config, snapshot_type='single', description='baseline snapshot', cleanup_algorithm='number', userdata={'baseline_tag': tag})
[ "def", "create_baseline", "(", "tag", "=", "'baseline'", ",", "config", "=", "'root'", ")", ":", "return", "__salt__", "[", "'snapper.create_snapshot'", "]", "(", "config", "=", "config", ",", "snapshot_type", "=", "'single'", ",", "description", "=", "'baseline snapshot'", ",", "cleanup_algorithm", "=", "'number'", ",", "userdata", "=", "{", "'baseline_tag'", ":", "tag", "}", ")" ]
creates a snapshot marked as a baseline ; tag is the name for the baseline and config is the configuration name .
train
true
48,926
@error.context_aware def lv_umount(vg_name, lv_name, mount_loc): error.context('Unmounting the logical volume', logging.info) try: utils.run(('umount /dev/%s/%s' % (vg_name, lv_name))) except error.CmdError as ex: logging.warning(ex) return False return True
[ "@", "error", ".", "context_aware", "def", "lv_umount", "(", "vg_name", ",", "lv_name", ",", "mount_loc", ")", ":", "error", ".", "context", "(", "'Unmounting the logical volume'", ",", "logging", ".", "info", ")", "try", ":", "utils", ".", "run", "(", "(", "'umount /dev/%s/%s'", "%", "(", "vg_name", ",", "lv_name", ")", ")", ")", "except", "error", ".", "CmdError", "as", "ex", ":", "logging", ".", "warning", "(", "ex", ")", "return", "False", "return", "True" ]
unmount a logical volume from a mount location .
train
false
48,927
@_ConfigurableFilter(executable='YUI_COMPRESSOR_EXECUTABLE') def yui_compressor(infile, executable=None): yuicompressor = executable if (not yuicompressor): try: subprocess.call('yui-compressor', stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w')) yuicompressor = 'yui-compressor' except Exception: pass if (not yuicompressor): try: subprocess.call('yuicompressor', stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w')) yuicompressor = 'yuicompressor' except: raise Exception('yui-compressor is not installed.') return False return runinplace('{} --nomunge %1 -o %2'.format(yuicompressor), infile)
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'YUI_COMPRESSOR_EXECUTABLE'", ")", "def", "yui_compressor", "(", "infile", ",", "executable", "=", "None", ")", ":", "yuicompressor", "=", "executable", "if", "(", "not", "yuicompressor", ")", ":", "try", ":", "subprocess", ".", "call", "(", "'yui-compressor'", ",", "stdout", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", ",", "stderr", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", ")", "yuicompressor", "=", "'yui-compressor'", "except", "Exception", ":", "pass", "if", "(", "not", "yuicompressor", ")", ":", "try", ":", "subprocess", ".", "call", "(", "'yuicompressor'", ",", "stdout", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", ",", "stderr", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", ")", "yuicompressor", "=", "'yuicompressor'", "except", ":", "raise", "Exception", "(", "'yui-compressor is not installed.'", ")", "return", "False", "return", "runinplace", "(", "'{} --nomunge %1 -o %2'", ".", "format", "(", "yuicompressor", ")", ",", "infile", ")" ]
run yui compressor on a file .
train
false
48,929
def async_trigger(hass, config, action): if (CONF_AFTER in config): after = config.get(CONF_AFTER) (hours, minutes, seconds) = (after.hour, after.minute, after.second) else: hours = config.get(CONF_HOURS) minutes = config.get(CONF_MINUTES) seconds = config.get(CONF_SECONDS) @callback def time_automation_listener(now): 'Listen for time changes and calls action.' hass.async_run_job(action, {'trigger': {'platform': 'time', 'now': now}}) return async_track_time_change(hass, time_automation_listener, hour=hours, minute=minutes, second=seconds)
[ "def", "async_trigger", "(", "hass", ",", "config", ",", "action", ")", ":", "if", "(", "CONF_AFTER", "in", "config", ")", ":", "after", "=", "config", ".", "get", "(", "CONF_AFTER", ")", "(", "hours", ",", "minutes", ",", "seconds", ")", "=", "(", "after", ".", "hour", ",", "after", ".", "minute", ",", "after", ".", "second", ")", "else", ":", "hours", "=", "config", ".", "get", "(", "CONF_HOURS", ")", "minutes", "=", "config", ".", "get", "(", "CONF_MINUTES", ")", "seconds", "=", "config", ".", "get", "(", "CONF_SECONDS", ")", "@", "callback", "def", "time_automation_listener", "(", "now", ")", ":", "hass", ".", "async_run_job", "(", "action", ",", "{", "'trigger'", ":", "{", "'platform'", ":", "'time'", ",", "'now'", ":", "now", "}", "}", ")", "return", "async_track_time_change", "(", "hass", ",", "time_automation_listener", ",", "hour", "=", "hours", ",", "minute", "=", "minutes", ",", "second", "=", "seconds", ")" ]
listen for time changes based on configuration .
train
false
48,930
def test_write_noheader_no_bookend(): out = StringIO() ascii.write(dat, out, Writer=ascii.FixedWidthNoHeader, bookend=False) assert_equal_splitlines(out.getvalue(), '1.2 | "hello" | 1 | a\n2.4 | \'s worlds | 2 | 2\n')
[ "def", "test_write_noheader_no_bookend", "(", ")", ":", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "dat", ",", "out", ",", "Writer", "=", "ascii", ".", "FixedWidthNoHeader", ",", "bookend", "=", "False", ")", "assert_equal_splitlines", "(", "out", ".", "getvalue", "(", ")", ",", "'1.2 | \"hello\" | 1 | a\\n2.4 | \\'s worlds | 2 | 2\\n'", ")" ]
write a table as a fixed width table with no bookend .
train
false
48,931
def test_duplicate_keywords_2(): initialize() yamlfile = '{\n "model": !obj:pylearn2.models.rbm.GaussianBinaryRBM {\n\n "vis_space" : &vis_space !obj:pylearn2.space.Conv2DSpace {\n "shape" : [32,32],\n "num_channels" : 3\n },\n "hid_space" : &hid_space !obj:pylearn2.space.Conv2DSpace {\n "shape" : [27,27],\n "num_channels" : 10\n },\n "transformer" :\n !obj:pylearn2.linear.conv2d.make_random_conv2D {\n "irange" : .05,\n "input_space" : *vis_space,\n "output_space" : *hid_space,\n "kernel_shape" : [6,6],\n "batch_size" : &batch_size 5\n },\n "energy_function_class" :\n !obj:pylearn2.energy_functions.rbm_energy.grbm_type_1 {},\n "learn_sigma" : True,\n "init_sigma" : .3333,\n "init_bias_hid" : -2.,\n "mean_vis" : False,\n "sigma_lr_scale" : 1e-3\n\n }\n }' load(yamlfile)
[ "def", "test_duplicate_keywords_2", "(", ")", ":", "initialize", "(", ")", "yamlfile", "=", "'{\\n \"model\": !obj:pylearn2.models.rbm.GaussianBinaryRBM {\\n\\n \"vis_space\" : &vis_space !obj:pylearn2.space.Conv2DSpace {\\n \"shape\" : [32,32],\\n \"num_channels\" : 3\\n },\\n \"hid_space\" : &hid_space !obj:pylearn2.space.Conv2DSpace {\\n \"shape\" : [27,27],\\n \"num_channels\" : 10\\n },\\n \"transformer\" :\\n !obj:pylearn2.linear.conv2d.make_random_conv2D {\\n \"irange\" : .05,\\n \"input_space\" : *vis_space,\\n \"output_space\" : *hid_space,\\n \"kernel_shape\" : [6,6],\\n \"batch_size\" : &batch_size 5\\n },\\n \"energy_function_class\" :\\n !obj:pylearn2.energy_functions.rbm_energy.grbm_type_1 {},\\n \"learn_sigma\" : True,\\n \"init_sigma\" : .3333,\\n \"init_bias_hid\" : -2.,\\n \"mean_vis\" : False,\\n \"sigma_lr_scale\" : 1e-3\\n\\n }\\n }'", "load", "(", "yamlfile", ")" ]
tests whether duplicate keywords as independent parameters work fine .
train
false
48,932
def deprecate_thing_type(thingTypeName, undoDeprecate=False, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.deprecate_thing_type(thingTypeName=thingTypeName, undoDeprecate=undoDeprecate) deprecated = (True if (undoDeprecate is False) else False) return {'deprecated': deprecated} except ClientError as e: return {'deprecated': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "deprecate_thing_type", "(", "thingTypeName", ",", "undoDeprecate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "deprecate_thing_type", "(", "thingTypeName", "=", "thingTypeName", ",", "undoDeprecate", "=", "undoDeprecate", ")", "deprecated", "=", "(", "True", "if", "(", "undoDeprecate", "is", "False", ")", "else", "False", ")", "return", "{", "'deprecated'", ":", "deprecated", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'deprecated'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
given a thing type name , deprecate the thing type , or undo a prior deprecation when undoDeprecate is true .
train
false
48,933
def make_fortran_patterns(): kwstr = 'access action advance allocatable allocate apostrophe assign assignment associate asynchronous backspace bind blank blockdata call case character class close common complex contains continue cycle data deallocate decimal delim default dimension direct do dowhile double doubleprecision else elseif elsewhere encoding end endassociate endblockdata enddo endfile endforall endfunction endif endinterface endmodule endprogram endselect endsubroutine endtype endwhere entry eor equivalence err errmsg exist exit external file flush fmt forall form format formatted function go goto id if implicit in include inout integer inquire intent interface intrinsic iomsg iolength iostat kind len logical module name named namelist nextrec nml none nullify number only open opened operator optional out pad parameter pass pause pending pointer pos position precision print private program protected public quote read readwrite real rec recl recursive result return rewind save select selectcase selecttype sequential sign size stat status stop stream subroutine target then to type unformatted unit use value volatile wait where while write' bistr1 = 'abs achar acos acosd adjustl adjustr aimag aimax0 aimin0 aint ajmax0 ajmin0 akmax0 akmin0 all allocated alog alog10 amax0 amax1 amin0 amin1 amod anint any asin asind associated atan atan2 atan2d atand bitest bitl bitlr bitrl bjtest bit_size bktest break btest cabs ccos cdabs cdcos cdexp cdlog cdsin cdsqrt ceiling cexp char clog cmplx conjg cos cosd cosh count cpu_time cshift csin csqrt dabs dacos dacosd dasin dasind datan datan2 datan2d datand date date_and_time dble dcmplx dconjg dcos dcosd dcosh dcotan ddim dexp dfloat dflotk dfloti dflotj digits dim dimag dint dlog dlog10 dmax1 dmin1 dmod dnint dot_product dprod dreal dsign dsin dsind dsinh dsqrt dtan dtand dtanh eoshift epsilon errsns exp exponent float floati floatj floatk floor fraction free huge iabs iachar iand ibclr ibits ibset ichar idate idim idint idnint ieor ifix iiabs iiand iibclr iibits iibset iidim iidint iidnnt iieor iifix iint iior iiqint iiqnnt iishft iishftc iisign ilen imax0 imax1 imin0 imin1 imod index inint inot int int1 int2 int4 int8 iqint iqnint ior ishft ishftc isign isnan izext jiand jibclr jibits jibset jidim jidint jidnnt jieor jifix jint jior jiqint jiqnnt jishft jishftc jisign jmax0 jmax1 jmin0 jmin1 jmod jnint jnot jzext kiabs kiand kibclr kibits kibset kidim kidint kidnnt kieor kifix kind kint kior kishft kishftc kisign kmax0 kmax1 kmin0 kmin1 kmod knint knot kzext lbound leadz len len_trim lenlge lge lgt lle llt log log10 logical lshift malloc matmul max max0 max1 maxexponent maxloc maxval merge min min0 min1 minexponent minloc minval mod modulo mvbits nearest nint not nworkers number_of_processors pack popcnt poppar precision present product radix random random_number random_seed range real repeat reshape rrspacing rshift scale scan secnds selected_int_kind selected_real_kind set_exponent shape sign sin sind sinh size sizeof sngl snglq spacing spread sqrt sum system_clock tan tand tanh tiny transfer transpose trim ubound unpack verify' bistr2 = 'cdabs cdcos cdexp cdlog cdsin cdsqrt cotan cotand dcmplx dconjg dcotan dcotand decode dimag dll_export dll_import doublecomplex dreal dvchk encode find flen flush getarg getcharqq getcl getdat getenv gettim hfix ibchng identifier imag int1 int2 int4 intc intrup invalop iostat_msg isha ishc ishl jfix lacfar locking locnear map nargs nbreak ndperr ndpexc offset ovefl peekcharqq precfill prompt qabs qacos qacosd qasin qasind qatan qatand qatan2 qcmplx qconjg qcos qcosd qcosh qdim qexp qext qextd qfloat qimag qlog qlog10 qmax1 qmin1 qmod qreal qsign qsin qsind qsinh qsqrt qtan qtand qtanh ran rand randu rewrite segment setdat settim system timer undfl unlock union val virtual volatile zabs zcos zexp zlog zsin zsqrt' kw = (('\\b' + any('keyword', kwstr.split())) + '\\b') builtin = (('\\b' + any('builtin', (bistr1.split() + bistr2.split()))) + '\\b') comment = any('comment', ['\\![^\\n]*']) number = any('number', ['\\b[+-]?[0-9]+[lL]?\\b', '\\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\\b', '\\b[+-]?[0-9]+(?:\\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\\b']) sqstring = "(\\b[rRuU])?'[^'\\\\\\n]*(\\\\.[^'\\\\\\n]*)*'?" dqstring = '(\\b[rRuU])?"[^"\\\\\\n]*(\\\\.[^"\\\\\\n]*)*"?' string = any('string', [sqstring, dqstring]) return '|'.join([kw, comment, string, number, builtin, any('SYNC', ['\\n'])])
[ "def", "make_fortran_patterns", "(", ")", ":", "kwstr", "=", "'access action advance allocatable allocate apostrophe assign assignment associate asynchronous backspace bind blank blockdata call case character class close common complex contains continue cycle data deallocate decimal delim default dimension direct do dowhile double doubleprecision else elseif elsewhere encoding end endassociate endblockdata enddo endfile endforall endfunction endif endinterface endmodule endprogram endselect endsubroutine endtype endwhere entry eor equivalence err errmsg exist exit external file flush fmt forall form format formatted function go goto id if implicit in include inout integer inquire intent interface intrinsic iomsg iolength iostat kind len logical module name named namelist nextrec nml none nullify number only open opened operator optional out pad parameter pass pause pending pointer pos position precision print private program protected public quote read readwrite real rec recl recursive result return rewind save select selectcase selecttype sequential sign size stat status stop stream subroutine target then to type unformatted unit use value volatile wait where while write'", "bistr1", "=", "'abs achar acos acosd adjustl adjustr aimag aimax0 aimin0 aint ajmax0 ajmin0 akmax0 akmin0 all allocated alog alog10 amax0 amax1 amin0 amin1 amod anint any asin asind associated atan atan2 atan2d atand bitest bitl bitlr bitrl bjtest bit_size bktest break btest cabs ccos cdabs cdcos cdexp cdlog cdsin cdsqrt ceiling cexp char clog cmplx conjg cos cosd cosh count cpu_time cshift csin csqrt dabs dacos dacosd dasin dasind datan datan2 datan2d datand date date_and_time dble dcmplx dconjg dcos dcosd dcosh dcotan ddim dexp dfloat dflotk dfloti dflotj digits dim dimag dint dlog dlog10 dmax1 dmin1 dmod dnint dot_product dprod dreal dsign dsin dsind dsinh dsqrt dtan dtand dtanh eoshift epsilon errsns exp exponent float floati floatj floatk floor fraction free huge iabs iachar iand ibclr ibits ibset ichar idate idim idint idnint ieor ifix iiabs iiand iibclr iibits iibset iidim iidint iidnnt iieor iifix iint iior iiqint iiqnnt iishft iishftc iisign ilen imax0 imax1 imin0 imin1 imod index inint inot int int1 int2 int4 int8 iqint iqnint ior ishft ishftc isign isnan izext jiand jibclr jibits jibset jidim jidint jidnnt jieor jifix jint jior jiqint jiqnnt jishft jishftc jisign jmax0 jmax1 jmin0 jmin1 jmod jnint jnot jzext kiabs kiand kibclr kibits kibset kidim kidint kidnnt kieor kifix kind kint kior kishft kishftc kisign kmax0 kmax1 kmin0 kmin1 kmod knint knot kzext lbound leadz len len_trim lenlge lge lgt lle llt log log10 logical lshift malloc matmul max max0 max1 maxexponent maxloc maxval merge min min0 min1 minexponent minloc minval mod modulo mvbits nearest nint not nworkers number_of_processors pack popcnt poppar precision present product radix random random_number random_seed range real repeat reshape rrspacing rshift scale scan secnds selected_int_kind selected_real_kind set_exponent shape sign sin sind sinh size sizeof sngl snglq spacing spread sqrt sum system_clock tan tand tanh tiny transfer transpose trim ubound unpack verify'", "bistr2", "=", "'cdabs cdcos cdexp cdlog cdsin cdsqrt cotan cotand dcmplx dconjg dcotan dcotand decode dimag dll_export dll_import doublecomplex dreal dvchk encode find flen flush getarg getcharqq getcl getdat getenv gettim hfix ibchng identifier imag int1 int2 int4 intc intrup invalop iostat_msg isha ishc ishl jfix lacfar locking locnear map nargs nbreak ndperr ndpexc offset ovefl 
peekcharqq precfill prompt qabs qacos qacosd qasin qasind qatan qatand qatan2 qcmplx qconjg qcos qcosd qcosh qdim qexp qext qextd qfloat qimag qlog qlog10 qmax1 qmin1 qmod qreal qsign qsin qsind qsinh qsqrt qtan qtand qtanh ran rand randu rewrite segment setdat settim system timer undfl unlock union val virtual volatile zabs zcos zexp zlog zsin zsqrt'", "kw", "=", "(", "(", "'\\\\b'", "+", "any", "(", "'keyword'", ",", "kwstr", ".", "split", "(", ")", ")", ")", "+", "'\\\\b'", ")", "builtin", "=", "(", "(", "'\\\\b'", "+", "any", "(", "'builtin'", ",", "(", "bistr1", ".", "split", "(", ")", "+", "bistr2", ".", "split", "(", ")", ")", ")", ")", "+", "'\\\\b'", ")", "comment", "=", "any", "(", "'comment'", ",", "[", "'\\\\![^\\\\n]*'", "]", ")", "number", "=", "any", "(", "'number'", ",", "[", "'\\\\b[+-]?[0-9]+[lL]?\\\\b'", ",", "'\\\\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\\\\b'", ",", "'\\\\b[+-]?[0-9]+(?:\\\\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\\\\b'", "]", ")", "sqstring", "=", "\"(\\\\b[rRuU])?'[^'\\\\\\\\\\\\n]*(\\\\\\\\.[^'\\\\\\\\\\\\n]*)*'?\"", "dqstring", "=", "'(\\\\b[rRuU])?\"[^\"\\\\\\\\\\\\n]*(\\\\\\\\.[^\"\\\\\\\\\\\\n]*)*\"?'", "string", "=", "any", "(", "'string'", ",", "[", "sqstring", ",", "dqstring", "]", ")", "return", "'|'", ".", "join", "(", "[", "kw", ",", "comment", ",", "string", ",", "number", ",", "builtin", ",", "any", "(", "'SYNC'", ",", "[", "'\\\\n'", "]", ")", "]", ")" ]
strongly inspired by idlelib .
train
false
48,934
def split_virtual_offset(virtual_offset): start = (virtual_offset >> 16) return (start, (virtual_offset ^ (start << 16)))
[ "def", "split_virtual_offset", "(", "virtual_offset", ")", ":", "start", "=", "(", "virtual_offset", ">>", "16", ")", "return", "(", "start", ",", "(", "virtual_offset", "^", "(", "start", "<<", "16", ")", ")", ")" ]
divides a 64-bit bgzf virtual offset into block start and within-block offsets .
train
false
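A minimal sketch of split_virtual_offset above; a BGZF virtual offset packs the block start into the high 48 bits and the within-block offset into the low 16 bits:
voffset = (100000 << 16) | 10   # block starting at byte 100000, offset 10
split_virtual_offset(voffset)   # -> (100000, 10)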
48,935
@app.route('/<username>') def user_timeline(username): profile_user = query_db('select * from user where username = ?', [username], one=True) if (profile_user is None): abort(404) followed = False if g.user: followed = (query_db('select 1 from follower where\n follower.who_id = ? and follower.whom_id = ?', [session['user_id'], profile_user['user_id']], one=True) is not None) return render_template('timeline.html', messages=query_db('\n select message.*, user.* from message, user where\n user.user_id = message.author_id and user.user_id = ?\n order by message.pub_date desc limit ?', [profile_user['user_id'], PER_PAGE]), followed=followed, profile_user=profile_user)
[ "@", "app", ".", "route", "(", "'/<username>'", ")", "def", "user_timeline", "(", "username", ")", ":", "profile_user", "=", "query_db", "(", "'select * from user where username = ?'", ",", "[", "username", "]", ",", "one", "=", "True", ")", "if", "(", "profile_user", "is", "None", ")", ":", "abort", "(", "404", ")", "followed", "=", "False", "if", "g", ".", "user", ":", "followed", "=", "(", "query_db", "(", "'select 1 from follower where\\n follower.who_id = ? and follower.whom_id = ?'", ",", "[", "session", "[", "'user_id'", "]", ",", "profile_user", "[", "'user_id'", "]", "]", ",", "one", "=", "True", ")", "is", "not", "None", ")", "return", "render_template", "(", "'timeline.html'", ",", "messages", "=", "query_db", "(", "'\\n select message.*, user.* from message, user where\\n user.user_id = message.author_id and user.user_id = ?\\n order by message.pub_date desc limit ?'", ",", "[", "profile_user", "[", "'user_id'", "]", ",", "PER_PAGE", "]", ")", ",", "followed", "=", "followed", ",", "profile_user", "=", "profile_user", ")" ]
displays a user's tweets .
train
false
48,936
def is_ipv4_addr(ip): parts = ip.split('.') try: return ((len(parts) == 4) and all(((0 <= int(part) < 256) for part in parts))) except: return False
[ "def", "is_ipv4_addr", "(", "ip", ")", ":", "parts", "=", "ip", ".", "split", "(", "'.'", ")", "try", ":", "return", "(", "(", "len", "(", "parts", ")", "==", "4", ")", "and", "all", "(", "(", "(", "0", "<=", "int", "(", "part", ")", "<", "256", ")", "for", "part", "in", "parts", ")", ")", ")", "except", ":", "return", "False" ]
simple way to check whether a string is an ipv4 address .
train
false
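A minimal sketch of is_ipv4_addr above:
is_ipv4_addr('192.168.0.1')   # True
is_ipv4_addr('256.0.0.1')     # False: octet out of range
is_ipv4_addr('not-an-ip')     # False: int() raises, caught by the bare except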
48,937
def list_plugins(): plugin_list = os.listdir(PLUGINDIR) ret = [] for plugin in plugin_list: stat_f = os.path.join(PLUGINDIR, plugin) execute_bit = (stat.S_IXUSR & os.stat(stat_f)[stat.ST_MODE]) if execute_bit: ret.append(plugin) return ret
[ "def", "list_plugins", "(", ")", ":", "plugin_list", "=", "os", ".", "listdir", "(", "PLUGINDIR", ")", "ret", "=", "[", "]", "for", "plugin", "in", "plugin_list", ":", "stat_f", "=", "os", ".", "path", ".", "join", "(", "PLUGINDIR", ",", "plugin", ")", "execute_bit", "=", "(", "stat", ".", "S_IXUSR", "&", "os", ".", "stat", "(", "stat_f", ")", "[", "stat", ".", "ST_MODE", "]", ")", "if", "execute_bit", ":", "ret", ".", "append", "(", "plugin", ")", "return", "ret" ]
list all the nagios plugins .
train
true
48,938
def zone_exists(zone, region=None, key=None, keyid=None, profile=None, retry_on_rate_limit=True, rate_limit_retries=5): if (region is None): region = 'universal' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) while (rate_limit_retries > 0): try: return bool(conn.get_zone(zone)) except DNSServerError as e: if (retry_on_rate_limit and ('Throttling' == e.code)): log.debug('Throttled by AWS API.') time.sleep(2) rate_limit_retries -= 1 continue raise e
[ "def", "zone_exists", "(", "zone", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "retry_on_rate_limit", "=", "True", ",", "rate_limit_retries", "=", "5", ")", ":", "if", "(", "region", "is", "None", ")", ":", "region", "=", "'universal'", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "while", "(", "rate_limit_retries", ">", "0", ")", ":", "try", ":", "return", "bool", "(", "conn", ".", "get_zone", "(", "zone", ")", ")", "except", "DNSServerError", "as", "e", ":", "if", "(", "retry_on_rate_limit", "and", "(", "'Throttling'", "==", "e", ".", "code", ")", ")", ":", "log", ".", "debug", "(", "'Throttled by AWS API.'", ")", "time", ".", "sleep", "(", "2", ")", "rate_limit_retries", "-=", "1", "continue", "raise", "e" ]
check for the existence of a route53 hosted zone .
train
false
48,939
def _bem_explain_surface(id_): _rev_dict = dict(((val, key) for (key, val) in _surf_dict.items())) return _rev_dict[id_]
[ "def", "_bem_explain_surface", "(", "id_", ")", ":", "_rev_dict", "=", "dict", "(", "(", "(", "val", ",", "key", ")", "for", "(", "key", ",", "val", ")", "in", "_surf_dict", ".", "items", "(", ")", ")", ")", "return", "_rev_dict", "[", "id_", "]" ]
return a string corresponding to the given surface id .
train
false
48,940
def nativejoin(base, path): return url2pathname(pathjoin(base, path))
[ "def", "nativejoin", "(", "base", ",", "path", ")", ":", "return", "url2pathname", "(", "pathjoin", "(", "base", ",", "path", ")", ")" ]
joins two paths and returns a native file path .
train
false
48,941
def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): length = single_source_dijkstra_path_length for n in G: (yield (n, dict(length(G, n, cutoff=cutoff, weight=weight))))
[ "def", "all_pairs_dijkstra_path_length", "(", "G", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "length", "=", "single_source_dijkstra_path_length", "for", "n", "in", "G", ":", "(", "yield", "(", "n", ",", "dict", "(", "length", "(", "G", ",", "n", ",", "cutoff", "=", "cutoff", ",", "weight", "=", "weight", ")", ")", ")", ")" ]
compute shortest path lengths between all nodes in a weighted graph .
train
false
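A minimal sketch of all_pairs_dijkstra_path_length above using networkx; the generator yields one (node, lengths) pair per node, so wrapping it in dict() gives a nested mapping:
import networkx as nx
G = nx.path_graph(4)                               # 0-1-2-3, unit edge weights
lengths = dict(all_pairs_dijkstra_path_length(G))
lengths[0][3]                                      # -> 3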
48,942
@frappe.whitelist() def get_contact_number(contact_name, value, key): number = frappe.db.sql((u'select mobile_no, phone from tabContact where name=%s and %s=%s' % (u'%s', frappe.db.escape(key), u'%s')), (contact_name, value)) return ((number and (number[0][0] or number[0][1])) or u'')
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_contact_number", "(", "contact_name", ",", "value", ",", "key", ")", ":", "number", "=", "frappe", ".", "db", ".", "sql", "(", "(", "u'select mobile_no, phone from tabContact where name=%s and %s=%s'", "%", "(", "u'%s'", ",", "frappe", ".", "db", ".", "escape", "(", "key", ")", ",", "u'%s'", ")", ")", ",", "(", "contact_name", ",", "value", ")", ")", "return", "(", "(", "number", "and", "(", "number", "[", "0", "]", "[", "0", "]", "or", "number", "[", "0", "]", "[", "1", "]", ")", ")", "or", "u''", ")" ]
returns mobile number of the contact .
train
false
48,943
def _define_nrt_decref(module, atomic_decr): fn_decref = module.get_or_insert_function(incref_decref_ty, name='NRT_decref') calldtor = module.add_function(ir.FunctionType(ir.VoidType(), [_pointer_type]), name='NRT_MemInfo_call_dtor') builder = ir.IRBuilder(fn_decref.append_basic_block()) [ptr] = fn_decref.args is_null = builder.icmp_unsigned('==', ptr, cgutils.get_null_value(ptr.type)) with cgutils.if_unlikely(builder, is_null): builder.ret_void() if _debug_print: cgutils.printf(builder, '*** NRT_Decref %zu [%p]\n', builder.load(ptr), ptr) newrefct = builder.call(atomic_decr, [builder.bitcast(ptr, atomic_decr.args[0].type)]) refct_eq_0 = builder.icmp_unsigned('==', newrefct, ir.Constant(newrefct.type, 0)) with cgutils.if_unlikely(builder, refct_eq_0): builder.call(calldtor, [ptr]) builder.ret_void()
[ "def", "_define_nrt_decref", "(", "module", ",", "atomic_decr", ")", ":", "fn_decref", "=", "module", ".", "get_or_insert_function", "(", "incref_decref_ty", ",", "name", "=", "'NRT_decref'", ")", "calldtor", "=", "module", ".", "add_function", "(", "ir", ".", "FunctionType", "(", "ir", ".", "VoidType", "(", ")", ",", "[", "_pointer_type", "]", ")", ",", "name", "=", "'NRT_MemInfo_call_dtor'", ")", "builder", "=", "ir", ".", "IRBuilder", "(", "fn_decref", ".", "append_basic_block", "(", ")", ")", "[", "ptr", "]", "=", "fn_decref", ".", "args", "is_null", "=", "builder", ".", "icmp_unsigned", "(", "'=='", ",", "ptr", ",", "cgutils", ".", "get_null_value", "(", "ptr", ".", "type", ")", ")", "with", "cgutils", ".", "if_unlikely", "(", "builder", ",", "is_null", ")", ":", "builder", ".", "ret_void", "(", ")", "if", "_debug_print", ":", "cgutils", ".", "printf", "(", "builder", ",", "'*** NRT_Decref %zu [%p]\\n'", ",", "builder", ".", "load", "(", "ptr", ")", ",", "ptr", ")", "newrefct", "=", "builder", ".", "call", "(", "atomic_decr", ",", "[", "builder", ".", "bitcast", "(", "ptr", ",", "atomic_decr", ".", "args", "[", "0", "]", ".", "type", ")", "]", ")", "refct_eq_0", "=", "builder", ".", "icmp_unsigned", "(", "'=='", ",", "newrefct", ",", "ir", ".", "Constant", "(", "newrefct", ".", "type", ",", "0", ")", ")", "with", "cgutils", ".", "if_unlikely", "(", "builder", ",", "refct_eq_0", ")", ":", "builder", ".", "call", "(", "calldtor", ",", "[", "ptr", "]", ")", "builder", ".", "ret_void", "(", ")" ]
implement nrt_decref in the module .
train
false
48,944
def GetActiveView(): try: (childFrame, bIsMaximised) = win32ui.GetMainFrame().MDIGetActive() return childFrame.GetActiveView() except win32ui.error: return None
[ "def", "GetActiveView", "(", ")", ":", "try", ":", "(", "childFrame", ",", "bIsMaximised", ")", "=", "win32ui", ".", "GetMainFrame", "(", ")", ".", "MDIGetActive", "(", ")", "return", "childFrame", ".", "GetActiveView", "(", ")", "except", "win32ui", ".", "error", ":", "return", "None" ]
gets the edit control with the focus .
train
false
48,947
def get_folder(fileName): return os.path.dirname(fileName)
[ "def", "get_folder", "(", "fileName", ")", ":", "return", "os", ".", "path", ".", "dirname", "(", "fileName", ")" ]
get the name of the folder containing the file or folder received .
train
false
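A minimal sketch of get_folder above, with a hypothetical path:
get_folder('/home/user/project/main.py')   # -> '/home/user/project'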
48,948
def _gen_explain_command(coll, spec, projection, skip, limit, batch_size, options, read_concern): cmd = _gen_find_command(coll, spec, projection, skip, limit, batch_size, options) if read_concern.level: return SON([('explain', cmd), ('readConcern', read_concern.document)]) return SON([('explain', cmd)])
[ "def", "_gen_explain_command", "(", "coll", ",", "spec", ",", "projection", ",", "skip", ",", "limit", ",", "batch_size", ",", "options", ",", "read_concern", ")", ":", "cmd", "=", "_gen_find_command", "(", "coll", ",", "spec", ",", "projection", ",", "skip", ",", "limit", ",", "batch_size", ",", "options", ")", "if", "read_concern", ".", "level", ":", "return", "SON", "(", "[", "(", "'explain'", ",", "cmd", ")", ",", "(", "'readConcern'", ",", "read_concern", ".", "document", ")", "]", ")", "return", "SON", "(", "[", "(", "'explain'", ",", "cmd", ")", "]", ")" ]
generate an explain command document .
train
true
48,949
@pytest.fixture(scope='module') def remote_resource(cloud_config): remote_uri = 'http://storage.googleapis.com/{}/'.format(cloud_config.storage_bucket) return (lambda path, tmpdir: fetch_gcs_resource((remote_uri + path.strip('/')), tmpdir))
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'module'", ")", "def", "remote_resource", "(", "cloud_config", ")", ":", "remote_uri", "=", "'http://storage.googleapis.com/{}/'", ".", "format", "(", "cloud_config", ".", "storage_bucket", ")", "return", "(", "lambda", "path", ",", "tmpdir", ":", "fetch_gcs_resource", "(", "(", "remote_uri", "+", "path", ".", "strip", "(", "'/'", ")", ")", ",", "tmpdir", ")", ")" ]
provides a function that downloads the given resource from cloud storage .
train
false
48,950
def resetVector(x1, x2): size = len(x1) for i in range(size): x2[i] = x1[i]
[ "def", "resetVector", "(", "x1", ",", "x2", ")", ":", "size", "=", "len", "(", "x1", ")", "for", "i", "in", "range", "(", "size", ")", ":", "x2", "[", "i", "]", "=", "x1", "[", "i", "]" ]
copies the contents of vector x1 into vector x2 .
train
true
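A minimal sketch of resetVector above; both vectors are assumed to have the same length:
x1 = [1, 2, 3]
x2 = [0, 0, 0]
resetVector(x1, x2)
x2   # -> [1, 2, 3]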
48,951
def fix_multiple_files(filenames, options, output=None): filenames = find_files(filenames, options.recursive, options.exclude) if (options.jobs > 1): import multiprocessing pool = multiprocessing.Pool(options.jobs) pool.map(_fix_file, [(name, options) for name in filenames]) else: for name in filenames: _fix_file((name, options, output))
[ "def", "fix_multiple_files", "(", "filenames", ",", "options", ",", "output", "=", "None", ")", ":", "filenames", "=", "find_files", "(", "filenames", ",", "options", ".", "recursive", ",", "options", ".", "exclude", ")", "if", "(", "options", ".", "jobs", ">", "1", ")", ":", "import", "multiprocessing", "pool", "=", "multiprocessing", ".", "Pool", "(", "options", ".", "jobs", ")", "pool", ".", "map", "(", "_fix_file", ",", "[", "(", "name", ",", "options", ")", "for", "name", "in", "filenames", "]", ")", "else", ":", "for", "name", "in", "filenames", ":", "_fix_file", "(", "(", "name", ",", "options", ",", "output", ")", ")" ]
fix a list of files , optionally in parallel .
train
true
48,952
def getTemplatesPath(subName=''): return getJoinedPath(getFabmetheusUtilitiesPath('templates'), subName)
[ "def", "getTemplatesPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getFabmetheusUtilitiesPath", "(", "'templates'", ")", ",", "subName", ")" ]
get the templates directory path .
train
false
48,954
def modifiers_string(modifiers): mod_names = [] if (modifiers & MOD_SHIFT): mod_names.append('MOD_SHIFT') if (modifiers & MOD_CTRL): mod_names.append('MOD_CTRL') if (modifiers & MOD_ALT): mod_names.append('MOD_ALT') if (modifiers & MOD_CAPSLOCK): mod_names.append('MOD_CAPSLOCK') if (modifiers & MOD_NUMLOCK): mod_names.append('MOD_NUMLOCK') if (modifiers & MOD_SCROLLLOCK): mod_names.append('MOD_SCROLLLOCK') if (modifiers & MOD_COMMAND): mod_names.append('MOD_COMMAND') if (modifiers & MOD_OPTION): mod_names.append('MOD_OPTION') if (modifiers & MOD_FUNCTION): mod_names.append('MOD_FUNCTION') return '|'.join(mod_names)
[ "def", "modifiers_string", "(", "modifiers", ")", ":", "mod_names", "=", "[", "]", "if", "(", "modifiers", "&", "MOD_SHIFT", ")", ":", "mod_names", ".", "append", "(", "'MOD_SHIFT'", ")", "if", "(", "modifiers", "&", "MOD_CTRL", ")", ":", "mod_names", ".", "append", "(", "'MOD_CTRL'", ")", "if", "(", "modifiers", "&", "MOD_ALT", ")", ":", "mod_names", ".", "append", "(", "'MOD_ALT'", ")", "if", "(", "modifiers", "&", "MOD_CAPSLOCK", ")", ":", "mod_names", ".", "append", "(", "'MOD_CAPSLOCK'", ")", "if", "(", "modifiers", "&", "MOD_NUMLOCK", ")", ":", "mod_names", ".", "append", "(", "'MOD_NUMLOCK'", ")", "if", "(", "modifiers", "&", "MOD_SCROLLLOCK", ")", ":", "mod_names", ".", "append", "(", "'MOD_SCROLLLOCK'", ")", "if", "(", "modifiers", "&", "MOD_COMMAND", ")", ":", "mod_names", ".", "append", "(", "'MOD_COMMAND'", ")", "if", "(", "modifiers", "&", "MOD_OPTION", ")", ":", "mod_names", ".", "append", "(", "'MOD_OPTION'", ")", "if", "(", "modifiers", "&", "MOD_FUNCTION", ")", ":", "mod_names", ".", "append", "(", "'MOD_FUNCTION'", ")", "return", "'|'", ".", "join", "(", "mod_names", ")" ]
return a string describing a set of modifiers .
train
true
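A minimal sketch of modifiers_string above, assuming the MOD_* bit-flag constants from the surrounding module (pyglet-style key modifiers):
modifiers_string(MOD_SHIFT | MOD_CTRL)   # -> 'MOD_SHIFT|MOD_CTRL'
modifiers_string(0)                      # -> '' (no modifiers set)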
48,955
def _handle_Integral(expr, func, order, hint): global y x = func.args[0] f = func.func if (hint == '1st_exact'): sol = expr.doit().subs(y, f(x)) del y elif (hint == '1st_exact_Integral'): sol = Eq(Subs(expr.lhs, y, f(x)), expr.rhs) del y elif (hint == 'nth_linear_constant_coeff_homogeneous'): sol = expr elif (not hint.endswith('_Integral')): sol = expr.doit() else: sol = expr return sol
[ "def", "_handle_Integral", "(", "expr", ",", "func", ",", "order", ",", "hint", ")", ":", "global", "y", "x", "=", "func", ".", "args", "[", "0", "]", "f", "=", "func", ".", "func", "if", "(", "hint", "==", "'1st_exact'", ")", ":", "sol", "=", "expr", ".", "doit", "(", ")", ".", "subs", "(", "y", ",", "f", "(", "x", ")", ")", "del", "y", "elif", "(", "hint", "==", "'1st_exact_Integral'", ")", ":", "sol", "=", "Eq", "(", "Subs", "(", "expr", ".", "lhs", ",", "y", ",", "f", "(", "x", ")", ")", ",", "expr", ".", "rhs", ")", "del", "y", "elif", "(", "hint", "==", "'nth_linear_constant_coeff_homogeneous'", ")", ":", "sol", "=", "expr", "elif", "(", "not", "hint", ".", "endswith", "(", "'_Integral'", ")", ")", ":", "sol", "=", "expr", ".", "doit", "(", ")", "else", ":", "sol", "=", "expr", "return", "sol" ]
converts a solution with integrals in it into an actual solution .
train
false
48,956
def _remove_deactivated(contexts): stack_contexts = tuple([h for h in contexts[0] if h.active]) head = contexts[1] while ((head is not None) and (not head.active)): head = head.old_contexts[1] ctx = head while (ctx is not None): parent = ctx.old_contexts[1] while (parent is not None): if parent.active: break ctx.old_contexts = parent.old_contexts parent = parent.old_contexts[1] ctx = parent return (stack_contexts, head)
[ "def", "_remove_deactivated", "(", "contexts", ")", ":", "stack_contexts", "=", "tuple", "(", "[", "h", "for", "h", "in", "contexts", "[", "0", "]", "if", "h", ".", "active", "]", ")", "head", "=", "contexts", "[", "1", "]", "while", "(", "(", "head", "is", "not", "None", ")", "and", "(", "not", "head", ".", "active", ")", ")", ":", "head", "=", "head", ".", "old_contexts", "[", "1", "]", "ctx", "=", "head", "while", "(", "ctx", "is", "not", "None", ")", ":", "parent", "=", "ctx", ".", "old_contexts", "[", "1", "]", "while", "(", "parent", "is", "not", "None", ")", ":", "if", "parent", ".", "active", ":", "break", "ctx", ".", "old_contexts", "=", "parent", ".", "old_contexts", "parent", "=", "parent", ".", "old_contexts", "[", "1", "]", "ctx", "=", "parent", "return", "(", "stack_contexts", ",", "head", ")" ]
remove deactivated handlers from the chain .
train
true
48,957
def migrate_data_dirs(): check_data_writable() vcs = data_dir('vcs') if (os.path.exists(appsettings.GIT_ROOT) and (not os.path.exists(vcs))): shutil.move(appsettings.GIT_ROOT, vcs) whoosh = data_dir('whoosh') if (os.path.exists(appsettings.WHOOSH_INDEX) and (not os.path.exists(whoosh))): shutil.move(appsettings.WHOOSH_INDEX, whoosh) ssh_home = os.path.expanduser('~/.ssh') ssh = data_dir('ssh') for name in ('known_hosts', 'id_rsa', 'id_rsa.pub'): source = os.path.join(ssh_home, name) target = os.path.join(ssh, name) if (os.path.exists(source) and (not os.path.exists(target))): shutil.copy(source, target)
[ "def", "migrate_data_dirs", "(", ")", ":", "check_data_writable", "(", ")", "vcs", "=", "data_dir", "(", "'vcs'", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "appsettings", ".", "GIT_ROOT", ")", "and", "(", "not", "os", ".", "path", ".", "exists", "(", "vcs", ")", ")", ")", ":", "shutil", ".", "move", "(", "appsettings", ".", "GIT_ROOT", ",", "vcs", ")", "whoosh", "=", "data_dir", "(", "'whoosh'", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "appsettings", ".", "WHOOSH_INDEX", ")", "and", "(", "not", "os", ".", "path", ".", "exists", "(", "whoosh", ")", ")", ")", ":", "shutil", ".", "move", "(", "appsettings", ".", "WHOOSH_INDEX", ",", "whoosh", ")", "ssh_home", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.ssh'", ")", "ssh", "=", "data_dir", "(", "'ssh'", ")", "for", "name", "in", "(", "'known_hosts'", ",", "'id_rsa'", ",", "'id_rsa.pub'", ")", ":", "source", "=", "os", ".", "path", ".", "join", "(", "ssh_home", ",", "name", ")", "target", "=", "os", ".", "path", ".", "join", "(", "ssh", ",", "name", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "source", ")", "and", "(", "not", "os", ".", "path", ".", "exists", "(", "target", ")", ")", ")", ":", "shutil", ".", "copy", "(", "source", ",", "target", ")" ]
migrate data directory from old locations to new consolidated data directory .
train
false
48,960
def terminal_len(value): if isinstance(value, bytes): value = value.decode('utf8', 'ignore') return len(value)
[ "def", "terminal_len", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "return", "len", "(", "value", ")" ]
returns the length the string would have when displayed .
train
false
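A minimal sketch of terminal_len above; byte strings are decoded as UTF-8 before measuring:
terminal_len('hello')           # -> 5
terminal_len(b'caf\xc3\xa9')    # -> 4: the bytes decode to 'café'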
48,961
def libvlc_media_discoverer_localized_name(p_mdis): f = (_Cfunctions.get('libvlc_media_discoverer_localized_name', None) or _Cfunction('libvlc_media_discoverer_localized_name', ((1,),), string_result, ctypes.c_void_p, MediaDiscoverer)) return f(p_mdis)
[ "def", "libvlc_media_discoverer_localized_name", "(", "p_mdis", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_discoverer_localized_name'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_discoverer_localized_name'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "string_result", ",", "ctypes", ".", "c_void_p", ",", "MediaDiscoverer", ")", ")", "return", "f", "(", "p_mdis", ")" ]
get the localized name of a media service discoverer object .
train
true