id_within_dataset: int64 (1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: list (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 value)
is_duplicated: bool (2 classes)
9,897
def butter2d_bp(size, cutin, cutoff, n): return (butter2d_lp(size, cutoff, n) - butter2d_lp(size, cutin, n))
[ "def", "butter2d_bp", "(", "size", ",", "cutin", ",", "cutoff", ",", "n", ")", ":", "return", "(", "butter2d_lp", "(", "size", ",", "cutoff", ",", "n", ")", "-", "butter2d_lp", "(", "size", ",", "cutin", ",", "n", ")", ")" ]
bandpass butterworth filter in two dimensions .
train
false
9,898
def test_fixes2(): header = get_pkg_data_contents(u'data/nonstandard_units.hdr', encoding=u'binary') with pytest.raises(wcs.InvalidTransformError): w = wcs.WCS(header, fix=False)
[ "def", "test_fixes2", "(", ")", ":", "header", "=", "get_pkg_data_contents", "(", "u'data/nonstandard_units.hdr'", ",", "encoding", "=", "u'binary'", ")", "with", "pytest", ".", "raises", "(", "wcs", ".", "InvalidTransformError", ")", ":", "w", "=", "wcs", ".", "WCS", "(", "header", ",", "fix", "=", "False", ")" ]
from github issue #1854 .
train
false
9,899
def tempdeny(ip=None, ttl=None, port=None, direction=None, comment=''): return _tmp_access_rule('tempdeny', ip, ttl, port, direction, comment)
[ "def", "tempdeny", "(", "ip", "=", "None", ",", "ttl", "=", "None", ",", "port", "=", "None", ",", "direction", "=", "None", ",", "comment", "=", "''", ")", ":", "return", "_tmp_access_rule", "(", "'tempdeny'", ",", "ip", ",", "ttl", ",", "port", ",", "direction", ",", "comment", ")" ]
add a rule to the temporary ip deny list .
train
true
9,900
def prompt(text, default=None, hide_input=False, confirmation_prompt=False, type=None, value_proc=None, prompt_suffix=': ', show_default=True, err=False, show_choices=True): result = None def prompt_func(text): f = ((hide_input and hidden_prompt_func) or visible_prompt_func) try: echo(text, nl=False, err=err) return f('') except (KeyboardInterrupt, EOFError): if hide_input: echo(None, err=err) raise Abort() if (value_proc is None): value_proc = convert_type(type, default) prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type) while 1: while 1: value = prompt_func(prompt) if value: break elif (default is not None): return default try: result = value_proc(value) except UsageError as e: echo(('Error: %s' % e.message), err=err) continue if (not confirmation_prompt): return result while 1: value2 = prompt_func('Repeat for confirmation: ') if value2: break if (value == value2): return result echo('Error: the two entered values do not match', err=err)
[ "def", "prompt", "(", "text", ",", "default", "=", "None", ",", "hide_input", "=", "False", ",", "confirmation_prompt", "=", "False", ",", "type", "=", "None", ",", "value_proc", "=", "None", ",", "prompt_suffix", "=", "': '", ",", "show_default", "=", "True", ",", "err", "=", "False", ",", "show_choices", "=", "True", ")", ":", "result", "=", "None", "def", "prompt_func", "(", "text", ")", ":", "f", "=", "(", "(", "hide_input", "and", "hidden_prompt_func", ")", "or", "visible_prompt_func", ")", "try", ":", "echo", "(", "text", ",", "nl", "=", "False", ",", "err", "=", "err", ")", "return", "f", "(", "''", ")", "except", "(", "KeyboardInterrupt", ",", "EOFError", ")", ":", "if", "hide_input", ":", "echo", "(", "None", ",", "err", "=", "err", ")", "raise", "Abort", "(", ")", "if", "(", "value_proc", "is", "None", ")", ":", "value_proc", "=", "convert_type", "(", "type", ",", "default", ")", "prompt", "=", "_build_prompt", "(", "text", ",", "prompt_suffix", ",", "show_default", ",", "default", ",", "show_choices", ",", "type", ")", "while", "1", ":", "while", "1", ":", "value", "=", "prompt_func", "(", "prompt", ")", "if", "value", ":", "break", "elif", "(", "default", "is", "not", "None", ")", ":", "return", "default", "try", ":", "result", "=", "value_proc", "(", "value", ")", "except", "UsageError", "as", "e", ":", "echo", "(", "(", "'Error: %s'", "%", "e", ".", "message", ")", ",", "err", "=", "err", ")", "continue", "if", "(", "not", "confirmation_prompt", ")", ":", "return", "result", "while", "1", ":", "value2", "=", "prompt_func", "(", "'Repeat for confirmation: '", ")", "if", "value2", ":", "break", "if", "(", "value", "==", "value2", ")", ":", "return", "result", "echo", "(", "'Error: the two entered values do not match'", ",", "err", "=", "err", ")" ]
presents the user with an input widget and returns the input .
train
true
9,902
def _is_in_course_tree(block): ancestor = block.get_parent() while ((ancestor is not None) and (ancestor.location.category != 'course')): ancestor = ancestor.get_parent() return (ancestor is not None)
[ "def", "_is_in_course_tree", "(", "block", ")", ":", "ancestor", "=", "block", ".", "get_parent", "(", ")", "while", "(", "(", "ancestor", "is", "not", "None", ")", "and", "(", "ancestor", ".", "location", ".", "category", "!=", "'course'", ")", ")", ":", "ancestor", "=", "ancestor", ".", "get_parent", "(", ")", "return", "(", "ancestor", "is", "not", "None", ")" ]
check that the xblock is in the course tree .
train
false
9,903
def str_version_to_evr(verstring): if (verstring in [None, '']): return ('0', '', '') idx_e = verstring.find(':') if (idx_e != (-1)): try: epoch = str(int(verstring[:idx_e])) except ValueError: epoch = '0' else: epoch = '0' idx_r = verstring.find('-') if (idx_r != (-1)): version = verstring[(idx_e + 1):idx_r] release = verstring[(idx_r + 1):] else: version = verstring[(idx_e + 1):] release = '' return (epoch, version, release)
[ "def", "str_version_to_evr", "(", "verstring", ")", ":", "if", "(", "verstring", "in", "[", "None", ",", "''", "]", ")", ":", "return", "(", "'0'", ",", "''", ",", "''", ")", "idx_e", "=", "verstring", ".", "find", "(", "':'", ")", "if", "(", "idx_e", "!=", "(", "-", "1", ")", ")", ":", "try", ":", "epoch", "=", "str", "(", "int", "(", "verstring", "[", ":", "idx_e", "]", ")", ")", "except", "ValueError", ":", "epoch", "=", "'0'", "else", ":", "epoch", "=", "'0'", "idx_r", "=", "verstring", ".", "find", "(", "'-'", ")", "if", "(", "idx_r", "!=", "(", "-", "1", ")", ")", ":", "version", "=", "verstring", "[", "(", "idx_e", "+", "1", ")", ":", "idx_r", "]", "release", "=", "verstring", "[", "(", "idx_r", "+", "1", ")", ":", "]", "else", ":", "version", "=", "verstring", "[", "(", "idx_e", "+", "1", ")", ":", "]", "release", "=", "''", "return", "(", "epoch", ",", "version", ",", "release", ")" ]
split the package version string into epoch, version and release .
train
false
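A minimal usage sketch (not part of the dataset) for the str_version_to_evr snippet above, assuming the function is in scope; it splits an rpm-style epoch:version-release string:

# hedged example: epoch defaults to '0' and release to '' when absent
assert str_version_to_evr('1:2.4.6-7.el7') == ('1', '2.4.6', '7.el7')
assert str_version_to_evr('2.4.6') == ('0', '2.4.6', '')
assert str_version_to_evr('') == ('0', '', '')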
9,904
def test_shared_float32(): theano.shared.constructors.append(cuda.shared_constructor) a = theano.shared(numpy.ones((2, 3), dtype='float32')) assert isinstance(a.type, tcn.CudaNdarrayType) del theano.shared.constructors[(-1)]
[ "def", "test_shared_float32", "(", ")", ":", "theano", ".", "shared", ".", "constructors", ".", "append", "(", "cuda", ".", "shared_constructor", ")", "a", "=", "theano", ".", "shared", "(", "numpy", ".", "ones", "(", "(", "2", ",", "3", ")", ",", "dtype", "=", "'float32'", ")", ")", "assert", "isinstance", "(", "a", ".", "type", ",", "tcn", ".", "CudaNdarrayType", ")", "del", "theano", ".", "shared", ".", "constructors", "[", "(", "-", "1", ")", "]" ]
test use of cuda .
train
false
9,905
def build_alert_hooks(patterns_file, warnfile, overrides_file=None): pattern_lines = patterns_file.readlines() patterns = zip(pattern_lines[0::4], pattern_lines[1::4], pattern_lines[2::4]) _assert_is_all_blank_lines(pattern_lines[3::4], patterns_file) overrides_map = _read_overrides(overrides_file) hooks = [] for (msgtype, regex, alert) in patterns: regex = overrides_map.get(regex, regex) regex = re.compile(regex.rstrip('\n')) alert_function = make_alert(warnfile, msgtype.rstrip('\n'), alert.rstrip('\n')) hooks.append((regex, alert_function)) return hooks
[ "def", "build_alert_hooks", "(", "patterns_file", ",", "warnfile", ",", "overrides_file", "=", "None", ")", ":", "pattern_lines", "=", "patterns_file", ".", "readlines", "(", ")", "patterns", "=", "zip", "(", "pattern_lines", "[", "0", ":", ":", "4", "]", ",", "pattern_lines", "[", "1", ":", ":", "4", "]", ",", "pattern_lines", "[", "2", ":", ":", "4", "]", ")", "_assert_is_all_blank_lines", "(", "pattern_lines", "[", "3", ":", ":", "4", "]", ",", "patterns_file", ")", "overrides_map", "=", "_read_overrides", "(", "overrides_file", ")", "hooks", "=", "[", "]", "for", "(", "msgtype", ",", "regex", ",", "alert", ")", "in", "patterns", ":", "regex", "=", "overrides_map", ".", "get", "(", "regex", ",", "regex", ")", "regex", "=", "re", ".", "compile", "(", "regex", ".", "rstrip", "(", "'\\n'", ")", ")", "alert_function", "=", "make_alert", "(", "warnfile", ",", "msgtype", ".", "rstrip", "(", "'\\n'", ")", ",", "alert", ".", "rstrip", "(", "'\\n'", ")", ")", "hooks", ".", "append", "(", "(", "regex", ",", "alert_function", ")", ")", "return", "hooks" ]
parse data in patterns file and transform into alert_hook list .
train
false
9,906
def source_tmux_files(pl, args, tmux_version=None, source_tmux_file=source_tmux_file): tmux_version = (tmux_version or get_tmux_version(pl)) source_tmux_file(os.path.join(TMUX_CONFIG_DIRECTORY, u'powerline-base.conf')) for (fname, priority) in sorted(get_tmux_configs(tmux_version), key=(lambda v: v[1])): source_tmux_file(fname) if (not os.environ.get(u'POWERLINE_COMMAND')): cmd = deduce_command() if cmd: set_tmux_environment(u'POWERLINE_COMMAND', deduce_command(), remove=False) try: run_tmux_command(u'refresh-client') except subprocess.CalledProcessError: pass
[ "def", "source_tmux_files", "(", "pl", ",", "args", ",", "tmux_version", "=", "None", ",", "source_tmux_file", "=", "source_tmux_file", ")", ":", "tmux_version", "=", "(", "tmux_version", "or", "get_tmux_version", "(", "pl", ")", ")", "source_tmux_file", "(", "os", ".", "path", ".", "join", "(", "TMUX_CONFIG_DIRECTORY", ",", "u'powerline-base.conf'", ")", ")", "for", "(", "fname", ",", "priority", ")", "in", "sorted", "(", "get_tmux_configs", "(", "tmux_version", ")", ",", "key", "=", "(", "lambda", "v", ":", "v", "[", "1", "]", ")", ")", ":", "source_tmux_file", "(", "fname", ")", "if", "(", "not", "os", ".", "environ", ".", "get", "(", "u'POWERLINE_COMMAND'", ")", ")", ":", "cmd", "=", "deduce_command", "(", ")", "if", "cmd", ":", "set_tmux_environment", "(", "u'POWERLINE_COMMAND'", ",", "deduce_command", "(", ")", ",", "remove", "=", "False", ")", "try", ":", "run_tmux_command", "(", "u'refresh-client'", ")", "except", "subprocess", ".", "CalledProcessError", ":", "pass" ]
source relevant version-specific tmux configuration files . files are sourced in the following order: first relevant files with older versions are sourced .
train
false
9,907
def test_warning_on_non_existant_path_FileLink(): fl = display.FileLink('example.txt') nt.assert_true(fl._repr_html_().startswith('Path (<tt>example.txt</tt>)'))
[ "def", "test_warning_on_non_existant_path_FileLink", "(", ")", ":", "fl", "=", "display", ".", "FileLink", "(", "'example.txt'", ")", "nt", ".", "assert_true", "(", "fl", ".", "_repr_html_", "(", ")", ".", "startswith", "(", "'Path (<tt>example.txt</tt>)'", ")", ")" ]
filelink: calling _repr_html_ on non-existent files returns a warning .
train
false
9,908
def MakeProxyType(name, exposed, _cache={}): exposed = tuple(exposed) try: return _cache[(name, exposed)] except KeyError: pass dic = {} for meth in exposed: exec ('def %s(self, *args, **kwds):\n return self._callmethod(%r, args, kwds)' % (meth, meth)) in dic ProxyType = type(name, (BaseProxy,), dic) ProxyType._exposed_ = exposed _cache[(name, exposed)] = ProxyType return ProxyType
[ "def", "MakeProxyType", "(", "name", ",", "exposed", ",", "_cache", "=", "{", "}", ")", ":", "exposed", "=", "tuple", "(", "exposed", ")", "try", ":", "return", "_cache", "[", "(", "name", ",", "exposed", ")", "]", "except", "KeyError", ":", "pass", "dic", "=", "{", "}", "for", "meth", "in", "exposed", ":", "exec", "(", "'def %s(self, *args, **kwds):\\n return self._callmethod(%r, args, kwds)'", "%", "(", "meth", ",", "meth", ")", ")", "in", "dic", "ProxyType", "=", "type", "(", "name", ",", "(", "BaseProxy", ",", ")", ",", "dic", ")", "ProxyType", ".", "_exposed_", "=", "exposed", "_cache", "[", "(", "name", ",", "exposed", ")", "]", "=", "ProxyType", "return", "ProxyType" ]
return a proxy type whose methods are given by exposed .
train
false
9,909
def _mangle_attr(name): return ('m_' + name)
[ "def", "_mangle_attr", "(", "name", ")", ":", "return", "(", "'m_'", "+", "name", ")" ]
mangle attributes .
train
false
9,910
def mpl_dates_to_datestrings(dates, mpl_formatter): _dates = dates if (mpl_formatter == 'TimeSeries_DateFormatter'): try: dates = matplotlib.dates.epoch2num([(((date * 24) * 60) * 60) for date in dates]) dates = matplotlib.dates.num2date(dates, tz=pytz.utc) except: return _dates else: try: dates = matplotlib.dates.num2date(dates, tz=pytz.utc) except: return _dates time_stings = [' '.join(date.isoformat().split('+')[0].split('T')) for date in dates] return time_stings
[ "def", "mpl_dates_to_datestrings", "(", "dates", ",", "mpl_formatter", ")", ":", "_dates", "=", "dates", "if", "(", "mpl_formatter", "==", "'TimeSeries_DateFormatter'", ")", ":", "try", ":", "dates", "=", "matplotlib", ".", "dates", ".", "epoch2num", "(", "[", "(", "(", "(", "date", "*", "24", ")", "*", "60", ")", "*", "60", ")", "for", "date", "in", "dates", "]", ")", "dates", "=", "matplotlib", ".", "dates", ".", "num2date", "(", "dates", ",", "tz", "=", "pytz", ".", "utc", ")", "except", ":", "return", "_dates", "else", ":", "try", ":", "dates", "=", "matplotlib", ".", "dates", ".", "num2date", "(", "dates", ",", "tz", "=", "pytz", ".", "utc", ")", "except", ":", "return", "_dates", "time_stings", "=", "[", "' '", ".", "join", "(", "date", ".", "isoformat", "(", ")", ".", "split", "(", "'+'", ")", "[", "0", "]", ".", "split", "(", "'T'", ")", ")", "for", "date", "in", "dates", "]", "return", "time_stings" ]
convert matplotlib dates to iso-formatted-like time strings .
train
false
9,912
def print_ret(ret): if (ret == 0): utils.print_col('success', 'green') elif (ret == (- signal.SIGSEGV)): utils.print_col('segfault', 'red') else: utils.print_col('error {}'.format(ret), 'yellow') print ()
[ "def", "print_ret", "(", "ret", ")", ":", "if", "(", "ret", "==", "0", ")", ":", "utils", ".", "print_col", "(", "'success'", ",", "'green'", ")", "elif", "(", "ret", "==", "(", "-", "signal", ".", "SIGSEGV", ")", ")", ":", "utils", ".", "print_col", "(", "'segfault'", ",", "'red'", ")", "else", ":", "utils", ".", "print_col", "(", "'error {}'", ".", "format", "(", "ret", ")", ",", "'yellow'", ")", "print", "(", ")" ]
print information about an exit status .
train
false
9,913
def remove_whitespace(tokens): return [token for token in tokens if (token.type != u'S')]
[ "def", "remove_whitespace", "(", "tokens", ")", ":", "return", "[", "token", "for", "token", "in", "tokens", "if", "(", "token", ".", "type", "!=", "u'S'", ")", "]" ]
remove any top-level whitespace in a token list .
train
false
9,914
def create_sample_bookstore(): store = bookstore.Bookstore() shelf = bookstore_pb2.Shelf() shelf.theme = 'Fiction' (_, fiction) = store.create_shelf(shelf) book = bookstore_pb2.Book() book.title = 'README' book.author = 'Neal Stephenson' store.create_book(fiction, book) shelf = bookstore_pb2.Shelf() shelf.theme = 'Fantasy' (_, fantasy) = store.create_shelf(shelf) book = bookstore_pb2.Book() book.title = 'A Game of Thrones' book.author = 'George R.R. Martin' store.create_book(fantasy, book) return store
[ "def", "create_sample_bookstore", "(", ")", ":", "store", "=", "bookstore", ".", "Bookstore", "(", ")", "shelf", "=", "bookstore_pb2", ".", "Shelf", "(", ")", "shelf", ".", "theme", "=", "'Fiction'", "(", "_", ",", "fiction", ")", "=", "store", ".", "create_shelf", "(", "shelf", ")", "book", "=", "bookstore_pb2", ".", "Book", "(", ")", "book", ".", "title", "=", "'README'", "book", ".", "author", "=", "'Neal Stephenson'", "store", ".", "create_book", "(", "fiction", ",", "book", ")", "shelf", "=", "bookstore_pb2", ".", "Shelf", "(", ")", "shelf", ".", "theme", "=", "'Fantasy'", "(", "_", ",", "fantasy", ")", "=", "store", ".", "create_shelf", "(", "shelf", ")", "book", "=", "bookstore_pb2", ".", "Book", "(", ")", "book", ".", "title", "=", "'A Game of Thrones'", "book", ".", "author", "=", "'George R.R. Martin'", "store", ".", "create_book", "(", "fantasy", ",", "book", ")", "return", "store" ]
creates a bookstore with some initial sample data .
train
false
9,915
def _testing_mode(): return (os.getenv('_MNE_GUI_TESTING_MODE', '') == 'true')
[ "def", "_testing_mode", "(", ")", ":", "return", "(", "os", ".", "getenv", "(", "'_MNE_GUI_TESTING_MODE'", ",", "''", ")", "==", "'true'", ")" ]
helper to determine if we're running tests .
train
false
9,916
def libvlc_media_duplicate(p_md): f = (_Cfunctions.get('libvlc_media_duplicate', None) or _Cfunction('libvlc_media_duplicate', ((1,),), class_result(Media), ctypes.c_void_p, Media)) return f(p_md)
[ "def", "libvlc_media_duplicate", "(", "p_md", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_duplicate'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_duplicate'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "class_result", "(", "Media", ")", ",", "ctypes", ".", "c_void_p", ",", "Media", ")", ")", "return", "f", "(", "p_md", ")" ]
duplicate a media descriptor object .
train
true
9,917
def sync_returners(saltenv='base'): return salt.utils.extmods.sync(__opts__, 'returners', saltenv=saltenv)[0]
[ "def", "sync_returners", "(", "saltenv", "=", "'base'", ")", ":", "return", "salt", ".", "utils", ".", "extmods", ".", "sync", "(", "__opts__", ",", "'returners'", ",", "saltenv", "=", "saltenv", ")", "[", "0", "]" ]
sync returner modules from salt://_returners to the master . saltenv : base - the fileserver environment from which to sync .
train
false
9,918
def discard_draft(exp_id, user_id): exp_user_data = user_models.ExplorationUserDataModel.get(user_id, exp_id) if exp_user_data: exp_user_data.draft_change_list = None exp_user_data.draft_change_list_last_updated = None exp_user_data.draft_change_list_exp_version = None exp_user_data.put()
[ "def", "discard_draft", "(", "exp_id", ",", "user_id", ")", ":", "exp_user_data", "=", "user_models", ".", "ExplorationUserDataModel", ".", "get", "(", "user_id", ",", "exp_id", ")", "if", "exp_user_data", ":", "exp_user_data", ".", "draft_change_list", "=", "None", "exp_user_data", ".", "draft_change_list_last_updated", "=", "None", "exp_user_data", ".", "draft_change_list_exp_version", "=", "None", "exp_user_data", ".", "put", "(", ")" ]
discard the draft for the given user and exploration .
train
false
9,919
def request_oauth_completion(hass): configurator = get_component('configurator') if ('fitbit' in _CONFIGURING): configurator.notify_errors(_CONFIGURING['fitbit'], 'Failed to register, please try again.') return def fitbit_configuration_callback(callback_data): 'The actions to do when our configuration callback is called.' start_url = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_START) description = 'Please authorize Fitbit by visiting {}'.format(start_url) _CONFIGURING['fitbit'] = configurator.request_config(hass, 'Fitbit', fitbit_configuration_callback, description=description, submit_caption='I have authorized Fitbit.')
[ "def", "request_oauth_completion", "(", "hass", ")", ":", "configurator", "=", "get_component", "(", "'configurator'", ")", "if", "(", "'fitbit'", "in", "_CONFIGURING", ")", ":", "configurator", ".", "notify_errors", "(", "_CONFIGURING", "[", "'fitbit'", "]", ",", "'Failed to register, please try again.'", ")", "return", "def", "fitbit_configuration_callback", "(", "callback_data", ")", ":", "start_url", "=", "'{}{}'", ".", "format", "(", "hass", ".", "config", ".", "api", ".", "base_url", ",", "FITBIT_AUTH_START", ")", "description", "=", "'Please authorize Fitbit by visiting {}'", ".", "format", "(", "start_url", ")", "_CONFIGURING", "[", "'fitbit'", "]", "=", "configurator", ".", "request_config", "(", "hass", ",", "'Fitbit'", ",", "fitbit_configuration_callback", ",", "description", "=", "description", ",", "submit_caption", "=", "'I have authorized Fitbit.'", ")" ]
request user complete fitbit oauth2 flow .
train
false
9,920
def setup_args(config_files=[]): global args arglist = sys.argv[1:] for config_file in filter(os.path.isfile, config_files): arglist.insert(0, ('@' + config_file)) args = parser.parse_args(arglist) if args.stream: args.stream = [stream.lower() for stream in args.stream]
[ "def", "setup_args", "(", "config_files", "=", "[", "]", ")", ":", "global", "args", "arglist", "=", "sys", ".", "argv", "[", "1", ":", "]", "for", "config_file", "in", "filter", "(", "os", ".", "path", ".", "isfile", ",", "config_files", ")", ":", "arglist", ".", "insert", "(", "0", ",", "(", "'@'", "+", "config_file", ")", ")", "args", "=", "parser", ".", "parse_args", "(", "arglist", ")", "if", "args", ".", "stream", ":", "args", ".", "stream", "=", "[", "stream", ".", "lower", "(", ")", "for", "stream", "in", "args", ".", "stream", "]" ]
adds additional args to allow the vm name or uuid to be set .
train
false
9,922
def binding_for(obj, name, parent=None): if isinstance(obj, QObject): meta = obj.metaObject() index = meta.indexOfProperty(name) if (index == (-1)): boundprop = DynamicPropertyBinding(obj, name, parent) else: boundprop = PropertyBinding(obj, name, parent) else: raise TypeError return boundprop
[ "def", "binding_for", "(", "obj", ",", "name", ",", "parent", "=", "None", ")", ":", "if", "isinstance", "(", "obj", ",", "QObject", ")", ":", "meta", "=", "obj", ".", "metaObject", "(", ")", "index", "=", "meta", ".", "indexOfProperty", "(", "name", ")", "if", "(", "index", "==", "(", "-", "1", ")", ")", ":", "boundprop", "=", "DynamicPropertyBinding", "(", "obj", ",", "name", ",", "parent", ")", "else", ":", "boundprop", "=", "PropertyBinding", "(", "obj", ",", "name", ",", "parent", ")", "else", ":", "raise", "TypeError", "return", "boundprop" ]
return a suitable binding for property name of an obj .
train
false
9,923
def test_options_from_venv_config(script, virtualenv): from pip.locations import config_basename conf = '[global]\nno-index = true' ini = (virtualenv.location / config_basename) with open(ini, 'w') as f: f.write(conf) result = script.pip('install', '-vvv', 'INITools', expect_error=True) assert ('Ignoring indexes:' in result.stdout), str(result) assert ('DistributionNotFound: No matching distribution found for INITools' in result.stdout)
[ "def", "test_options_from_venv_config", "(", "script", ",", "virtualenv", ")", ":", "from", "pip", ".", "locations", "import", "config_basename", "conf", "=", "'[global]\\nno-index = true'", "ini", "=", "(", "virtualenv", ".", "location", "/", "config_basename", ")", "with", "open", "(", "ini", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "conf", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-vvv'", ",", "'INITools'", ",", "expect_error", "=", "True", ")", "assert", "(", "'Ignoring indexes:'", "in", "result", ".", "stdout", ")", ",", "str", "(", "result", ")", "assert", "(", "'DistributionNotFound: No matching distribution found for INITools'", "in", "result", ".", "stdout", ")" ]
test if configoptionparser reads a virtualenv-local config file .
train
false
9,924
def delete_rax(args): print ("--- Cleaning CloudServers matching '%s'" % args.match_re) search_opts = dict(name=('^%s' % args.match_re)) for region in pyrax.identity.services.compute.regions: cs = pyrax.connect_to_cloudservers(region=region) servers = rax_list_iterator(cs.servers, search_opts=search_opts) for server in servers: prompt_and_delete(server, ('Delete matching %s? [y/n]: ' % server), args.assumeyes)
[ "def", "delete_rax", "(", "args", ")", ":", "print", "(", "\"--- Cleaning CloudServers matching '%s'\"", "%", "args", ".", "match_re", ")", "search_opts", "=", "dict", "(", "name", "=", "(", "'^%s'", "%", "args", ".", "match_re", ")", ")", "for", "region", "in", "pyrax", ".", "identity", ".", "services", ".", "compute", ".", "regions", ":", "cs", "=", "pyrax", ".", "connect_to_cloudservers", "(", "region", "=", "region", ")", "servers", "=", "rax_list_iterator", "(", "cs", ".", "servers", ",", "search_opts", "=", "search_opts", ")", "for", "server", "in", "servers", ":", "prompt_and_delete", "(", "server", ",", "(", "'Delete matching %s? [y/n]: '", "%", "server", ")", ",", "args", ".", "assumeyes", ")" ]
function for deleting cloudservers .
train
false
9,925
def create_image(ami_name, instance_id=None, instance_name=None, tags=None, region=None, key=None, keyid=None, profile=None, description=None, no_reboot=False, dry_run=False, filters=None): instances = find_instances(instance_id=instance_id, name=instance_name, tags=tags, region=region, key=key, keyid=keyid, profile=profile, return_objs=True, filters=filters) if (not instances): log.error('Source instance not found') return False if (len(instances) > 1): log.error('Multiple instances found, must match exactly only one instance to create an image from') return False instance = instances[0] try: return instance.create_image(ami_name, description=description, no_reboot=no_reboot, dry_run=dry_run) except boto.exception.BotoServerError as exc: log.error(exc) return False
[ "def", "create_image", "(", "ami_name", ",", "instance_id", "=", "None", ",", "instance_name", "=", "None", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "description", "=", "None", ",", "no_reboot", "=", "False", ",", "dry_run", "=", "False", ",", "filters", "=", "None", ")", ":", "instances", "=", "find_instances", "(", "instance_id", "=", "instance_id", ",", "name", "=", "instance_name", ",", "tags", "=", "tags", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ",", "return_objs", "=", "True", ",", "filters", "=", "filters", ")", "if", "(", "not", "instances", ")", ":", "log", ".", "error", "(", "'Source instance not found'", ")", "return", "False", "if", "(", "len", "(", "instances", ")", ">", "1", ")", ":", "log", ".", "error", "(", "'Multiple instances found, must match exactly only one instance to create an image from'", ")", "return", "False", "instance", "=", "instances", "[", "0", "]", "try", ":", "return", "instance", ".", "create_image", "(", "ami_name", ",", "description", "=", "description", ",", "no_reboot", "=", "no_reboot", ",", "dry_run", "=", "dry_run", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "exc", ":", "log", ".", "error", "(", "exc", ")", "return", "False" ]
create an ami from a single matching instance .
train
true
9,927
def findLocalPort(ports): retVal = None for port in ports: try: try: s = socket._orig_socket(socket.AF_INET, socket.SOCK_STREAM) except AttributeError: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((LOCALHOST, port)) retVal = port break except socket.error: pass finally: try: s.close() except socket.error: pass return retVal
[ "def", "findLocalPort", "(", "ports", ")", ":", "retVal", "=", "None", "for", "port", "in", "ports", ":", "try", ":", "try", ":", "s", "=", "socket", ".", "_orig_socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "except", "AttributeError", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "s", ".", "connect", "(", "(", "LOCALHOST", ",", "port", ")", ")", "retVal", "=", "port", "break", "except", "socket", ".", "error", ":", "pass", "finally", ":", "try", ":", "s", ".", "close", "(", ")", "except", "socket", ".", "error", ":", "pass", "return", "retVal" ]
find the first opened localhost port from a given list of ports .
train
false
9,929
def ConvertStringToFilename(name): return re.sub('\\W', (lambda x: ('%%%02X' % ord(x.group(0)))), name, flags=re.UNICODE).rstrip('/')
[ "def", "ConvertStringToFilename", "(", "name", ")", ":", "return", "re", ".", "sub", "(", "'\\\\W'", ",", "(", "lambda", "x", ":", "(", "'%%%02X'", "%", "ord", "(", "x", ".", "group", "(", "0", ")", ")", ")", ")", ",", "name", ",", "flags", "=", "re", ".", "UNICODE", ")", ".", "rstrip", "(", "'/'", ")" ]
converts a unicode string to a filesystem-safe filename .
train
true
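A minimal usage sketch (not part of the dataset) for the ConvertStringToFilename snippet above, assuming the function is in scope; every non-word character is replaced by its percent-encoded hex ordinal:

# hedged example: ':' -> %3A, '/' -> %2F, '.' -> %2E, ' ' -> %20
assert ConvertStringToFilename(u'aff4:/C.1') == 'aff4%3A%2FC%2E1'
assert ConvertStringToFilename(u'hello world') == 'hello%20world'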
9,930
def p_argument_expression_list(t): pass
[ "def", "p_argument_expression_list", "(", "t", ")", ":", "pass" ]
argument_expression_list : assignment_expression | argument_expression_list comma assignment_expression .
train
false
9,932
def requires_app_credentials(func): @wraps(func) def auth_wrapper(self, *args, **kwargs): (client_id, client_secret) = self.session.retrieve_client_credentials() if (client_id and client_secret): return func(self, *args, **kwargs) else: from .exceptions import error_for r = generate_fake_error_response('{"message": "Requires username/password authentication"}') raise error_for(r) return auth_wrapper
[ "def", "requires_app_credentials", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "auth_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "(", "client_id", ",", "client_secret", ")", "=", "self", ".", "session", ".", "retrieve_client_credentials", "(", ")", "if", "(", "client_id", "and", "client_secret", ")", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "from", ".", "exceptions", "import", "error_for", "r", "=", "generate_fake_error_response", "(", "'{\"message\": \"Requires username/password authentication\"}'", ")", "raise", "error_for", "(", "r", ")", "return", "auth_wrapper" ]
require client_id and client_secret to be associated .
train
false
9,933
def get_valid_layer_name(layer, overwrite): if isinstance(layer, Layer): layer_name = layer.name elif isinstance(layer, basestring): layer_name = layer else: msg = 'You must pass either a filename or a GeoNode layer object' raise GeoNodeException(msg) if overwrite: return layer_name else: return get_valid_name(layer_name)
[ "def", "get_valid_layer_name", "(", "layer", ",", "overwrite", ")", ":", "if", "isinstance", "(", "layer", ",", "Layer", ")", ":", "layer_name", "=", "layer", ".", "name", "elif", "isinstance", "(", "layer", ",", "basestring", ")", ":", "layer_name", "=", "layer", "else", ":", "msg", "=", "'You must pass either a filename or a GeoNode layer object'", "raise", "GeoNodeException", "(", "msg", ")", "if", "overwrite", ":", "return", "layer_name", "else", ":", "return", "get_valid_name", "(", "layer_name", ")" ]
checks if the layer is a string and fetches it from the database .
train
false
9,934
@hgcommand def mail(ui, repo, *pats, **opts): if codereview_disabled: raise hg_util.Abort(codereview_disabled) (cl, err) = CommandLineCL(ui, repo, pats, opts, op='mail', defaultcc=defaultcc) if (err != ''): raise hg_util.Abort(err) cl.Upload(ui, repo, gofmt_just_warn=True) if (not cl.reviewer): if ((not defaultcc) or cl.private): raise hg_util.Abort('no reviewers listed in CL') cl.cc = Sub(cl.cc, defaultcc) cl.reviewer = defaultcc cl.Flush(ui, repo) if (cl.files == []): raise hg_util.Abort('no changed files, not sending mail') cl.Mail(ui, repo)
[ "@", "hgcommand", "def", "mail", "(", "ui", ",", "repo", ",", "*", "pats", ",", "**", "opts", ")", ":", "if", "codereview_disabled", ":", "raise", "hg_util", ".", "Abort", "(", "codereview_disabled", ")", "(", "cl", ",", "err", ")", "=", "CommandLineCL", "(", "ui", ",", "repo", ",", "pats", ",", "opts", ",", "op", "=", "'mail'", ",", "defaultcc", "=", "defaultcc", ")", "if", "(", "err", "!=", "''", ")", ":", "raise", "hg_util", ".", "Abort", "(", "err", ")", "cl", ".", "Upload", "(", "ui", ",", "repo", ",", "gofmt_just_warn", "=", "True", ")", "if", "(", "not", "cl", ".", "reviewer", ")", ":", "if", "(", "(", "not", "defaultcc", ")", "or", "cl", ".", "private", ")", ":", "raise", "hg_util", ".", "Abort", "(", "'no reviewers listed in CL'", ")", "cl", ".", "cc", "=", "Sub", "(", "cl", ".", "cc", ",", "defaultcc", ")", "cl", ".", "reviewer", "=", "defaultcc", "cl", ".", "Flush", "(", "ui", ",", "repo", ")", "if", "(", "cl", ".", "files", "==", "[", "]", ")", ":", "raise", "hg_util", ".", "Abort", "(", "'no changed files, not sending mail'", ")", "cl", ".", "Mail", "(", "ui", ",", "repo", ")" ]
mail a change for review . uploads a patch to the code review server and then sends mail to the reviewer and cc list asking for a review .
train
false
9,936
def addPointOnPath(path, pathIndex, pixelTable, point, pointIndex, width): pointIndexMinusOne = (pointIndex - 1) if ((pointIndex < len(path)) and (pointIndexMinusOne >= 0)): segmentTable = {} begin = path[pointIndexMinusOne] end = path[pointIndex] euclidean.addValueSegmentToPixelTable(begin, end, segmentTable, pathIndex, width) euclidean.removePixelTableFromPixelTable(segmentTable, pixelTable) if (pointIndexMinusOne >= 0): begin = path[pointIndexMinusOne] euclidean.addValueSegmentToPixelTable(begin, point, pixelTable, pathIndex, width) if (pointIndex < len(path)): end = path[pointIndex] euclidean.addValueSegmentToPixelTable(point, end, pixelTable, pathIndex, width) path.insert(pointIndex, point)
[ "def", "addPointOnPath", "(", "path", ",", "pathIndex", ",", "pixelTable", ",", "point", ",", "pointIndex", ",", "width", ")", ":", "pointIndexMinusOne", "=", "(", "pointIndex", "-", "1", ")", "if", "(", "(", "pointIndex", "<", "len", "(", "path", ")", ")", "and", "(", "pointIndexMinusOne", ">=", "0", ")", ")", ":", "segmentTable", "=", "{", "}", "begin", "=", "path", "[", "pointIndexMinusOne", "]", "end", "=", "path", "[", "pointIndex", "]", "euclidean", ".", "addValueSegmentToPixelTable", "(", "begin", ",", "end", ",", "segmentTable", ",", "pathIndex", ",", "width", ")", "euclidean", ".", "removePixelTableFromPixelTable", "(", "segmentTable", ",", "pixelTable", ")", "if", "(", "pointIndexMinusOne", ">=", "0", ")", ":", "begin", "=", "path", "[", "pointIndexMinusOne", "]", "euclidean", ".", "addValueSegmentToPixelTable", "(", "begin", ",", "point", ",", "pixelTable", ",", "pathIndex", ",", "width", ")", "if", "(", "pointIndex", "<", "len", "(", "path", ")", ")", ":", "end", "=", "path", "[", "pointIndex", "]", "euclidean", ".", "addValueSegmentToPixelTable", "(", "point", ",", "end", ",", "pixelTable", ",", "pathIndex", ",", "width", ")", "path", ".", "insert", "(", "pointIndex", ",", "point", ")" ]
add a point to a path and the pixel table .
train
false
9,937
def parse_flag_set(source): flags = 0 try: while True: saved_pos = source.pos ch = source.get() if (ch == 'V'): ch += source.get() flags |= REGEX_FLAGS[ch] except KeyError: source.pos = saved_pos return flags
[ "def", "parse_flag_set", "(", "source", ")", ":", "flags", "=", "0", "try", ":", "while", "True", ":", "saved_pos", "=", "source", ".", "pos", "ch", "=", "source", ".", "get", "(", ")", "if", "(", "ch", "==", "'V'", ")", ":", "ch", "+=", "source", ".", "get", "(", ")", "flags", "|=", "REGEX_FLAGS", "[", "ch", "]", "except", "KeyError", ":", "source", ".", "pos", "=", "saved_pos", "return", "flags" ]
parses a set of inline flags .
train
false
9,939
def nonzero_features(data, combine=None): if (combine is None): combine = functools.partial(reduce, numpy.logical_and) masks = numpy.asarray([subset.sum(axis=0) for subset in data]).squeeze() nz_feats = combine(masks).nonzero()[0] return nz_feats
[ "def", "nonzero_features", "(", "data", ",", "combine", "=", "None", ")", ":", "if", "(", "combine", "is", "None", ")", ":", "combine", "=", "functools", ".", "partial", "(", "reduce", ",", "numpy", ".", "logical_and", ")", "masks", "=", "numpy", ".", "asarray", "(", "[", "subset", ".", "sum", "(", "axis", "=", "0", ")", "for", "subset", "in", "data", "]", ")", ".", "squeeze", "(", ")", "nz_feats", "=", "combine", "(", "masks", ")", ".", "nonzero", "(", ")", "[", "0", "]", "return", "nz_feats" ]
get features for which there are nonzero entries in the data .
train
false
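A minimal usage sketch (not part of the dataset) for the nonzero_features snippet above, assuming it and numpy are in scope; with the default combine (logical AND over per-subset column sums), only features nonzero in every subset survive:

import numpy
a = numpy.array([[1, 0, 2], [0, 0, 3]])  # column sums: [1, 0, 5]
b = numpy.array([[0, 5, 1]])             # column sums: [0, 5, 1]
assert list(nonzero_features([a, b])) == [2]  # only column 2 is nonzero in both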
9,940
def switchUID(uid, gid, euid=False): if euid: setuid = os.seteuid setgid = os.setegid getuid = os.geteuid else: setuid = os.setuid setgid = os.setgid getuid = os.getuid if (gid is not None): setgid(gid) if (uid is not None): if (uid == getuid()): uidText = ((euid and 'euid') or 'uid') actionText = ('tried to drop privileges and set%s %s' % (uidText, uid)) problemText = ('%s is already %s' % (uidText, getuid())) warnings.warn(('%s but %s; should we be root? Continuing.' % (actionText, problemText))) else: initgroups(uid, gid) setuid(uid)
[ "def", "switchUID", "(", "uid", ",", "gid", ",", "euid", "=", "False", ")", ":", "if", "euid", ":", "setuid", "=", "os", ".", "seteuid", "setgid", "=", "os", ".", "setegid", "getuid", "=", "os", ".", "geteuid", "else", ":", "setuid", "=", "os", ".", "setuid", "setgid", "=", "os", ".", "setgid", "getuid", "=", "os", ".", "getuid", "if", "(", "gid", "is", "not", "None", ")", ":", "setgid", "(", "gid", ")", "if", "(", "uid", "is", "not", "None", ")", ":", "if", "(", "uid", "==", "getuid", "(", ")", ")", ":", "uidText", "=", "(", "(", "euid", "and", "'euid'", ")", "or", "'uid'", ")", "actionText", "=", "(", "'tried to drop privileges and set%s %s'", "%", "(", "uidText", ",", "uid", ")", ")", "problemText", "=", "(", "'%s is already %s'", "%", "(", "uidText", ",", "getuid", "(", ")", ")", ")", "warnings", ".", "warn", "(", "(", "'%s but %s; should we be root? Continuing.'", "%", "(", "actionText", ",", "problemText", ")", ")", ")", "else", ":", "initgroups", "(", "uid", ",", "gid", ")", "setuid", "(", "uid", ")" ]
attempts to switch the uid/euid and gid/egid for the current process .
train
false
9,941
def get_permissions_from_urls(urls): permissions = set() for url in urls: if (hasattr(url, 'permissions') and url.permissions): permissions.update(url.permissions) return permissions
[ "def", "get_permissions_from_urls", "(", "urls", ")", ":", "permissions", "=", "set", "(", ")", "for", "url", "in", "urls", ":", "if", "(", "hasattr", "(", "url", ",", "'permissions'", ")", "and", "url", ".", "permissions", ")", ":", "permissions", ".", "update", "(", "url", ".", "permissions", ")", "return", "permissions" ]
return a set of permissions for a given iterable of urls .
train
false
9,943
def pem(b, name): s1 = b2a_base64(b)[:(-1)] s2 = '' while s1: s2 += (s1[:64] + '\n') s1 = s1[64:] s = ((('-----BEGIN %s-----\n' % name) + s2) + ('-----END %s-----\n' % name)) return s
[ "def", "pem", "(", "b", ",", "name", ")", ":", "s1", "=", "b2a_base64", "(", "b", ")", "[", ":", "(", "-", "1", ")", "]", "s2", "=", "''", "while", "s1", ":", "s2", "+=", "(", "s1", "[", ":", "64", "]", "+", "'\\n'", ")", "s1", "=", "s1", "[", "64", ":", "]", "s", "=", "(", "(", "(", "'-----BEGIN %s-----\\n'", "%", "name", ")", "+", "s2", ")", "+", "(", "'-----END %s-----\\n'", "%", "name", ")", ")", "return", "s" ]
encode a payload bytearray into a pem string .
train
false
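A minimal usage sketch (not part of the dataset) for the pem snippet above, assuming b2a_base64 from binascii is in scope as in its source module; 48 zero bytes base64-encode to exactly one 64-character line:

# hedged example: output is the BEGIN line, 'A' * 64, then the END line
s = pem(b'\x00' * 48, 'CERTIFICATE')
assert s == ('-----BEGIN CERTIFICATE-----\n' + ('A' * 64) + '\n-----END CERTIFICATE-----\n')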
9,944
def skip_unless_cms(func): return skipUnless((settings.ROOT_URLCONF == 'cms.urls'), 'Test only valid in CMS')(func)
[ "def", "skip_unless_cms", "(", "func", ")", ":", "return", "skipUnless", "(", "(", "settings", ".", "ROOT_URLCONF", "==", "'cms.urls'", ")", ",", "'Test only valid in CMS'", ")", "(", "func", ")" ]
only run the decorated test in the cms test suite .
train
false
9,945
def linear_rainbow(res, frac=0.5): nobs = res.nobs endog = res.model.endog exog = res.model.exog lowidx = np.ceil(((0.5 * (1 - frac)) * nobs)).astype(int) uppidx = np.floor((lowidx + (frac * nobs))).astype(int) mi_sl = slice(lowidx, uppidx) res_mi = OLS(endog[mi_sl], exog[mi_sl]).fit() nobs_mi = res_mi.model.endog.shape[0] ss_mi = res_mi.ssr ss = res.ssr fstat = ((((ss - ss_mi) / (nobs - nobs_mi)) / ss_mi) * res_mi.df_resid) from scipy import stats pval = stats.f.sf(fstat, (nobs - nobs_mi), res_mi.df_resid) return (fstat, pval)
[ "def", "linear_rainbow", "(", "res", ",", "frac", "=", "0.5", ")", ":", "nobs", "=", "res", ".", "nobs", "endog", "=", "res", ".", "model", ".", "endog", "exog", "=", "res", ".", "model", ".", "exog", "lowidx", "=", "np", ".", "ceil", "(", "(", "(", "0.5", "*", "(", "1", "-", "frac", ")", ")", "*", "nobs", ")", ")", ".", "astype", "(", "int", ")", "uppidx", "=", "np", ".", "floor", "(", "(", "lowidx", "+", "(", "frac", "*", "nobs", ")", ")", ")", ".", "astype", "(", "int", ")", "mi_sl", "=", "slice", "(", "lowidx", ",", "uppidx", ")", "res_mi", "=", "OLS", "(", "endog", "[", "mi_sl", "]", ",", "exog", "[", "mi_sl", "]", ")", ".", "fit", "(", ")", "nobs_mi", "=", "res_mi", ".", "model", ".", "endog", ".", "shape", "[", "0", "]", "ss_mi", "=", "res_mi", ".", "ssr", "ss", "=", "res", ".", "ssr", "fstat", "=", "(", "(", "(", "(", "ss", "-", "ss_mi", ")", "/", "(", "nobs", "-", "nobs_mi", ")", ")", "/", "ss_mi", ")", "*", "res_mi", ".", "df_resid", ")", "from", "scipy", "import", "stats", "pval", "=", "stats", ".", "f", ".", "sf", "(", "fstat", ",", "(", "nobs", "-", "nobs_mi", ")", ",", "res_mi", ".", "df_resid", ")", "return", "(", "fstat", ",", "pval", ")" ]
rainbow test for linearity . the null hypothesis is that the regression is correctly modelled as linear .
train
false
9,947
def get_fc_hbas_info(): hbas = get_fc_hbas() hbas_info = [] for hba in hbas: wwpn = hba['port_name'].replace('0x', '') wwnn = hba['node_name'].replace('0x', '') device_path = hba['ClassDevicepath'] device = hba['ClassDevice'] hbas_info.append({'port_name': wwpn, 'node_name': wwnn, 'host_device': device, 'device_path': device_path}) return hbas_info
[ "def", "get_fc_hbas_info", "(", ")", ":", "hbas", "=", "get_fc_hbas", "(", ")", "hbas_info", "=", "[", "]", "for", "hba", "in", "hbas", ":", "wwpn", "=", "hba", "[", "'port_name'", "]", ".", "replace", "(", "'0x'", ",", "''", ")", "wwnn", "=", "hba", "[", "'node_name'", "]", ".", "replace", "(", "'0x'", ",", "''", ")", "device_path", "=", "hba", "[", "'ClassDevicepath'", "]", "device", "=", "hba", "[", "'ClassDevice'", "]", "hbas_info", ".", "append", "(", "{", "'port_name'", ":", "wwpn", ",", "'node_name'", ":", "wwnn", ",", "'host_device'", ":", "device", ",", "'device_path'", ":", "device_path", "}", ")", "return", "hbas_info" ]
get fibre channel wwns and device paths from the system .
train
false
9,948
@_api_version(1.21) @_client_version('1.5.0') def create_volume(name, driver=None, driver_opts=None): response = _client_wrapper('create_volume', name, driver=driver, driver_opts=driver_opts) _clear_context() return response
[ "@", "_api_version", "(", "1.21", ")", "@", "_client_version", "(", "'1.5.0'", ")", "def", "create_volume", "(", "name", ",", "driver", "=", "None", ",", "driver_opts", "=", "None", ")", ":", "response", "=", "_client_wrapper", "(", "'create_volume'", ",", "name", ",", "driver", "=", "driver", ",", "driver_opts", "=", "driver_opts", ")", "_clear_context", "(", ")", "return", "response" ]
create a new docker volume .
train
false
9,951
def get_tempest_default_config_dir(): global_conf_dir = '/etc/tempest' xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')) user_xdg_global_path = os.path.join(xdg_config, 'tempest') user_global_path = os.path.join(os.path.expanduser('~'), '.tempest/etc') if os.path.isdir(global_conf_dir): return global_conf_dir elif os.path.isdir(user_xdg_global_path): return user_xdg_global_path elif os.path.isdir(user_global_path): return user_global_path else: os.makedirs(user_global_path) return user_global_path
[ "def", "get_tempest_default_config_dir", "(", ")", ":", "global_conf_dir", "=", "'/etc/tempest'", "xdg_config", "=", "os", ".", "environ", ".", "get", "(", "'XDG_CONFIG_HOME'", ",", "os", ".", "path", ".", "expanduser", "(", "'~/.config'", ")", ")", "user_xdg_global_path", "=", "os", ".", "path", ".", "join", "(", "xdg_config", ",", "'tempest'", ")", "user_global_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ",", "'.tempest/etc'", ")", "if", "os", ".", "path", ".", "isdir", "(", "global_conf_dir", ")", ":", "return", "global_conf_dir", "elif", "os", ".", "path", ".", "isdir", "(", "user_xdg_global_path", ")", ":", "return", "user_xdg_global_path", "elif", "os", ".", "path", ".", "isdir", "(", "user_global_path", ")", ":", "return", "user_global_path", "else", ":", "os", ".", "makedirs", "(", "user_global_path", ")", "return", "user_global_path" ]
get default config directory of tempest . there are 3 dirs that get tried in priority order .
train
false
9,953
def test_blackbody_exceptions_and_warnings(): with pytest.raises(ValueError) as exc: blackbody_nu((1000 * u.AA), (-100)) assert (exc.value.args[0] == u'Temperature should be positive: -100.0 K') with catch_warnings(AstropyUserWarning) as w: blackbody_nu((0 * u.AA), 5000) assert (len(w) == 1) assert (u'invalid' in w[0].message.args[0]) with catch_warnings(AstropyUserWarning) as w: blackbody_nu(((-1.0) * u.AA), 5000) assert (len(w) == 1) assert (u'invalid' in w[0].message.args[0])
[ "def", "test_blackbody_exceptions_and_warnings", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "exc", ":", "blackbody_nu", "(", "(", "1000", "*", "u", ".", "AA", ")", ",", "(", "-", "100", ")", ")", "assert", "(", "exc", ".", "value", ".", "args", "[", "0", "]", "==", "u'Temperature should be positive: -100.0 K'", ")", "with", "catch_warnings", "(", "AstropyUserWarning", ")", "as", "w", ":", "blackbody_nu", "(", "(", "0", "*", "u", ".", "AA", ")", ",", "5000", ")", "assert", "(", "len", "(", "w", ")", "==", "1", ")", "assert", "(", "u'invalid'", "in", "w", "[", "0", "]", ".", "message", ".", "args", "[", "0", "]", ")", "with", "catch_warnings", "(", "AstropyUserWarning", ")", "as", "w", ":", "blackbody_nu", "(", "(", "(", "-", "1.0", ")", "*", "u", ".", "AA", ")", ",", "5000", ")", "assert", "(", "len", "(", "w", ")", "==", "1", ")", "assert", "(", "u'invalid'", "in", "w", "[", "0", "]", ".", "message", ".", "args", "[", "0", "]", ")" ]
test exceptions .
train
false
9,954
def no_skip_on_missing_deps(wrapped): @functools.wraps(wrapped) def wrapper(*args, **kwargs): try: return wrapped(*args, **kwargs) except (testtools.TestCase.skipException, unittest2.case.SkipTest) as e: if base.bool_from_env('OS_FAIL_ON_MISSING_DEPS'): tools.fail(('%s cannot be skipped because OS_FAIL_ON_MISSING_DEPS is enabled, skip reason: %s' % (wrapped.__name__, e))) raise return wrapper
[ "def", "no_skip_on_missing_deps", "(", "wrapped", ")", ":", "@", "functools", ".", "wraps", "(", "wrapped", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", "except", "(", "testtools", ".", "TestCase", ".", "skipException", ",", "unittest2", ".", "case", ".", "SkipTest", ")", "as", "e", ":", "if", "base", ".", "bool_from_env", "(", "'OS_FAIL_ON_MISSING_DEPS'", ")", ":", "tools", ".", "fail", "(", "(", "'%s cannot be skipped because OS_FAIL_ON_MISSING_DEPS is enabled, skip reason: %s'", "%", "(", "wrapped", ".", "__name__", ",", "e", ")", ")", ")", "raise", "return", "wrapper" ]
do not allow a method/test to skip on missing dependencies .
train
false
9,955
def testCGI(data='Hello <b>World</b>'): result = cStringIO.StringIO() pdf = pisa.CreatePDF(cStringIO.StringIO(data), result) if pdf.err: print 'Content-Type: text/plain' print dumpErrors(pdf) else: print 'Content-Type: application/octet-stream' print sys.stdout.write(result.getvalue())
[ "def", "testCGI", "(", "data", "=", "'Hello <b>World</b>'", ")", ":", "result", "=", "cStringIO", ".", "StringIO", "(", ")", "pdf", "=", "pisa", ".", "CreatePDF", "(", "cStringIO", ".", "StringIO", "(", "data", ")", ",", "result", ")", "if", "pdf", ".", "err", ":", "print", "'Content-Type: text/plain'", "print", "dumpErrors", "(", "pdf", ")", "else", ":", "print", "'Content-Type: application/octet-stream'", "print", "sys", ".", "stdout", ".", "write", "(", "result", ".", "getvalue", "(", ")", ")" ]
this one shows .
train
false
9,956
def pr_delete_role(role_id): resource = current.s3db.resource('pr_role', id=role_id) return resource.delete()
[ "def", "pr_delete_role", "(", "role_id", ")", ":", "resource", "=", "current", ".", "s3db", ".", "resource", "(", "'pr_role'", ",", "id", "=", "role_id", ")", "return", "resource", ".", "delete", "(", ")" ]
back-end method to delete a role .
train
false
9,957
def _copy_array_if_base_present(a): if (a.base is not None): return a.copy() return a
[ "def", "_copy_array_if_base_present", "(", "a", ")", ":", "if", "(", "a", ".", "base", "is", "not", "None", ")", ":", "return", "a", ".", "copy", "(", ")", "return", "a" ]
copies the array if its base points to a parent array .
train
false
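A minimal usage sketch (not part of the dataset) for the _copy_array_if_base_present snippet above, assuming it and numpy are in scope; views (whose .base is set) get copied, while arrays that own their data pass through:

import numpy
a = numpy.arange(10)
view = a[::2]                               # view.base is a
copied = _copy_array_if_base_present(view)  # independent copy
assert copied.base is None
assert _copy_array_if_base_present(a) is a  # owns its data, returned as-is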
9,958
def create_daily_trade_source(sids, sim_params, env, trading_calendar, concurrent=False): return create_trade_source(sids, timedelta(days=1), sim_params, env=env, trading_calendar=trading_calendar, concurrent=concurrent)
[ "def", "create_daily_trade_source", "(", "sids", ",", "sim_params", ",", "env", ",", "trading_calendar", ",", "concurrent", "=", "False", ")", ":", "return", "create_trade_source", "(", "sids", ",", "timedelta", "(", "days", "=", "1", ")", ",", "sim_params", ",", "env", "=", "env", ",", "trading_calendar", "=", "trading_calendar", ",", "concurrent", "=", "concurrent", ")" ]
creates trade_count trades for each sid in sids list .
train
false
9,960
def Range(s1, s2=None): if s2: result = CodeRange(ord(s1), (ord(s2) + 1)) result.str = ('Range(%s,%s)' % (s1, s2)) else: ranges = [] for i in range(0, len(s1), 2): ranges.append(CodeRange(ord(s1[i]), (ord(s1[(i + 1)]) + 1))) result = Alt(*ranges) result.str = ('Range(%s)' % repr(s1)) return result
[ "def", "Range", "(", "s1", ",", "s2", "=", "None", ")", ":", "if", "s2", ":", "result", "=", "CodeRange", "(", "ord", "(", "s1", ")", ",", "(", "ord", "(", "s2", ")", "+", "1", ")", ")", "result", ".", "str", "=", "(", "'Range(%s,%s)'", "%", "(", "s1", ",", "s2", ")", ")", "else", ":", "ranges", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "s1", ")", ",", "2", ")", ":", "ranges", ".", "append", "(", "CodeRange", "(", "ord", "(", "s1", "[", "i", "]", ")", ",", "(", "ord", "(", "s1", "[", "(", "i", "+", "1", ")", "]", ")", "+", "1", ")", ")", ")", "result", "=", "Alt", "(", "*", "ranges", ")", "result", ".", "str", "=", "(", "'Range(%s)'", "%", "repr", "(", "s1", ")", ")", "return", "result" ]
range is an re which matches any single character in the range |c1| to |c2| inclusive .
train
false
9,961
@pytest.mark.usefixtures('reset_standarddir') @pytest.mark.parametrize('data_subdir, config_subdir, expected', [('foo', 'foo', 'foo/data'), ('foo', 'bar', 'foo')]) def test_get_fake_windows_equal_dir(data_subdir, config_subdir, expected, monkeypatch, tmpdir): locations = {QStandardPaths.DataLocation: str((tmpdir / data_subdir)), QStandardPaths.ConfigLocation: str((tmpdir / config_subdir))} monkeypatch.setattr('qutebrowser.utils.standarddir.os.name', 'nt') monkeypatch.setattr('qutebrowser.utils.standarddir.QStandardPaths.writableLocation', locations.get) expected = str((tmpdir / expected)) assert (standarddir.data() == expected)
[ "@", "pytest", ".", "mark", ".", "usefixtures", "(", "'reset_standarddir'", ")", "@", "pytest", ".", "mark", ".", "parametrize", "(", "'data_subdir, config_subdir, expected'", ",", "[", "(", "'foo'", ",", "'foo'", ",", "'foo/data'", ")", ",", "(", "'foo'", ",", "'bar'", ",", "'foo'", ")", "]", ")", "def", "test_get_fake_windows_equal_dir", "(", "data_subdir", ",", "config_subdir", ",", "expected", ",", "monkeypatch", ",", "tmpdir", ")", ":", "locations", "=", "{", "QStandardPaths", ".", "DataLocation", ":", "str", "(", "(", "tmpdir", "/", "data_subdir", ")", ")", ",", "QStandardPaths", ".", "ConfigLocation", ":", "str", "(", "(", "tmpdir", "/", "config_subdir", ")", ")", "}", "monkeypatch", ".", "setattr", "(", "'qutebrowser.utils.standarddir.os.name'", ",", "'nt'", ")", "monkeypatch", ".", "setattr", "(", "'qutebrowser.utils.standarddir.QStandardPaths.writableLocation'", ",", "locations", ".", "get", ")", "expected", "=", "str", "(", "(", "tmpdir", "/", "expected", ")", ")", "assert", "(", "standarddir", ".", "data", "(", ")", "==", "expected", ")" ]
test _get with a fake windows os with equal data/config dirs .
train
false
9,963
def user_cache_dir(appname): if WINDOWS: path = os.path.normpath(_get_win_folder('CSIDL_LOCAL_APPDATA')) path = os.path.join(path, appname, 'Cache') elif (sys.platform == 'darwin'): path = expanduser('~/Library/Caches') path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', expanduser('~/.cache')) path = os.path.join(path, appname) return path
[ "def", "user_cache_dir", "(", "appname", ")", ":", "if", "WINDOWS", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "_get_win_folder", "(", "'CSIDL_LOCAL_APPDATA'", ")", ")", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "appname", ",", "'Cache'", ")", "elif", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "path", "=", "expanduser", "(", "'~/Library/Caches'", ")", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "appname", ")", "else", ":", "path", "=", "os", ".", "getenv", "(", "'XDG_CACHE_HOME'", ",", "expanduser", "(", "'~/.cache'", ")", ")", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "appname", ")", "return", "path" ]
return full path to the user-specific cache dir for this application .
train
true
9,964
def get_action(action): if _actions: if (action not in _actions): raise KeyError(("Action '%s' not found" % action)) return _actions.get(action) for action_module_name in ['get', 'create', 'update', 'delete', 'patch']: module_path = ('ckan.logic.action.' + action_module_name) module = __import__(module_path) for part in module_path.split('.')[1:]: module = getattr(module, part) for (k, v) in module.__dict__.items(): if (not k.startswith('_')): if (hasattr(v, '__call__') and ((v.__module__ == module_path) or hasattr(v, '__replaced'))): _actions[k] = v if ((action_module_name == 'get') and (not hasattr(v, 'side_effect_free'))): v.side_effect_free = True resolved_action_plugins = {} fetched_actions = {} for plugin in p.PluginImplementations(p.IActions): for (name, auth_function) in plugin.get_actions().items(): if (name in resolved_action_plugins): raise NameConflict(('The action %r is already implemented in %r' % (name, resolved_action_plugins[name]))) resolved_action_plugins[name] = plugin.name auth_function.auth_audit_exempt = True fetched_actions[name] = auth_function _actions.update(fetched_actions) for (action_name, _action) in _actions.items(): def make_wrapped(_action, action_name): def wrapped(context=None, data_dict=None, **kw): if kw: log.critical(('%s was passed extra keywords %r' % (_action.__name__, kw))) context = _prepopulate_context(context) context.setdefault('__auth_audit', []) context['__auth_audit'].append((action_name, id(_action))) result = _action(context, data_dict, **kw) try: audit = context['__auth_audit'][(-1)] if ((audit[0] == action_name) and (audit[1] == id(_action))): if (action_name not in authz.auth_functions_list()): log.debug(('No auth function for %s' % action_name)) elif (not getattr(_action, 'auth_audit_exempt', False)): raise Exception('Action function {0} did not call its auth function'.format(action_name)) context['__auth_audit'].pop() except IndexError: pass return result return wrapped if hasattr(_action, '__replaced'): _actions[action_name] = _action.__replaced continue fn = make_wrapped(_action, action_name) fn.__doc__ = _action.__doc__ if getattr(_action, 'side_effect_free', False): fn.side_effect_free = True _actions[action_name] = fn return _actions.get(action)
[ "def", "get_action", "(", "action", ")", ":", "if", "_actions", ":", "if", "(", "action", "not", "in", "_actions", ")", ":", "raise", "KeyError", "(", "(", "\"Action '%s' not found\"", "%", "action", ")", ")", "return", "_actions", ".", "get", "(", "action", ")", "for", "action_module_name", "in", "[", "'get'", ",", "'create'", ",", "'update'", ",", "'delete'", ",", "'patch'", "]", ":", "module_path", "=", "(", "'ckan.logic.action.'", "+", "action_module_name", ")", "module", "=", "__import__", "(", "module_path", ")", "for", "part", "in", "module_path", ".", "split", "(", "'.'", ")", "[", "1", ":", "]", ":", "module", "=", "getattr", "(", "module", ",", "part", ")", "for", "(", "k", ",", "v", ")", "in", "module", ".", "__dict__", ".", "items", "(", ")", ":", "if", "(", "not", "k", ".", "startswith", "(", "'_'", ")", ")", ":", "if", "(", "hasattr", "(", "v", ",", "'__call__'", ")", "and", "(", "(", "v", ".", "__module__", "==", "module_path", ")", "or", "hasattr", "(", "v", ",", "'__replaced'", ")", ")", ")", ":", "_actions", "[", "k", "]", "=", "v", "if", "(", "(", "action_module_name", "==", "'get'", ")", "and", "(", "not", "hasattr", "(", "v", ",", "'side_effect_free'", ")", ")", ")", ":", "v", ".", "side_effect_free", "=", "True", "resolved_action_plugins", "=", "{", "}", "fetched_actions", "=", "{", "}", "for", "plugin", "in", "p", ".", "PluginImplementations", "(", "p", ".", "IActions", ")", ":", "for", "(", "name", ",", "auth_function", ")", "in", "plugin", ".", "get_actions", "(", ")", ".", "items", "(", ")", ":", "if", "(", "name", "in", "resolved_action_plugins", ")", ":", "raise", "NameConflict", "(", "(", "'The action %r is already implemented in %r'", "%", "(", "name", ",", "resolved_action_plugins", "[", "name", "]", ")", ")", ")", "resolved_action_plugins", "[", "name", "]", "=", "plugin", ".", "name", "auth_function", ".", "auth_audit_exempt", "=", "True", "fetched_actions", "[", "name", "]", "=", "auth_function", "_actions", ".", "update", "(", "fetched_actions", ")", "for", "(", "action_name", ",", "_action", ")", "in", "_actions", ".", "items", "(", ")", ":", "def", "make_wrapped", "(", "_action", ",", "action_name", ")", ":", "def", "wrapped", "(", "context", "=", "None", ",", "data_dict", "=", "None", ",", "**", "kw", ")", ":", "if", "kw", ":", "log", ".", "critical", "(", "(", "'%s was passed extra keywords %r'", "%", "(", "_action", ".", "__name__", ",", "kw", ")", ")", ")", "context", "=", "_prepopulate_context", "(", "context", ")", "context", ".", "setdefault", "(", "'__auth_audit'", ",", "[", "]", ")", "context", "[", "'__auth_audit'", "]", ".", "append", "(", "(", "action_name", ",", "id", "(", "_action", ")", ")", ")", "result", "=", "_action", "(", "context", ",", "data_dict", ",", "**", "kw", ")", "try", ":", "audit", "=", "context", "[", "'__auth_audit'", "]", "[", "(", "-", "1", ")", "]", "if", "(", "(", "audit", "[", "0", "]", "==", "action_name", ")", "and", "(", "audit", "[", "1", "]", "==", "id", "(", "_action", ")", ")", ")", ":", "if", "(", "action_name", "not", "in", "authz", ".", "auth_functions_list", "(", ")", ")", ":", "log", ".", "debug", "(", "(", "'No auth function for %s'", "%", "action_name", ")", ")", "elif", "(", "not", "getattr", "(", "_action", ",", "'auth_audit_exempt'", ",", "False", ")", ")", ":", "raise", "Exception", "(", "'Action function {0} did not call its auth function'", ".", "format", "(", "action_name", ")", ")", "context", "[", "'__auth_audit'", "]", ".", "pop", "(", ")", "except", "IndexError", ":", "pass", "return", 
"result", "return", "wrapped", "if", "hasattr", "(", "_action", ",", "'__replaced'", ")", ":", "_actions", "[", "action_name", "]", "=", "_action", ".", "__replaced", "continue", "fn", "=", "make_wrapped", "(", "_action", ",", "action_name", ")", "fn", ".", "__doc__", "=", "_action", ".", "__doc__", "if", "getattr", "(", "_action", ",", "'side_effect_free'", ",", "False", ")", ":", "fn", ".", "side_effect_free", "=", "True", "_actions", "[", "action_name", "]", "=", "fn", "return", "_actions", ".", "get", "(", "action", ")" ]
return the named action function for calling from an action function or template; the registry of actions is built and auth-audit wrapped on first use .
train
false
9,965
def dictadd(*dicts): result = {} for dct in dicts: result.update(dct) return result
[ "def", "dictadd", "(", "*", "dicts", ")", ":", "result", "=", "{", "}", "for", "dct", "in", "dicts", ":", "result", ".", "update", "(", "dct", ")", "return", "result" ]
returns a dictionary consisting of the keys of all given dictionaries; later dictionaries win on conflicts .
train
false
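A quick usage sketch for the dictadd snippet above; later dictionaries win on key conflicts:
merged = dictadd({'a': 1}, {'b': 2}, {'a': 3})
assert merged == {'a': 3, 'b': 2}  # the last value seen for 'a' wins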
9,966
def Config(**options): names = ['title', 'xlabel', 'ylabel', 'xscale', 'yscale', 'xticks', 'yticks', 'axis', 'xlim', 'ylim'] for name in names: if (name in options): getattr(pyplot, name)(options[name]) loc_dict = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4, 'right': 5, 'center left': 6, 'center right': 7, 'lower center': 8, 'upper center': 9, 'center': 10} global LEGEND LEGEND = options.get('legend', LEGEND) if LEGEND: global LOC LOC = options.get('loc', LOC) pyplot.legend(loc=LOC)
[ "def", "Config", "(", "**", "options", ")", ":", "names", "=", "[", "'title'", ",", "'xlabel'", ",", "'ylabel'", ",", "'xscale'", ",", "'yscale'", ",", "'xticks'", ",", "'yticks'", ",", "'axis'", ",", "'xlim'", ",", "'ylim'", "]", "for", "name", "in", "names", ":", "if", "(", "name", "in", "options", ")", ":", "getattr", "(", "pyplot", ",", "name", ")", "(", "options", "[", "name", "]", ")", "loc_dict", "=", "{", "'upper right'", ":", "1", ",", "'upper left'", ":", "2", ",", "'lower left'", ":", "3", ",", "'lower right'", ":", "4", ",", "'right'", ":", "5", ",", "'center left'", ":", "6", ",", "'center right'", ":", "7", ",", "'lower center'", ":", "8", ",", "'upper center'", ":", "9", ",", "'center'", ":", "10", "}", "global", "LEGEND", "LEGEND", "=", "options", ".", "get", "(", "'legend'", ",", "LEGEND", ")", "if", "LEGEND", ":", "global", "LOC", "LOC", "=", "options", ".", "get", "(", "'loc'", ",", "LOC", ")", "pyplot", ".", "legend", "(", "loc", "=", "LOC", ")" ]
configures the plot .
train
false
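A hedged usage sketch for the Config snippet above, assuming the module-level LEGEND and LOC globals it relies on are already defined:
from matplotlib import pyplot
pyplot.plot([1, 2, 3], [1, 4, 9], label='squares')
Config(title='Growth', xlabel='x', ylabel='y', legend=True, loc='upper left')  # forwards each option to the matching pyplot function
pyplot.show()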
9,967
def line_extended(line, distance): angle = (((line.angle() / 360) * 2) * math.pi) (dx, dy) = unit_point(angle, r=distance) return QLineF(line.p1(), (line.p2() + QPointF(dx, dy)))
[ "def", "line_extended", "(", "line", ",", "distance", ")", ":", "angle", "=", "(", "(", "(", "line", ".", "angle", "(", ")", "/", "360", ")", "*", "2", ")", "*", "math", ".", "pi", ")", "(", "dx", ",", "dy", ")", "=", "unit_point", "(", "angle", ",", "r", "=", "distance", ")", "return", "QLineF", "(", "line", ".", "p1", "(", ")", ",", "(", "line", ".", "p2", "(", ")", "+", "QPointF", "(", "dx", ",", "dy", ")", ")", ")" ]
return a qlinef extended by distance units in the positive direction .
train
false
9,970
def get_lexer_for_mimetype(_mime, **options): for (modname, name, _, _, mimetypes) in LEXERS.itervalues(): if (_mime in mimetypes): if (name not in _lexer_cache): _load_lexers(modname) return _lexer_cache[name](**options) for cls in find_plugin_lexers(): if (_mime in cls.mimetypes): return cls(**options) raise ClassNotFound(('no lexer for mimetype %r found' % _mime))
[ "def", "get_lexer_for_mimetype", "(", "_mime", ",", "**", "options", ")", ":", "for", "(", "modname", ",", "name", ",", "_", ",", "_", ",", "mimetypes", ")", "in", "LEXERS", ".", "itervalues", "(", ")", ":", "if", "(", "_mime", "in", "mimetypes", ")", ":", "if", "(", "name", "not", "in", "_lexer_cache", ")", ":", "_load_lexers", "(", "modname", ")", "return", "_lexer_cache", "[", "name", "]", "(", "**", "options", ")", "for", "cls", "in", "find_plugin_lexers", "(", ")", ":", "if", "(", "_mime", "in", "cls", ".", "mimetypes", ")", ":", "return", "cls", "(", "**", "options", ")", "raise", "ClassNotFound", "(", "(", "'no lexer for mimetype %r found'", "%", "_mime", ")", ")" ]
get a lexer for a mimetype .
train
true
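Pygments ships this function; a usage sketch via its public API:
from pygments.lexers import get_lexer_for_mimetype
lexer = get_lexer_for_mimetype('text/x-python')
print(lexer.name)                        # 'Python'
get_lexer_for_mimetype('no/such-type')   # raises ClassNotFound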
9,973
def list_minion(saltenv='base'): return _client().file_local_list(saltenv)
[ "def", "list_minion", "(", "saltenv", "=", "'base'", ")", ":", "return", "_client", "(", ")", ".", "file_local_list", "(", "saltenv", ")" ]
list all of the files cached on the minion .
train
false
9,974
def sh(cmd): return check_call(cmd, shell=True)
[ "def", "sh", "(", "cmd", ")", ":", "return", "check_call", "(", "cmd", ",", "shell", "=", "True", ")" ]
print a command and send it to the shell .
train
false
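Usage sketch for the sh snippet above (check_call comes from the subprocess module):
sh('echo hello')   # the shell prints the output; returns 0 on success
sh('false')        # non-zero exit raises subprocess.CalledProcessError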
9,975
@comm_guard(dict, dict) def unify_walk(d1, d2, U): for (k1, v1) in iteritems(d1): if (k1 in d2): U = unify_walk(v1, d2[k1], U) if (U is False): return False return U
[ "@", "comm_guard", "(", "dict", ",", "dict", ")", "def", "unify_walk", "(", "d1", ",", "d2", ",", "U", ")", ":", "for", "(", "k1", ",", "v1", ")", "in", "iteritems", "(", "d1", ")", ":", "if", "(", "k1", "in", "d2", ")", ":", "U", "=", "unify_walk", "(", "v1", ",", "d2", "[", "k1", "]", ",", "U", ")", "if", "(", "U", "is", "False", ")", ":", "return", "False", "return", "U" ]
unify two dictionaries by recursively unifying the values of the keys they share .
train
false
9,976
def pks(objects): return [o.pk for o in objects]
[ "def", "pks", "(", "objects", ")", ":", "return", "[", "o", ".", "pk", "for", "o", "in", "objects", "]" ]
return pks to be able to compare lists .
train
false
9,979
def is_option_value_selected(browser_query, value): select = Select(browser_query.first.results[0]) ddl_selected_value = select.first_selected_option.get_attribute('value') return (ddl_selected_value == value)
[ "def", "is_option_value_selected", "(", "browser_query", ",", "value", ")", ":", "select", "=", "Select", "(", "browser_query", ".", "first", ".", "results", "[", "0", "]", ")", "ddl_selected_value", "=", "select", ".", "first_selected_option", ".", "get_attribute", "(", "'value'", ")", "return", "(", "ddl_selected_value", "==", "value", ")" ]
return true if given value is selected in html select element .
train
false
9,980
def host_pointer(obj): if isinstance(obj, (int, long)): return obj forcewritable = isinstance(obj, np.void) return mviewbuf.memoryview_get_buffer(obj, forcewritable)
[ "def", "host_pointer", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "int", ",", "long", ")", ")", ":", "return", "obj", "forcewritable", "=", "isinstance", "(", "obj", ",", "np", ".", "void", ")", "return", "mviewbuf", ".", "memoryview_get_buffer", "(", "obj", ",", "forcewritable", ")" ]
return a host memory pointer for obj; note: the underlying data pointer from the host buffer is used directly and should not be changed until the operation, which can be asynchronous, completes .
train
false
9,981
def get_engine_by_slug(slug=None): if ((not slug) or (type(slug) is not str)): raise TypeError(("Invalid slug '%s'" % slug)) selected_engines = filter((lambda engine: (hasattr(engine, 'SLUG') and (engine.SLUG == slug))), get_engines()) if (len(selected_engines) == 0): raise ValueError(("No TTS engine found for slug '%s'" % slug)) else: if (len(selected_engines) > 1): print (("WARNING: Multiple TTS engines found for slug '%s'. This is most certainly a bug." % slug)) engine = selected_engines[0] if (not engine.is_available()): raise ValueError((("TTS engine '%s' is not available (due to " + 'missing dependencies, etc.)') % slug)) return engine
[ "def", "get_engine_by_slug", "(", "slug", "=", "None", ")", ":", "if", "(", "(", "not", "slug", ")", "or", "(", "type", "(", "slug", ")", "is", "not", "str", ")", ")", ":", "raise", "TypeError", "(", "\"Invalid slug '%s'\"", ",", "slug", ")", "selected_engines", "=", "filter", "(", "(", "lambda", "engine", ":", "(", "hasattr", "(", "engine", ",", "'SLUG'", ")", "and", "(", "engine", ".", "SLUG", "==", "slug", ")", ")", ")", ",", "get_engines", "(", ")", ")", "if", "(", "len", "(", "selected_engines", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "(", "\"No TTS engine found for slug '%s'\"", "%", "slug", ")", ")", "else", ":", "if", "(", "len", "(", "selected_engines", ")", ">", "1", ")", ":", "print", "(", "\"WARNING: Multiple TTS engines found for slug '%s'. \"", "+", "(", "'This is most certainly a bug.'", "%", "slug", ")", ")", "engine", "=", "selected_engines", "[", "0", "]", "if", "(", "not", "engine", ".", "is_available", "(", ")", ")", ":", "raise", "ValueError", "(", "(", "(", "\"TTS engine '%s' is not available (due to \"", "+", "'missing dependencies, etc.)'", ")", "%", "slug", ")", ")", "return", "engine" ]
returns: a tts engine implementation matching the given slug and available on the current platform; raises: valueerror if no such engine is supported on this platform .
train
false
9,982
def updateMethod(func): func.isUpdateMethod = True return func
[ "def", "updateMethod", "(", "func", ")", ":", "func", ".", "isUpdateMethod", "=", "True", "return", "func" ]
decorate this resourcetype instance as an update method .
train
false
9,983
def _check_has_primary(sds): for s in sds.values(): if (s.server_type == SERVER_TYPE.RSPrimary): return TOPOLOGY_TYPE.ReplicaSetWithPrimary else: return TOPOLOGY_TYPE.ReplicaSetNoPrimary
[ "def", "_check_has_primary", "(", "sds", ")", ":", "for", "s", "in", "sds", ".", "values", "(", ")", ":", "if", "(", "s", ".", "server_type", "==", "SERVER_TYPE", ".", "RSPrimary", ")", ":", "return", "TOPOLOGY_TYPE", ".", "ReplicaSetWithPrimary", "else", ":", "return", "TOPOLOGY_TYPE", ".", "ReplicaSetNoPrimary" ]
given a dict of server descriptions, return replicasetwithprimary if a primary is still known, else replicasetnoprimary .
train
true
9,984
def uses_mysql(connection): return (u'mysql' in connection.settings_dict[u'ENGINE'])
[ "def", "uses_mysql", "(", "connection", ")", ":", "return", "(", "u'mysql'", "in", "connection", ".", "settings_dict", "[", "u'ENGINE'", "]", ")" ]
return whether the connection represents a mysql db .
train
false
9,985
def get_with_search(endpoint, term): try: params = {'q': term, 'client_id': api_key} request = requests.get(API_BASE.format(endpoint), params=params) request.raise_for_status() except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e: raise APIError('Could not find {}: {}'.format(endpoint, e)) json = request.json() if (not json): return None else: return json[0]
[ "def", "get_with_search", "(", "endpoint", ",", "term", ")", ":", "try", ":", "params", "=", "{", "'q'", ":", "term", ",", "'client_id'", ":", "api_key", "}", "request", "=", "requests", ".", "get", "(", "API_BASE", ".", "format", "(", "endpoint", ")", ",", "params", "=", "params", ")", "request", ".", "raise_for_status", "(", ")", "except", "(", "requests", ".", "exceptions", ".", "HTTPError", ",", "requests", ".", "exceptions", ".", "ConnectionError", ")", "as", "e", ":", "raise", "APIError", "(", "'Could not find {}: {}'", ".", "format", "(", "endpoint", ",", "e", ")", ")", "json", "=", "request", ".", "json", "(", ")", "if", "(", "not", "json", ")", ":", "return", "None", "else", ":", "return", "json", "[", "0", "]" ]
searches :endpoint on soundcloud for :term and returns the first matching item, or none if there are no results .
train
false
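A hedged usage sketch; it assumes the module-level api_key and API_BASE template the snippet relies on, and the endpoint/term below are hypothetical:
track = get_with_search('tracks', 'some search term')
if track is not None:
    print(track.get('title'))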
9,987
def versionize(app, pagename, templatename, context, doctree): if (not (app.config.canonical_root and app.config.versions)): return context['versions'] = [(vs, _build_url(app.config.canonical_root, vs, pagename)) for vs in app.config.versions.split(',') if (vs != app.config.version)]
[ "def", "versionize", "(", "app", ",", "pagename", ",", "templatename", ",", "context", ",", "doctree", ")", ":", "if", "(", "not", "(", "app", ".", "config", ".", "canonical_root", "and", "app", ".", "config", ".", "versions", ")", ")", ":", "return", "context", "[", "'versions'", "]", "=", "[", "(", "vs", ",", "_build_url", "(", "app", ".", "config", ".", "canonical_root", ",", "vs", ",", "pagename", ")", ")", "for", "vs", "in", "app", ".", "config", ".", "versions", ".", "split", "(", "','", ")", "if", "(", "vs", "!=", "app", ".", "config", ".", "version", ")", "]" ]
adds a version switcher below the menu .
train
false
9,988
def _GetModuleObjectAndName(globals_dict): for (name, module) in sys.modules.items(): if (getattr(module, '__dict__', None) is globals_dict): if (name == '__main__'): name = sys.argv[0] return (module, name) return (None, None)
[ "def", "_GetModuleObjectAndName", "(", "globals_dict", ")", ":", "for", "(", "name", ",", "module", ")", "in", "sys", ".", "modules", ".", "items", "(", ")", ":", "if", "(", "getattr", "(", "module", ",", "'__dict__'", ",", "None", ")", "is", "globals_dict", ")", ":", "if", "(", "name", "==", "'__main__'", ")", ":", "name", "=", "sys", ".", "argv", "[", "0", "]", "return", "(", "module", ",", "name", ")", "return", "(", "None", ",", "None", ")" ]
returns the module that defines a global environment, and its name .
train
false
9,989
def symptom_LDAP_file_based_domain_specific_configs_formatted_correctly(): filedir = CONF.identity.domain_config_dir if ((not CONF.identity.domain_specific_drivers_enabled) or CONF.identity.domain_configurations_from_database or (not os.path.isdir(filedir))): return False invalid_files = [] for filename in os.listdir(filedir): if re.match(CONFIG_REGEX, filename): try: parser = configparser.ConfigParser() parser.read(os.path.join(filedir, filename)) except configparser.Error: invalid_files.append(filename) if invalid_files: invalid_str = ', '.join(invalid_files) print('Error: The following config files are formatted incorrectly: ', invalid_str) return True return False
[ "def", "symptom_LDAP_file_based_domain_specific_configs_formatted_correctly", "(", ")", ":", "filedir", "=", "CONF", ".", "identity", ".", "domain_config_dir", "if", "(", "(", "not", "CONF", ".", "identity", ".", "domain_specific_drivers_enabled", ")", "or", "CONF", ".", "identity", ".", "domain_configurations_from_database", "or", "(", "not", "os", ".", "path", ".", "isdir", "(", "filedir", ")", ")", ")", ":", "return", "False", "invalid_files", "=", "[", "]", "for", "filename", "in", "os", ".", "listdir", "(", "filedir", ")", ":", "if", "re", ".", "match", "(", "CONFIG_REGEX", ",", "filename", ")", ":", "try", ":", "parser", "=", "configparser", ".", "ConfigParser", "(", ")", "parser", ".", "read", "(", "os", ".", "path", ".", "join", "(", "filedir", ",", "filename", ")", ")", "except", "configparser", ".", "Error", ":", "invalid_files", ".", "append", "(", "filename", ")", "if", "invalid_files", ":", "invalid_str", "=", "', '", ".", "join", "(", "invalid_files", ")", "print", "(", "'Error: The following config files are formatted incorrectly: '", ",", "invalid_str", ")", "return", "True", "return", "False" ]
ldap domain specific configuration files are not formatted correctly .
train
false
9,990
def _getnames(self, yname=None, xname=None): if (yname is None): if (hasattr(self.model, 'endog_names') and (not (self.model.endog_names is None))): yname = self.model.endog_names else: yname = 'y' if (xname is None): if (hasattr(self.model, 'exog_names') and (not (self.model.exog_names is None))): xname = self.model.exog_names else: xname = [('var_%d' % i) for i in range(len(self.params))] return (yname, xname)
[ "def", "_getnames", "(", "self", ",", "yname", "=", "None", ",", "xname", "=", "None", ")", ":", "if", "(", "yname", "is", "None", ")", ":", "if", "(", "hasattr", "(", "self", ".", "model", ",", "'endog_names'", ")", "and", "(", "not", "(", "self", ".", "model", ".", "endog_names", "is", "None", ")", ")", ")", ":", "yname", "=", "self", ".", "model", ".", "endog_names", "else", ":", "yname", "=", "'y'", "if", "(", "xname", "is", "None", ")", ":", "if", "(", "hasattr", "(", "self", ".", "model", ",", "'exog_names'", ")", "and", "(", "not", "(", "self", ".", "model", ".", "exog_names", "is", "None", ")", ")", ")", ":", "xname", "=", "self", ".", "model", ".", "exog_names", "else", ":", "xname", "=", "[", "(", "'var_%d'", "%", "i", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "params", ")", ")", "]", "return", "(", "yname", ",", "xname", ")" ]
extract names from model or construct names .
train
false
9,991
def _check_read_only(context, resource_id): res = p.toolkit.get_action('resource_show')(context, {'id': resource_id}) if (res.get('url_type') != 'datastore'): raise p.toolkit.ValidationError({'read-only': ['Cannot edit read-only resource. Either pass "force=True" or change url-type to "datastore"']})
[ "def", "_check_read_only", "(", "context", ",", "resource_id", ")", ":", "res", "=", "p", ".", "toolkit", ".", "get_action", "(", "'resource_show'", ")", "(", "context", ",", "{", "'id'", ":", "resource_id", "}", ")", "if", "(", "res", ".", "get", "(", "'url_type'", ")", "!=", "'datastore'", ")", ":", "raise", "p", ".", "toolkit", ".", "ValidationError", "(", "{", "'read-only'", ":", "[", "'Cannot edit read-only resource. Either pass \"force=True\" or change url-type to \"datastore\"'", "]", "}", ")" ]
raises exception if the resource is read-only .
train
false
9,992
def do_request(*args, **kwargs): res = do_single_request(*args, **kwargs) if res: return json.loads(res) return res
[ "def", "do_request", "(", "*", "args", ",", "**", "kwargs", ")", ":", "res", "=", "do_single_request", "(", "*", "args", ",", "**", "kwargs", ")", "if", "res", ":", "return", "json", ".", "loads", "(", "res", ")", "return", "res" ]
convenience function that wraps do_single_request and parses the json response .
train
false
9,993
def _convert_asset_timestamp_fields(dict_): for key in (_asset_timestamp_fields & viewkeys(dict_)): value = pd.Timestamp(dict_[key], tz='UTC') dict_[key] = (None if isnull(value) else value) return dict_
[ "def", "_convert_asset_timestamp_fields", "(", "dict_", ")", ":", "for", "key", "in", "(", "_asset_timestamp_fields", "&", "viewkeys", "(", "dict_", ")", ")", ":", "value", "=", "pd", ".", "Timestamp", "(", "dict_", "[", "key", "]", ",", "tz", "=", "'UTC'", ")", "dict_", "[", "key", "]", "=", "(", "None", "if", "isnull", "(", "value", ")", "else", "value", ")", "return", "dict_" ]
takes in a dict of asset init args and converts its date fields to utc pd.timestamp values, mapping missing dates to none .
train
true
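A minimal sketch, assuming 'start_date' is among the module-level _asset_timestamp_fields this helper consults:
d = _convert_asset_timestamp_fields({'start_date': '2014-01-01'})
d['start_date']   # Timestamp('2014-01-01 00:00:00+0000', tz='UTC'); missing dates come back as None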
9,995
def member_with_tags_server_selector(tag_sets, selection): return apply_tag_sets(tag_sets, readable_server_selector(selection))
[ "def", "member_with_tags_server_selector", "(", "tag_sets", ",", "selection", ")", ":", "return", "apply_tag_sets", "(", "tag_sets", ",", "readable_server_selector", "(", "selection", ")", ")" ]
all near-enough members matching the tag sets .
train
false
9,997
def dump_header(iterable, allow_token=True): if isinstance(iterable, dict): items = [] for (key, value) in iterable.iteritems(): if (value is None): items.append(key) else: items.append(('%s=%s' % (key, quote_header_value(value, allow_token=allow_token)))) else: items = [quote_header_value(x, allow_token=allow_token) for x in iterable] return ', '.join(items)
[ "def", "dump_header", "(", "iterable", ",", "allow_token", "=", "True", ")", ":", "if", "isinstance", "(", "iterable", ",", "dict", ")", ":", "items", "=", "[", "]", "for", "(", "key", ",", "value", ")", "in", "iterable", ".", "iteritems", "(", ")", ":", "if", "(", "value", "is", "None", ")", ":", "items", ".", "append", "(", "key", ")", "else", ":", "items", ".", "append", "(", "(", "'%s=%s'", "%", "(", "key", ",", "quote_header_value", "(", "value", ",", "allow_token", "=", "allow_token", ")", ")", ")", ")", "else", ":", "items", "=", "[", "quote_header_value", "(", "x", ",", "allow_token", "=", "allow_token", ")", "for", "x", "in", "iterable", "]", "return", "', '", ".", "join", "(", "items", ")" ]
dump an http header again; this is the reverse of the header parsing functions .
train
true
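Usage sketch matching werkzeug's documented behaviour:
dump_header([1, 2, 3])           # '1, 2, 3'
dump_header({'foo': 'bar'})      # 'foo=bar'
dump_header({'no-cache': None})  # 'no-cache' -- None values emit the bare key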
9,998
def all_simple_paths(G, source, target, cutoff=None): if (source not in G): raise nx.NodeNotFound(('source node %s not in graph' % source)) if (target not in G): raise nx.NodeNotFound(('target node %s not in graph' % target)) if (cutoff is None): cutoff = (len(G) - 1) if G.is_multigraph(): return _all_simple_paths_multigraph(G, source, target, cutoff=cutoff) else: return _all_simple_paths_graph(G, source, target, cutoff=cutoff)
[ "def", "all_simple_paths", "(", "G", ",", "source", ",", "target", ",", "cutoff", "=", "None", ")", ":", "if", "(", "source", "not", "in", "G", ")", ":", "raise", "nx", ".", "NodeNotFound", "(", "(", "'source node %s not in graph'", "%", "source", ")", ")", "if", "(", "target", "not", "in", "G", ")", ":", "raise", "nx", ".", "NodeNotFound", "(", "(", "'target node %s not in graph'", "%", "target", ")", ")", "if", "(", "cutoff", "is", "None", ")", ":", "cutoff", "=", "(", "len", "(", "G", ")", "-", "1", ")", "if", "G", ".", "is_multigraph", "(", ")", ":", "return", "_all_simple_paths_multigraph", "(", "G", ",", "source", ",", "target", ",", "cutoff", "=", "cutoff", ")", "else", ":", "return", "_all_simple_paths_graph", "(", "G", ",", "source", ",", "target", ",", "cutoff", "=", "cutoff", ")" ]
generate all simple paths in the graph g from source to target .
train
false
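Usage sketch mirroring the networkx docs:
import networkx as nx
G = nx.complete_graph(4)
sorted(nx.all_simple_paths(G, source=0, target=3))
# [[0, 1, 2, 3], [0, 1, 3], [0, 2, 1, 3], [0, 2, 3], [0, 3]]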
9,999
def python_to_jupyter_cli(args=None, namespace=None): parser = argparse.ArgumentParser(description='Sphinx-Gallery Notebook converter') parser.add_argument('python_src_file', nargs='+', help='Input Python file script to convert. Supports multiple files and shell wildcards (e.g. *.py)') args = parser.parse_args(args, namespace) for src_file in args.python_src_file: blocks = split_code_and_text_blocks(src_file) print('Converting {0}'.format(src_file)) example_nb = jupyter_notebook(blocks) save_notebook(example_nb, src_file.replace('.py', '.ipynb'))
[ "def", "python_to_jupyter_cli", "(", "args", "=", "None", ",", "namespace", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Sphinx-Gallery Notebook converter'", ")", "parser", ".", "add_argument", "(", "'python_src_file'", ",", "nargs", "=", "'+'", ",", "help", "=", "'Input Python file script to convert. Supports multiple files and shell wildcards (e.g. *.py)'", ")", "args", "=", "parser", ".", "parse_args", "(", "args", ",", "namespace", ")", "for", "src_file", "in", "args", ".", "python_src_file", ":", "blocks", "=", "split_code_and_text_blocks", "(", "src_file", ")", "print", "(", "'Converting {0}'", ".", "format", "(", "src_file", ")", ")", "example_nb", "=", "jupyter_notebook", "(", "blocks", ")", "save_notebook", "(", "example_nb", ",", "src_file", ".", "replace", "(", "'.py'", ",", "'.ipynb'", ")", ")" ]
exposes the jupyter notebook renderer to the command line; takes the same arguments as argumentparser.parse_args .
train
false
10,001
def salt_ip_verify_tool(): salt_config = cherrypy.config.get('saltopts', None) if salt_config: cherrypy_conf = salt_config.get('rest_cherrypy', None) if cherrypy_conf: auth_ip_list = cherrypy_conf.get('authorized_ips', None) if auth_ip_list: logger.debug('Found IP list: {0}'.format(auth_ip_list)) rem_ip = cherrypy.request.headers.get('Remote-Addr', None) logger.debug('Request from IP: {0}'.format(rem_ip)) if (rem_ip not in auth_ip_list): logger.error('Blocked IP: {0}'.format(rem_ip)) raise cherrypy.HTTPError(403, 'Bad IP')
[ "def", "salt_ip_verify_tool", "(", ")", ":", "salt_config", "=", "cherrypy", ".", "config", ".", "get", "(", "'saltopts'", ",", "None", ")", "if", "salt_config", ":", "cherrypy_conf", "=", "salt_config", ".", "get", "(", "'rest_cherrypy'", ",", "None", ")", "if", "cherrypy_conf", ":", "auth_ip_list", "=", "cherrypy_conf", ".", "get", "(", "'authorized_ips'", ",", "None", ")", "if", "auth_ip_list", ":", "logger", ".", "debug", "(", "'Found IP list: {0}'", ".", "format", "(", "auth_ip_list", ")", ")", "rem_ip", "=", "cherrypy", ".", "request", ".", "headers", ".", "get", "(", "'Remote-Addr'", ",", "None", ")", "logger", ".", "debug", "(", "'Request from IP: {0}'", ".", "format", "(", "rem_ip", ")", ")", "if", "(", "rem_ip", "not", "in", "auth_ip_list", ")", ":", "logger", ".", "error", "(", "'Blocked IP: {0}'", ".", "format", "(", "rem_ip", ")", ")", "raise", "cherrypy", ".", "HTTPError", "(", "403", ",", "'Bad IP'", ")" ]
if there is a list of authorized ips, reject requests from any other address with an http 403 .
train
true
10,002
def addXIntersectionIndexesFromSegment(index, segment, xIntersectionIndexList): for endpoint in segment: xIntersectionIndexList.append(XIntersectionIndex(index, endpoint.point.real))
[ "def", "addXIntersectionIndexesFromSegment", "(", "index", ",", "segment", ",", "xIntersectionIndexList", ")", ":", "for", "endpoint", "in", "segment", ":", "xIntersectionIndexList", ".", "append", "(", "XIntersectionIndex", "(", "index", ",", "endpoint", ".", "point", ".", "real", ")", ")" ]
add the x intersection indexes from the segment .
train
false
10,004
def update_share_permissions(role_permissions, doc, user): share_ptypes = (u'read', u'write', u'share') permissions_by_share = frappe.db.get_value(u'DocShare', {u'share_doctype': doc.doctype, u'share_name': doc.name, u'user': user}, share_ptypes, as_dict=True) if permissions_by_share: for ptype in share_ptypes: if permissions_by_share[ptype]: role_permissions[ptype] = 1
[ "def", "update_share_permissions", "(", "role_permissions", ",", "doc", ",", "user", ")", ":", "share_ptypes", "=", "(", "u'read'", ",", "u'write'", ",", "u'share'", ")", "permissions_by_share", "=", "frappe", ".", "db", ".", "get_value", "(", "u'DocShare'", ",", "{", "u'share_doctype'", ":", "doc", ".", "doctype", ",", "u'share_name'", ":", "doc", ".", "name", ",", "u'user'", ":", "user", "}", ",", "share_ptypes", ",", "as_dict", "=", "True", ")", "if", "permissions_by_share", ":", "for", "ptype", "in", "share_ptypes", ":", "if", "permissions_by_share", "[", "ptype", "]", ":", "role_permissions", "[", "ptype", "]", "=", "1" ]
updates share permissions on role_permissions for given doc .
train
false
10,005
def is_cjk(character): return any([(start <= ord(character) <= end) for (start, end) in [(4352, 4607), (11904, 42191), (43072, 43135), (44032, 55215), (63744, 64255), (65072, 65103), (65381, 65500), (131072, 196607)]])
[ "def", "is_cjk", "(", "character", ")", ":", "return", "any", "(", "[", "(", "start", "<=", "ord", "(", "character", ")", "<=", "end", ")", "for", "(", "start", ",", "end", ")", "in", "[", "(", "4352", ",", "4607", ")", ",", "(", "11904", ",", "42191", ")", ",", "(", "43072", ",", "43135", ")", ",", "(", "44032", ",", "55215", ")", ",", "(", "63744", ",", "64255", ")", ",", "(", "65072", ",", "65103", ")", ",", "(", "65381", ",", "65500", ")", ",", "(", "131072", ",", "196607", ")", "]", "]", ")" ]
python port of moses code to check for cjk character .
train
false
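Usage sketch for is_cjk above (code points checked against the ranges in the snippet):
assert is_cjk(u'\u6f22')     # CJK unified ideograph: 0x6F22 == 28450 falls in (11904, 42191)
assert not is_cjk(u'a')      # ord('a') == 97 is outside every range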
10,006
@require_role('admin') def log_record(request): if (request.method == 'GET'): return render(request, 'jlog/record.html') elif (request.method == 'POST'): log_id = request.REQUEST.get('id', None) if log_id: TermL = TermLogRecorder(request.user) log = Log.objects.get(id=int(log_id)) if (len(log.filename) == 0): log_file = (log.log_path + '.log') log_time = (log.log_path + '.time') if (os.path.isfile(log_file) and os.path.isfile(log_time)): content = renderJSON(log_file, log_time) return HttpResponse(content) else: return HttpResponse(TermL.load_full_log(filename=log.filename)) else: return HttpResponse('ERROR') else: return HttpResponse('ERROR METHOD!')
[ "@", "require_role", "(", "'admin'", ")", "def", "log_record", "(", "request", ")", ":", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "return", "render", "(", "request", ",", "'jlog/record.html'", ")", "elif", "(", "request", ".", "method", "==", "'POST'", ")", ":", "log_id", "=", "request", ".", "REQUEST", ".", "get", "(", "'id'", ",", "None", ")", "if", "log_id", ":", "TermL", "=", "TermLogRecorder", "(", "request", ".", "user", ")", "log", "=", "Log", ".", "objects", ".", "get", "(", "id", "=", "int", "(", "log_id", ")", ")", "if", "(", "len", "(", "log", ".", "filename", ")", "==", "0", ")", ":", "log_file", "=", "(", "log", ".", "log_path", "+", "'.log'", ")", "log_time", "=", "(", "log", ".", "log_path", "+", "'.time'", ")", "if", "(", "os", ".", "path", ".", "isfile", "(", "log_file", ")", "and", "os", ".", "path", ".", "isfile", "(", "log_time", ")", ")", ":", "content", "=", "renderJSON", "(", "log_file", ",", "log_time", ")", "return", "HttpResponse", "(", "content", ")", "else", ":", "return", "HttpResponse", "(", "TermL", ".", "load_full_log", "(", "filename", "=", "log", ".", "filename", ")", ")", "else", ":", "return", "HttpResponse", "(", "'ERROR'", ")", "else", ":", "return", "HttpResponse", "(", "'ERROR METHOD!'", ")" ]
serve the terminal log record page on get; on post, return the replay content for the requested log id .
train
false
10,007
@conf.commands.register def queso(*args, **kargs): return queso_search(queso_sig(*args, **kargs))
[ "@", "conf", ".", "commands", ".", "register", "def", "queso", "(", "*", "args", ",", "**", "kargs", ")", ":", "return", "queso_search", "(", "queso_sig", "(", "*", "args", ",", "**", "kargs", ")", ")" ]
queso os fingerprinting: build a queso signature for the target and search the signature database .
train
false
10,008
def validate_minimum(value, minimum): if ((minimum is not None) and (value < minimum)): raise ValueError((u'%r must be larger than %r.' % (value, minimum)))
[ "def", "validate_minimum", "(", "value", ",", "minimum", ")", ":", "if", "(", "(", "minimum", "is", "not", "None", ")", "and", "(", "value", "<", "minimum", ")", ")", ":", "raise", "ValueError", "(", "(", "u'%r must be larger than %r.'", "%", "(", "value", ",", "minimum", ")", ")", ")" ]
validate that value is at least minimum, raising valueerror otherwise .
train
false
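Usage sketch for validate_minimum above:
validate_minimum(10, minimum=5)     # passes silently
validate_minimum(10, minimum=None)  # no-op when minimum is None
validate_minimum(3, minimum=5)      # raises ValueError: 3 must be larger than 5.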
10,009
def get_ofp_modules(): return ofproto_protocol._versions
[ "def", "get_ofp_modules", "(", ")", ":", "return", "ofproto_protocol", ".", "_versions" ]
get the mapping from each supported of-wire version to its pair of constants and parser modules .
train
false
10,012
def get_vendor_from_pci_id(pci_id): cmd = ("lspci -n | awk '/%s/ {print $3}'" % pci_id) return re.sub(':', ' ', commands.getoutput(cmd))
[ "def", "get_vendor_from_pci_id", "(", "pci_id", ")", ":", "cmd", "=", "(", "\"lspci -n | awk '/%s/ {print $3}'\"", "%", "pci_id", ")", "return", "re", ".", "sub", "(", "':'", ",", "' '", ",", "commands", ".", "getoutput", "(", "cmd", ")", ")" ]
check out the device vendor id according to pci_id .
train
false
10,013
def execute_instructions(plan, index=None, verbose=False, _commands=None): if (_commands is None): _commands = commands if verbose: from .console import setup_verbose_handlers setup_verbose_handlers() log.debug('executing plan %s', plan) state = {'i': None, 'prefix': context.root_dir, 'index': index} for (instruction, arg) in plan: log.debug(' %s(%r)', instruction, arg) if ((state['i'] is not None) and (instruction in PROGRESS_COMMANDS)): state['i'] += 1 getLogger('progress.update').info((Dist(arg).dist_name, (state['i'] - 1))) cmd = _commands[instruction] if callable(cmd): cmd(state, arg) if ((state['i'] is not None) and (instruction in PROGRESS_COMMANDS) and (state['maxval'] == state['i'])): state['i'] = None getLogger('progress.stop').info(None)
[ "def", "execute_instructions", "(", "plan", ",", "index", "=", "None", ",", "verbose", "=", "False", ",", "_commands", "=", "None", ")", ":", "if", "(", "_commands", "is", "None", ")", ":", "_commands", "=", "commands", "if", "verbose", ":", "from", ".", "console", "import", "setup_verbose_handlers", "setup_verbose_handlers", "(", ")", "log", ".", "debug", "(", "'executing plan %s'", ",", "plan", ")", "state", "=", "{", "'i'", ":", "None", ",", "'prefix'", ":", "context", ".", "root_dir", ",", "'index'", ":", "index", "}", "for", "(", "instruction", ",", "arg", ")", "in", "plan", ":", "log", ".", "debug", "(", "' %s(%r)'", ",", "instruction", ",", "arg", ")", "if", "(", "(", "state", "[", "'i'", "]", "is", "not", "None", ")", "and", "(", "instruction", "in", "PROGRESS_COMMANDS", ")", ")", ":", "state", "[", "'i'", "]", "+=", "1", "getLogger", "(", "'progress.update'", ")", ".", "info", "(", "(", "Dist", "(", "arg", ")", ".", "dist_name", ",", "(", "state", "[", "'i'", "]", "-", "1", ")", ")", ")", "cmd", "=", "_commands", "[", "instruction", "]", "if", "callable", "(", "cmd", ")", ":", "cmd", "(", "state", ",", "arg", ")", "if", "(", "(", "state", "[", "'i'", "]", "is", "not", "None", ")", "and", "(", "instruction", "in", "PROGRESS_COMMANDS", ")", "and", "(", "state", "[", "'maxval'", "]", "==", "state", "[", "'i'", "]", ")", ")", ":", "state", "[", "'i'", "]", "=", "None", "getLogger", "(", "'progress.stop'", ")", ".", "info", "(", "None", ")" ]
execute the instructions in the plan .
train
false
10,015
def disjoint_union(G, H): R1 = nx.convert_node_labels_to_integers(G) R2 = nx.convert_node_labels_to_integers(H, first_label=len(R1)) R = union(R1, R2) R.name = ('disjoint_union( %s, %s )' % (G.name, H.name)) R.graph.update(G.graph) R.graph.update(H.graph) return R
[ "def", "disjoint_union", "(", "G", ",", "H", ")", ":", "R1", "=", "nx", ".", "convert_node_labels_to_integers", "(", "G", ")", "R2", "=", "nx", ".", "convert_node_labels_to_integers", "(", "H", ",", "first_label", "=", "len", "(", "R1", ")", ")", "R", "=", "union", "(", "R1", ",", "R2", ")", "R", ".", "name", "=", "(", "'disjoint_union( %s, %s )'", "%", "(", "G", ".", "name", ",", "H", ".", "name", ")", ")", "R", ".", "graph", ".", "update", "(", "G", ".", "graph", ")", "R", ".", "graph", ".", "update", "(", "H", ".", "graph", ")", "return", "R" ]
return the disjoint union of graphs g and h .
train
false
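Usage sketch; H's nodes are relabelled to follow G's:
import networkx as nx
G = nx.path_graph(3)   # nodes 0, 1, 2
H = nx.path_graph(2)   # nodes 0, 1
R = nx.disjoint_union(G, H)
sorted(R.nodes())      # [0, 1, 2, 3, 4]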
10,016
def _ungap(alphabet): if (not hasattr(alphabet, 'gap_char')): return alphabet elif isinstance(alphabet, Gapped): return alphabet.alphabet elif isinstance(alphabet, HasStopCodon): return HasStopCodon(_ungap(alphabet.alphabet), stop_symbol=alphabet.stop_symbol) elif isinstance(alphabet, AlphabetEncoder): return AlphabetEncoder(_ungap(alphabet.alphabet), letters=alphabet.letters) else: raise NotImplementedError
[ "def", "_ungap", "(", "alphabet", ")", ":", "if", "(", "not", "hasattr", "(", "alphabet", ",", "'gap_char'", ")", ")", ":", "return", "alphabet", "elif", "isinstance", "(", "alphabet", ",", "Gapped", ")", ":", "return", "alphabet", ".", "alphabet", "elif", "isinstance", "(", "alphabet", ",", "HasStopCodon", ")", ":", "return", "HasStopCodon", "(", "_ungap", "(", "alphabet", ".", "alphabet", ")", ",", "stop_symbol", "=", "alphabet", ".", "stop_symbol", ")", "elif", "isinstance", "(", "alphabet", ",", "AlphabetEncoder", ")", ":", "return", "AlphabetEncoder", "(", "_ungap", "(", "alphabet", ".", "alphabet", ")", ",", "letters", "=", "alphabet", ".", "letters", ")", "else", ":", "raise", "NotImplementedError" ]
returns the alphabet without any gap encoder .
train
false
10,018
def pandasSQL_builder(con, flavor=None, schema=None, meta=None, is_cursor=False): _validate_flavor_parameter(flavor) con = _engine_builder(con) if _is_sqlalchemy_connectable(con): return SQLDatabase(con, schema=schema, meta=meta) elif isinstance(con, string_types): raise ImportError('Using URI string without sqlalchemy installed.') else: return SQLiteDatabase(con, is_cursor=is_cursor)
[ "def", "pandasSQL_builder", "(", "con", ",", "flavor", "=", "None", ",", "schema", "=", "None", ",", "meta", "=", "None", ",", "is_cursor", "=", "False", ")", ":", "_validate_flavor_parameter", "(", "flavor", ")", "con", "=", "_engine_builder", "(", "con", ")", "if", "_is_sqlalchemy_connectable", "(", "con", ")", ":", "return", "SQLDatabase", "(", "con", ",", "schema", "=", "schema", ",", "meta", "=", "meta", ")", "elif", "isinstance", "(", "con", ",", "string_types", ")", ":", "raise", "ImportError", "(", "'Using URI string without sqlalchemy installed.'", ")", "else", ":", "return", "SQLiteDatabase", "(", "con", ",", "is_cursor", "=", "is_cursor", ")" ]
convenience function to return the correct pandassql subclass based on the provided parameters .
train
false
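A hedged sketch of the dispatch; this is an internal pandas helper rather than public API:
import sqlite3
con = sqlite3.connect(':memory:')
pandas_sql = pandasSQL_builder(con)  # not a sqlalchemy connectable and not a URI string -> SQLiteDatabase fallback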
10,019
def snapshot_object(target, filename, savefun=npz.save_npz, trigger=(1, 'epoch')): @extension.make_extension(trigger=trigger, priority=(-100)) def snapshot_object(trainer): _snapshot_object(trainer, target, filename.format(trainer), savefun) return snapshot_object
[ "def", "snapshot_object", "(", "target", ",", "filename", ",", "savefun", "=", "npz", ".", "save_npz", ",", "trigger", "=", "(", "1", ",", "'epoch'", ")", ")", ":", "@", "extension", ".", "make_extension", "(", "trigger", "=", "trigger", ",", "priority", "=", "(", "-", "100", ")", ")", "def", "snapshot_object", "(", "trainer", ")", ":", "_snapshot_object", "(", "trainer", ",", "target", ",", "filename", ".", "format", "(", "trainer", ")", ",", "savefun", ")", "return", "snapshot_object" ]
returns a trainer extension to take snapshots of a given object .
train
false
10,021
def startproject(project_name, directory): from random import choice if (project_name in INVALID_PROJECT_NAMES): sys.stderr.write(style.ERROR(("Error: '%r' conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name.\n" % project_name))) sys.exit(1) _start_helper('project', project_name, directory) main_settings_file = os.path.join(directory, project_name, 'settings.py') settings_contents = open(main_settings_file, 'r').read() fp = open(main_settings_file, 'w') secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) settings_contents = re.sub("(?<=SECRET_KEY = ')'", (secret_key + "'"), settings_contents) fp.write(settings_contents) fp.close()
[ "def", "startproject", "(", "project_name", ",", "directory", ")", ":", "from", "random", "import", "choice", "if", "(", "project_name", "in", "INVALID_PROJECT_NAMES", ")", ":", "sys", ".", "stderr", ".", "write", "(", "style", ".", "ERROR", "(", "(", "\"Error: '%r' conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name.\\n\"", "%", "project_name", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "_start_helper", "(", "'project'", ",", "project_name", ",", "directory", ")", "main_settings_file", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "project_name", ",", "'settings.py'", ")", "settings_contents", "=", "open", "(", "main_settings_file", ",", "'r'", ")", ".", "read", "(", ")", "fp", "=", "open", "(", "main_settings_file", ",", "'w'", ")", "secret_key", "=", "''", ".", "join", "(", "[", "choice", "(", "'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'", ")", "for", "i", "in", "range", "(", "50", ")", "]", ")", "settings_contents", "=", "re", ".", "sub", "(", "\"(?<=SECRET_KEY = ')'\"", ",", "(", "secret_key", "+", "\"'\"", ")", ",", "settings_contents", ")", "fp", ".", "write", "(", "settings_contents", ")", "fp", ".", "close", "(", ")" ]
creates a django project for the given project_name in the given directory .
train
false
10,022
def _fread3_many(fobj, n): (b1, b2, b3) = np.fromfile(fobj, '>u1', (3 * n)).reshape((-1), 3).astype(np.int).T return (((b1 << 16) + (b2 << 8)) + b3)
[ "def", "_fread3_many", "(", "fobj", ",", "n", ")", ":", "(", "b1", ",", "b2", ",", "b3", ")", "=", "np", ".", "fromfile", "(", "fobj", ",", "'>u1'", ",", "(", "3", "*", "n", ")", ")", ".", "reshape", "(", "(", "-", "1", ")", ",", "3", ")", ".", "astype", "(", "np", ".", "int", ")", ".", "T", "return", "(", "(", "(", "b1", "<<", "16", ")", "+", "(", "b2", "<<", "8", ")", ")", "+", "b3", ")" ]
read 3-byte ints from an open binary file object .
train
false
10,023
def getdevice_by_uuid(uuid): with settings(hide('running', 'warnings', 'stdout'), warn_only=True): res = run_as_root(('blkid -U %s' % uuid)) if (not res.succeeded): return None return res
[ "def", "getdevice_by_uuid", "(", "uuid", ")", ":", "with", "settings", "(", "hide", "(", "'running'", ",", "'warnings'", ",", "'stdout'", ")", ",", "warn_only", "=", "True", ")", ":", "res", "=", "run_as_root", "(", "(", "'blkid -U %s'", "%", "uuid", ")", ")", "if", "(", "not", "res", ".", "succeeded", ")", ":", "return", "None", "return", "res" ]
get a hdd device by uuid, or none if it is not found .
train
true
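A hedged usage sketch inside a fabric task; the UUID and mount point below are placeholders, and run_as_root is the helper the snippet itself uses:
device = getdevice_by_uuid('2d4e7f8b-0000-0000-0000-000000000000')  # hypothetical UUID
if device:
    run_as_root('mount %s /mnt/data' % device)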
10,024
def _escapecss(e): s = e.object[e.start:e.end] return (u''.join([(u'\\%s ' % str(hex(ord(x)))[2:].upper()) for x in s]), e.end)
[ "def", "_escapecss", "(", "e", ")", ":", "s", "=", "e", ".", "object", "[", "e", ".", "start", ":", "e", ".", "end", "]", "return", "(", "u''", ".", "join", "(", "[", "(", "u'\\\\%s '", "%", "str", "(", "hex", "(", "ord", "(", "x", ")", ")", ")", "[", "2", ":", "]", ".", "upper", "(", ")", ")", "for", "x", "in", "s", "]", ")", ",", "e", ".", "end", ")" ]
escapes characters not allowed in the current encoding the css way: with a backslash followed by an uppercase hex code point and a space .
train
false
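A sketch of how such a handler is used, registered as a codecs error handler (the 'escapecss' name here is an assumption about how cssutils registers it):
import codecs
codecs.register_error('escapecss', _escapecss)
u'a\xe4b'.encode('ascii', 'escapecss')   # 'a\\E4 b' -- U+00E4 escaped as backslash, hex, space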
10,026
def create_module(project, name, sourcefolder=None): if (sourcefolder is None): sourcefolder = project.root packages = name.split('.') parent = sourcefolder for package in packages[:(-1)]: parent = parent.get_child(package) return parent.create_file((packages[(-1)] + '.py'))
[ "def", "create_module", "(", "project", ",", "name", ",", "sourcefolder", "=", "None", ")", ":", "if", "(", "sourcefolder", "is", "None", ")", ":", "sourcefolder", "=", "project", ".", "root", "packages", "=", "name", ".", "split", "(", "'.'", ")", "parent", "=", "sourcefolder", "for", "package", "in", "packages", "[", ":", "(", "-", "1", ")", "]", ":", "parent", "=", "parent", ".", "get_child", "(", "package", ")", "return", "parent", ".", "create_file", "(", "(", "packages", "[", "(", "-", "1", ")", "]", "+", "'.py'", ")", ")" ]
creates a module and returns a rope file resource for it .
train
true
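A hedged sketch; 'project' stands for an open rope project instance, and the 'pkg' package is assumed to already exist under the root:
mod = create_module(project, 'pkg.utils')  # creates pkg/utils.py
mod.read()                                 # '' -- a fresh, empty module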
10,027
def start_qtapp(app=None): if (app is None): app = init_qtapp() if (not is_event_loop_running(app)): app._in_event_loop = True app.exec_() app._in_event_loop = False else: app._in_event_loop = True
[ "def", "start_qtapp", "(", "app", "=", "None", ")", ":", "if", "(", "app", "is", "None", ")", ":", "app", "=", "init_qtapp", "(", ")", "if", "(", "not", "is_event_loop_running", "(", "app", ")", ")", ":", "app", ".", "_in_event_loop", "=", "True", "app", ".", "exec_", "(", ")", "app", ".", "_in_event_loop", "=", "False", "else", ":", "app", ".", "_in_event_loop", "=", "True" ]
start qt mainloop .
train
false
10,028
def convert_json_to_yaml(json_str): tpl = jsonutils.loads(json_str, object_pairs_hook=collections.OrderedDict) def top_level_items(tpl): (yield ('HeatTemplateFormatVersion', '2012-12-12')) for (k, v) in six.iteritems(tpl): if (k != 'AWSTemplateFormatVersion'): (yield (k, v)) return yaml.dump(collections.OrderedDict(top_level_items(tpl)), Dumper=yaml_dumper)
[ "def", "convert_json_to_yaml", "(", "json_str", ")", ":", "tpl", "=", "jsonutils", ".", "loads", "(", "json_str", ",", "object_pairs_hook", "=", "collections", ".", "OrderedDict", ")", "def", "top_level_items", "(", "tpl", ")", ":", "(", "yield", "(", "'HeatTemplateFormatVersion'", ",", "'2012-12-12'", ")", ")", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "tpl", ")", ":", "if", "(", "k", "!=", "'AWSTemplateFormatVersion'", ")", ":", "(", "yield", "(", "k", ",", "v", ")", ")", "return", "yaml", ".", "dump", "(", "collections", ".", "OrderedDict", "(", "top_level_items", "(", "tpl", ")", ")", ",", "Dumper", "=", "yaml_dumper", ")" ]
convert aws json template format to heat yaml format .
train
false