id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
42,664
def fixup_enums(obj, name_class_map, suffix='AsString'): for n in name_class_map.keys(): c = name_class_map[n] setattr(obj, (n + suffix), c._VALUES_TO_NAMES[getattr(obj, n)]) return obj
[ "def", "fixup_enums", "(", "obj", ",", "name_class_map", ",", "suffix", "=", "'AsString'", ")", ":", "for", "n", "in", "name_class_map", ".", "keys", "(", ")", ":", "c", "=", "name_class_map", "[", "n", "]", "setattr", "(", "obj", ",", "(", "n", "+", "suffix", ")", ",", "c", ".", "_VALUES_TO_NAMES", "[", "getattr", "(", "obj", ",", "n", ")", "]", ")", "return", "obj" ]
relying on todds thrift-546 patch .
train
false
42,665
def list_updates(software=True, drivers=False, summary=False, skip_installed=True, categories=None, severities=None, download=False, install=False): salt.utils.warn_until('Fluorine', "This function is replaced by 'list' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.") return list(software, drivers, summary, skip_installed, categories, severities, download, install)
[ "def", "list_updates", "(", "software", "=", "True", ",", "drivers", "=", "False", ",", "summary", "=", "False", ",", "skip_installed", "=", "True", ",", "categories", "=", "None", ",", "severities", "=", "None", ",", "download", "=", "False", ",", "install", "=", "False", ")", ":", "salt", ".", "utils", ".", "warn_until", "(", "'Fluorine'", ",", "\"This function is replaced by 'list' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.\"", ")", "return", "list", "(", "software", ",", "drivers", ",", "summary", ",", "skip_installed", ",", "categories", ",", "severities", ",", "download", ",", "install", ")" ]
returns a summary of available updates .
train
false
42,666
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): if (not zipfile.is_zipfile(filename)): raise UnrecognizedFormat(('%s is not a zip file' % (filename,))) z = zipfile.ZipFile(filename) try: for info in z.infolist(): name = info.filename if (name.startswith('/') or ('..' in name)): continue target = os.path.join(extract_dir, *name.split('/')) target = progress_filter(name, target) if (not target): continue if name.endswith('/'): ensure_directory(target) else: ensure_directory(target) data = z.read(info.filename) f = open(target, 'wb') try: f.write(data) finally: f.close() del data finally: z.close()
[ "def", "unpack_zipfile", "(", "filename", ",", "extract_dir", ",", "progress_filter", "=", "default_filter", ")", ":", "if", "(", "not", "zipfile", ".", "is_zipfile", "(", "filename", ")", ")", ":", "raise", "UnrecognizedFormat", "(", "(", "'%s is not a zip file'", "%", "(", "filename", ",", ")", ")", ")", "z", "=", "zipfile", ".", "ZipFile", "(", "filename", ")", "try", ":", "for", "info", "in", "z", ".", "infolist", "(", ")", ":", "name", "=", "info", ".", "filename", "if", "(", "name", ".", "startswith", "(", "'/'", ")", "or", "(", "'..'", "in", "name", ")", ")", ":", "continue", "target", "=", "os", ".", "path", ".", "join", "(", "extract_dir", ",", "*", "name", ".", "split", "(", "'/'", ")", ")", "target", "=", "progress_filter", "(", "name", ",", "target", ")", "if", "(", "not", "target", ")", ":", "continue", "if", "name", ".", "endswith", "(", "'/'", ")", ":", "ensure_directory", "(", "target", ")", "else", ":", "ensure_directory", "(", "target", ")", "data", "=", "z", ".", "read", "(", "info", ".", "filename", ")", "f", "=", "open", "(", "target", ",", "'wb'", ")", "try", ":", "f", ".", "write", "(", "data", ")", "finally", ":", "f", ".", "close", "(", ")", "del", "data", "finally", ":", "z", ".", "close", "(", ")" ]
unpack a zipfile .
train
true
42,669
def get_ohloh_api_request(url, api_key, params=None): parameters = {u'api_key': api_key} if (params is not None): for (key, value) in params.items(): parameters[key] = value xml = urlopen((u'%s?%s' % (url, urlencode(parameters)))) tree = ElementTree.parse(xml) error = tree.getroot().find(u'error') if (error is not None): raise Exception(ElementTree.tostring(error)) return tree
[ "def", "get_ohloh_api_request", "(", "url", ",", "api_key", ",", "params", "=", "None", ")", ":", "parameters", "=", "{", "u'api_key'", ":", "api_key", "}", "if", "(", "params", "is", "not", "None", ")", ":", "for", "(", "key", ",", "value", ")", "in", "params", ".", "items", "(", ")", ":", "parameters", "[", "key", "]", "=", "value", "xml", "=", "urlopen", "(", "(", "u'%s?%s'", "%", "(", "url", ",", "urlencode", "(", "parameters", ")", ")", ")", ")", "tree", "=", "ElementTree", ".", "parse", "(", "xml", ")", "error", "=", "tree", ".", "getroot", "(", ")", ".", "find", "(", "u'error'", ")", "if", "(", "error", "is", "not", "None", ")", ":", "raise", "Exception", "(", "ElementTree", ".", "tostring", "(", "error", ")", ")", "return", "tree" ]
sends an api request to ohloh and returns the resulting xml tree or raises an exception if an error occurred .
train
false
42,670
def set_kernel_var(): Popen(['sysctl', '-w', 'net.ipv4.conf.all.route_localnet=1'], stdout=DN, stderr=PIPE)
[ "def", "set_kernel_var", "(", ")", ":", "Popen", "(", "[", "'sysctl'", ",", "'-w'", ",", "'net.ipv4.conf.all.route_localnet=1'", "]", ",", "stdout", "=", "DN", ",", "stderr", "=", "PIPE", ")" ]
set kernel variables .
train
false
42,671
def test_imap_folder_run_state_always_true(db, default_account): create_foldersyncstatuses(db, default_account) for folderstatus in default_account.foldersyncstatuses: assert (folderstatus.sync_should_run is True)
[ "def", "test_imap_folder_run_state_always_true", "(", "db", ",", "default_account", ")", ":", "create_foldersyncstatuses", "(", "db", ",", "default_account", ")", "for", "folderstatus", "in", "default_account", ".", "foldersyncstatuses", ":", "assert", "(", "folderstatus", ".", "sync_should_run", "is", "True", ")" ]
test that for an imap account .
train
false
42,675
def pad_image(img, padding_image_height, padding_image_width): src_width = img.size[0] src_height = img.size[1] if (padding_image_width < src_width): raise ValueError(('Source image width %d is greater than padding width %d' % (src_width, padding_image_width))) if (padding_image_height < src_height): raise ValueError(('Source image height %d is greater than padding height %d' % (src_height, padding_image_height))) padded_img = PIL.Image.new(img.mode, (padding_image_width, padding_image_height), 'black') padded_img.paste(img, (0, 0)) return padded_img
[ "def", "pad_image", "(", "img", ",", "padding_image_height", ",", "padding_image_width", ")", ":", "src_width", "=", "img", ".", "size", "[", "0", "]", "src_height", "=", "img", ".", "size", "[", "1", "]", "if", "(", "padding_image_width", "<", "src_width", ")", ":", "raise", "ValueError", "(", "(", "'Source image width %d is greater than padding width %d'", "%", "(", "src_width", ",", "padding_image_width", ")", ")", ")", "if", "(", "padding_image_height", "<", "src_height", ")", ":", "raise", "ValueError", "(", "(", "'Source image height %d is greater than padding height %d'", "%", "(", "src_height", ",", "padding_image_height", ")", ")", ")", "padded_img", "=", "PIL", ".", "Image", ".", "new", "(", "img", ".", "mode", ",", "(", "padding_image_width", ",", "padding_image_height", ")", ",", "'black'", ")", "padded_img", ".", "paste", "(", "img", ",", "(", "0", ",", "0", ")", ")", "return", "padded_img" ]
pad a single image to the specified dimensions .
train
false
42,676
def compositepi(n): n = int(n) if (n < 4): return 0 return ((n - primepi(n)) - 1)
[ "def", "compositepi", "(", "n", ")", ":", "n", "=", "int", "(", "n", ")", "if", "(", "n", "<", "4", ")", ":", "return", "0", "return", "(", "(", "n", "-", "primepi", "(", "n", ")", ")", "-", "1", ")" ]
return the number of positive composite numbers less than or equal to n .
train
false
42,677
def _tgrep_bind_node_label_action(_s, _l, tokens): if (len(tokens) == 1): return tokens[0] else: assert (len(tokens) == 3) assert (tokens[1] == u'=') node_pred = tokens[0] node_label = tokens[2] def node_label_bind_pred(n, m=None, l=None): if node_pred(n, m, l): if (l is None): raise TgrepException(u'cannot bind node_label {0}: label_dict is None'.format(node_label)) l[node_label] = n return True else: return False return node_label_bind_pred
[ "def", "_tgrep_bind_node_label_action", "(", "_s", ",", "_l", ",", "tokens", ")", ":", "if", "(", "len", "(", "tokens", ")", "==", "1", ")", ":", "return", "tokens", "[", "0", "]", "else", ":", "assert", "(", "len", "(", "tokens", ")", "==", "3", ")", "assert", "(", "tokens", "[", "1", "]", "==", "u'='", ")", "node_pred", "=", "tokens", "[", "0", "]", "node_label", "=", "tokens", "[", "2", "]", "def", "node_label_bind_pred", "(", "n", ",", "m", "=", "None", ",", "l", "=", "None", ")", ":", "if", "node_pred", "(", "n", ",", "m", ",", "l", ")", ":", "if", "(", "l", "is", "None", ")", ":", "raise", "TgrepException", "(", "u'cannot bind node_label {0}: label_dict is None'", ".", "format", "(", "node_label", ")", ")", "l", "[", "node_label", "]", "=", "n", "return", "True", "else", ":", "return", "False", "return", "node_label_bind_pred" ]
builds a lambda function representing a predicate on a tree node which can optionally bind a matching node into the tgrep2 strings label_dict .
train
false
42,678
def _is_vdev(name): if ((name in ('stripe', 'mirror', 'raidz', 'raidz1', 'raidz2', 'raidz3')) or re.search('^(mirror|raidz|raidz1|raidz2|raidz3)(-\\d+)?$', name)): return True return False
[ "def", "_is_vdev", "(", "name", ")", ":", "if", "(", "(", "name", "in", "(", "'stripe'", ",", "'mirror'", ",", "'raidz'", ",", "'raidz1'", ",", "'raidz2'", ",", "'raidz3'", ")", ")", "or", "re", ".", "search", "(", "'^(mirror|raidz|raidz1|raidz2|raidz3)(-\\\\d+)?$'", ",", "name", ")", ")", ":", "return", "True", "return", "False" ]
find out if a given name is a reserved word in zfs .
train
false
42,679
def time_zones_for_geographical_number(numobj): e164_num = format_number(numobj, PhoneNumberFormat.E164) if (not e164_num.startswith(U_PLUS)): raise Exception('Expect E164 number to start with +') for prefix_len in range(TIMEZONE_LONGEST_PREFIX, 0, (-1)): prefix = e164_num[1:(1 + prefix_len)] if (prefix in TIMEZONE_DATA): return TIMEZONE_DATA[prefix] return _UNKNOWN_TIME_ZONE_LIST
[ "def", "time_zones_for_geographical_number", "(", "numobj", ")", ":", "e164_num", "=", "format_number", "(", "numobj", ",", "PhoneNumberFormat", ".", "E164", ")", "if", "(", "not", "e164_num", ".", "startswith", "(", "U_PLUS", ")", ")", ":", "raise", "Exception", "(", "'Expect E164 number to start with +'", ")", "for", "prefix_len", "in", "range", "(", "TIMEZONE_LONGEST_PREFIX", ",", "0", ",", "(", "-", "1", ")", ")", ":", "prefix", "=", "e164_num", "[", "1", ":", "(", "1", "+", "prefix_len", ")", "]", "if", "(", "prefix", "in", "TIMEZONE_DATA", ")", ":", "return", "TIMEZONE_DATA", "[", "prefix", "]", "return", "_UNKNOWN_TIME_ZONE_LIST" ]
returns a list of time zones to which a phone number belongs .
train
true
42,680
def _main(): (options, args) = process_options() if options.doctest: import doctest return doctest.testmod() start_time = time.time() for path in args: if os.path.isdir(path): input_dir(path) else: input_file(path) elapsed = (time.time() - start_time) if options.statistics: print_statistics() if options.benchmark: print_benchmark(elapsed)
[ "def", "_main", "(", ")", ":", "(", "options", ",", "args", ")", "=", "process_options", "(", ")", "if", "options", ".", "doctest", ":", "import", "doctest", "return", "doctest", ".", "testmod", "(", ")", "start_time", "=", "time", ".", "time", "(", ")", "for", "path", "in", "args", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "input_dir", "(", "path", ")", "else", ":", "input_file", "(", "path", ")", "elapsed", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "if", "options", ".", "statistics", ":", "print_statistics", "(", ")", "if", "options", ".", "benchmark", ":", "print_benchmark", "(", "elapsed", ")" ]
display all information sysconfig detains .
train
true
42,681
def _setup_vmin_vmax(data, vmin, vmax, norm=False): if ((vmax is None) and (vmin is None)): vmax = np.abs(data).max() if norm: vmin = 0.0 else: vmin = (- vmax) else: if callable(vmin): vmin = vmin(data) elif (vmin is None): if norm: vmin = 0.0 else: vmin = np.min(data) if callable(vmax): vmax = vmax(data) elif (vmax is None): vmax = np.max(data) return (vmin, vmax)
[ "def", "_setup_vmin_vmax", "(", "data", ",", "vmin", ",", "vmax", ",", "norm", "=", "False", ")", ":", "if", "(", "(", "vmax", "is", "None", ")", "and", "(", "vmin", "is", "None", ")", ")", ":", "vmax", "=", "np", ".", "abs", "(", "data", ")", ".", "max", "(", ")", "if", "norm", ":", "vmin", "=", "0.0", "else", ":", "vmin", "=", "(", "-", "vmax", ")", "else", ":", "if", "callable", "(", "vmin", ")", ":", "vmin", "=", "vmin", "(", "data", ")", "elif", "(", "vmin", "is", "None", ")", ":", "if", "norm", ":", "vmin", "=", "0.0", "else", ":", "vmin", "=", "np", ".", "min", "(", "data", ")", "if", "callable", "(", "vmax", ")", ":", "vmax", "=", "vmax", "(", "data", ")", "elif", "(", "vmax", "is", "None", ")", ":", "vmax", "=", "np", ".", "max", "(", "data", ")", "return", "(", "vmin", ",", "vmax", ")" ]
aux function to handle vmin and vmax parameters .
train
false
42,682
def dense_orth(dim): from scipy import rand from scipy.linalg import orth return orth(rand(dim, dim))
[ "def", "dense_orth", "(", "dim", ")", ":", "from", "scipy", "import", "rand", "from", "scipy", ".", "linalg", "import", "orth", "return", "orth", "(", "rand", "(", "dim", ",", "dim", ")", ")" ]
constructs a dense orthogonal matrix .
train
false
42,685
def get_tox_env_from_version(): version_info = sys.version_info[:2] try: return ACCEPTED_VERSIONS[version_info] except KeyError: raise EnvironmentError('Invalid Python version', version_info, 'Accepted versions are', sorted(ACCEPTED_VERSIONS.keys()))
[ "def", "get_tox_env_from_version", "(", ")", ":", "version_info", "=", "sys", ".", "version_info", "[", ":", "2", "]", "try", ":", "return", "ACCEPTED_VERSIONS", "[", "version_info", "]", "except", "KeyError", ":", "raise", "EnvironmentError", "(", "'Invalid Python version'", ",", "version_info", ",", "'Accepted versions are'", ",", "sorted", "(", "ACCEPTED_VERSIONS", ".", "keys", "(", ")", ")", ")" ]
get tox environment from the current python version .
train
false
42,688
def _check_setup_scripts(base_release_tag, changed_only=True): setup_scripts = [('scripts/%s' % item) for item in ['setup.sh', 'setup_gae.sh', 'install_third_party.sh', 'install_third_party.py']] changed_files = _git_diff_names_only(base_release_tag) changes_dict = {script: (script in changed_files) for script in setup_scripts} if changed_only: return {name: status for (name, status) in changes_dict.items() if status} else: return changes_dict
[ "def", "_check_setup_scripts", "(", "base_release_tag", ",", "changed_only", "=", "True", ")", ":", "setup_scripts", "=", "[", "(", "'scripts/%s'", "%", "item", ")", "for", "item", "in", "[", "'setup.sh'", ",", "'setup_gae.sh'", ",", "'install_third_party.sh'", ",", "'install_third_party.py'", "]", "]", "changed_files", "=", "_git_diff_names_only", "(", "base_release_tag", ")", "changes_dict", "=", "{", "script", ":", "(", "script", "in", "changed_files", ")", "for", "script", "in", "setup_scripts", "}", "if", "changed_only", ":", "return", "{", "name", ":", "status", "for", "(", "name", ",", "status", ")", "in", "changes_dict", ".", "items", "(", ")", "if", "status", "}", "else", ":", "return", "changes_dict" ]
check if setup scripts have changed .
train
false
42,689
def _TR56(rv, f, g, h, max, pow): def _f(rv): if (not (rv.is_Pow and (rv.base.func == f))): return rv if ((rv.exp < 0) == True): return rv if ((rv.exp > max) == True): return rv if (rv.exp == 2): return h((g(rv.base.args[0]) ** 2)) else: if (rv.exp == 4): e = 2 elif (not pow): if (rv.exp % 2): return rv e = (rv.exp // 2) else: p = perfect_power(rv.exp) if (not p): return rv e = (rv.exp // 2) return (h((g(rv.base.args[0]) ** 2)) ** e) return bottom_up(rv, _f)
[ "def", "_TR56", "(", "rv", ",", "f", ",", "g", ",", "h", ",", "max", ",", "pow", ")", ":", "def", "_f", "(", "rv", ")", ":", "if", "(", "not", "(", "rv", ".", "is_Pow", "and", "(", "rv", ".", "base", ".", "func", "==", "f", ")", ")", ")", ":", "return", "rv", "if", "(", "(", "rv", ".", "exp", "<", "0", ")", "==", "True", ")", ":", "return", "rv", "if", "(", "(", "rv", ".", "exp", ">", "max", ")", "==", "True", ")", ":", "return", "rv", "if", "(", "rv", ".", "exp", "==", "2", ")", ":", "return", "h", "(", "(", "g", "(", "rv", ".", "base", ".", "args", "[", "0", "]", ")", "**", "2", ")", ")", "else", ":", "if", "(", "rv", ".", "exp", "==", "4", ")", ":", "e", "=", "2", "elif", "(", "not", "pow", ")", ":", "if", "(", "rv", ".", "exp", "%", "2", ")", ":", "return", "rv", "e", "=", "(", "rv", ".", "exp", "//", "2", ")", "else", ":", "p", "=", "perfect_power", "(", "rv", ".", "exp", ")", "if", "(", "not", "p", ")", ":", "return", "rv", "e", "=", "(", "rv", ".", "exp", "//", "2", ")", "return", "(", "h", "(", "(", "g", "(", "rv", ".", "base", ".", "args", "[", "0", "]", ")", "**", "2", ")", ")", "**", "e", ")", "return", "bottom_up", "(", "rv", ",", "_f", ")" ]
helper for tr5 and tr6 to replace f**2 with h options max : controls size of exponent that can appear on f e .
train
false
42,690
def set_color_formatter(logger=None, **kw): if (logger is None): logger = logging.getLogger() if (not logger.handlers): logging.basicConfig() format_msg = logger.handlers[0].formatter._fmt fmt = ColorFormatter(format_msg, **kw) fmt.colorfilters.append(xxx_cyan) logger.handlers[0].setFormatter(fmt)
[ "def", "set_color_formatter", "(", "logger", "=", "None", ",", "**", "kw", ")", ":", "if", "(", "logger", "is", "None", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "if", "(", "not", "logger", ".", "handlers", ")", ":", "logging", ".", "basicConfig", "(", ")", "format_msg", "=", "logger", ".", "handlers", "[", "0", "]", ".", "formatter", ".", "_fmt", "fmt", "=", "ColorFormatter", "(", "format_msg", ",", "**", "kw", ")", "fmt", ".", "colorfilters", ".", "append", "(", "xxx_cyan", ")", "logger", ".", "handlers", "[", "0", "]", ".", "setFormatter", "(", "fmt", ")" ]
install a color formatter on the logger .
train
false
42,692
def random_color_func(word=None, font_size=None, position=None, orientation=None, font_path=None, random_state=None): if (random_state is None): random_state = Random() return ('hsl(%d, 80%%, 50%%)' % random_state.randint(0, 255))
[ "def", "random_color_func", "(", "word", "=", "None", ",", "font_size", "=", "None", ",", "position", "=", "None", ",", "orientation", "=", "None", ",", "font_path", "=", "None", ",", "random_state", "=", "None", ")", ":", "if", "(", "random_state", "is", "None", ")", ":", "random_state", "=", "Random", "(", ")", "return", "(", "'hsl(%d, 80%%, 50%%)'", "%", "random_state", ".", "randint", "(", "0", ",", "255", ")", ")" ]
random hue color generation .
train
true
42,693
def get_connector_properties(root_helper, my_ip, multipath, enforce_multipath, host=None): props = {} props['ip'] = my_ip props['host'] = host iscsi = ISCSIConnector('') props['initiator'] = iscsi.get_initiator() props['wwpns'] = ['100010604b019419'] props['wwnns'] = ['200010604b019419'] props['multipath'] = multipath props['platform'] = 'x86_64' props['os_type'] = 'linux2' return props
[ "def", "get_connector_properties", "(", "root_helper", ",", "my_ip", ",", "multipath", ",", "enforce_multipath", ",", "host", "=", "None", ")", ":", "props", "=", "{", "}", "props", "[", "'ip'", "]", "=", "my_ip", "props", "[", "'host'", "]", "=", "host", "iscsi", "=", "ISCSIConnector", "(", "''", ")", "props", "[", "'initiator'", "]", "=", "iscsi", ".", "get_initiator", "(", ")", "props", "[", "'wwpns'", "]", "=", "[", "'100010604b019419'", "]", "props", "[", "'wwnns'", "]", "=", "[", "'200010604b019419'", "]", "props", "[", "'multipath'", "]", "=", "multipath", "props", "[", "'platform'", "]", "=", "'x86_64'", "props", "[", "'os_type'", "]", "=", "'linux2'", "return", "props" ]
fake os-brick .
train
false
42,694
def rmse(x1, x2, axis=0): x1 = np.asanyarray(x1) x2 = np.asanyarray(x2) return np.sqrt(mse(x1, x2, axis=axis))
[ "def", "rmse", "(", "x1", ",", "x2", ",", "axis", "=", "0", ")", ":", "x1", "=", "np", ".", "asanyarray", "(", "x1", ")", "x2", "=", "np", ".", "asanyarray", "(", "x2", ")", "return", "np", ".", "sqrt", "(", "mse", "(", "x1", ",", "x2", ",", "axis", "=", "axis", ")", ")" ]
computes the root mean squared error .
train
false
42,696
def _generate_email(user): return '{0}@ckan.org'.format(user.name).lower()
[ "def", "_generate_email", "(", "user", ")", ":", "return", "'{0}@ckan.org'", ".", "format", "(", "user", ".", "name", ")", ".", "lower", "(", ")" ]
return an email address for the given user factory stub object .
train
false
42,699
def deduce_command(): return (None or check_command(u'powerline') or check_command(os.path.join(POWERLINE_ROOT, u'scripts', u'powerline')) or ((which(u'sh') and which(u'sed') and which(u'socat')) and check_command(os.path.join(POWERLINE_ROOT, u'client', u'powerline.sh'))) or check_command(os.path.join(POWERLINE_ROOT, u'client', u'powerline.py')) or check_command(u'powerline-render') or check_command(os.path.join(POWERLINE_ROOT, u'scripts', u'powerline-render')))
[ "def", "deduce_command", "(", ")", ":", "return", "(", "None", "or", "check_command", "(", "u'powerline'", ")", "or", "check_command", "(", "os", ".", "path", ".", "join", "(", "POWERLINE_ROOT", ",", "u'scripts'", ",", "u'powerline'", ")", ")", "or", "(", "(", "which", "(", "u'sh'", ")", "and", "which", "(", "u'sed'", ")", "and", "which", "(", "u'socat'", ")", ")", "and", "check_command", "(", "os", ".", "path", ".", "join", "(", "POWERLINE_ROOT", ",", "u'client'", ",", "u'powerline.sh'", ")", ")", ")", "or", "check_command", "(", "os", ".", "path", ".", "join", "(", "POWERLINE_ROOT", ",", "u'client'", ",", "u'powerline.py'", ")", ")", "or", "check_command", "(", "u'powerline-render'", ")", "or", "check_command", "(", "os", ".", "path", ".", "join", "(", "POWERLINE_ROOT", ",", "u'scripts'", ",", "u'powerline-render'", ")", ")", ")" ]
deduce which command to use for powerline candidates: * powerline .
train
false
42,700
def StringEncoder(field_number, is_repeated, is_packed): tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) local_EncodeVarint = _EncodeVarint local_len = len assert (not is_packed) if is_repeated: def EncodeRepeatedField(write, value): for element in value: encoded = element.encode('utf-8') write(tag) local_EncodeVarint(write, local_len(encoded)) write(encoded) return EncodeRepeatedField else: def EncodeField(write, value): encoded = value.encode('utf-8') write(tag) local_EncodeVarint(write, local_len(encoded)) return write(encoded) return EncodeField
[ "def", "StringEncoder", "(", "field_number", ",", "is_repeated", ",", "is_packed", ")", ":", "tag", "=", "TagBytes", "(", "field_number", ",", "wire_format", ".", "WIRETYPE_LENGTH_DELIMITED", ")", "local_EncodeVarint", "=", "_EncodeVarint", "local_len", "=", "len", "assert", "(", "not", "is_packed", ")", "if", "is_repeated", ":", "def", "EncodeRepeatedField", "(", "write", ",", "value", ")", ":", "for", "element", "in", "value", ":", "encoded", "=", "element", ".", "encode", "(", "'utf-8'", ")", "write", "(", "tag", ")", "local_EncodeVarint", "(", "write", ",", "local_len", "(", "encoded", ")", ")", "write", "(", "encoded", ")", "return", "EncodeRepeatedField", "else", ":", "def", "EncodeField", "(", "write", ",", "value", ")", ":", "encoded", "=", "value", ".", "encode", "(", "'utf-8'", ")", "write", "(", "tag", ")", "local_EncodeVarint", "(", "write", ",", "local_len", "(", "encoded", ")", ")", "return", "write", "(", "encoded", ")", "return", "EncodeField" ]
returns an encoder for a string field .
train
true
42,701
def kotanchek(data): return (exp((- ((data[0] - 1) ** 2))) / (3.2 + ((data[1] - 2.5) ** 2)))
[ "def", "kotanchek", "(", "data", ")", ":", "return", "(", "exp", "(", "(", "-", "(", "(", "data", "[", "0", "]", "-", "1", ")", "**", "2", ")", ")", ")", "/", "(", "3.2", "+", "(", "(", "data", "[", "1", "]", "-", "2.5", ")", "**", "2", ")", ")", ")" ]
kotanchek benchmark function .
train
false
42,702
def _search_keys(text, keyserver, user=None): gpg = _create_gpg(user) if keyserver: _keys = gpg.search_keys(text, keyserver) else: _keys = gpg.search_keys(text) return _keys
[ "def", "_search_keys", "(", "text", ",", "keyserver", ",", "user", "=", "None", ")", ":", "gpg", "=", "_create_gpg", "(", "user", ")", "if", "keyserver", ":", "_keys", "=", "gpg", ".", "search_keys", "(", "text", ",", "keyserver", ")", "else", ":", "_keys", "=", "gpg", ".", "search_keys", "(", "text", ")", "return", "_keys" ]
helper function for searching keys from keyserver .
train
true
42,705
def embed(**kwargs): config = kwargs.get('config') header = kwargs.pop('header', u'') compile_flags = kwargs.pop('compile_flags', None) if (config is None): config = load_default_config() config.InteractiveShellEmbed = config.TerminalInteractiveShell kwargs['config'] = config ps1 = None ps2 = None try: ps1 = sys.ps1 ps2 = sys.ps2 except AttributeError: pass saved_shell_instance = InteractiveShell._instance if (saved_shell_instance is not None): cls = type(saved_shell_instance) cls.clear_instance() frame = sys._getframe(1) shell = InteractiveShellEmbed.instance(_call_location_id=('%s:%s' % (frame.f_code.co_filename, frame.f_lineno)), **kwargs) shell(header=header, stack_depth=2, compile_flags=compile_flags) InteractiveShellEmbed.clear_instance() if (saved_shell_instance is not None): cls = type(saved_shell_instance) cls.clear_instance() for subclass in cls._walk_mro(): subclass._instance = saved_shell_instance if (ps1 is not None): sys.ps1 = ps1 sys.ps2 = ps2
[ "def", "embed", "(", "**", "kwargs", ")", ":", "config", "=", "kwargs", ".", "get", "(", "'config'", ")", "header", "=", "kwargs", ".", "pop", "(", "'header'", ",", "u''", ")", "compile_flags", "=", "kwargs", ".", "pop", "(", "'compile_flags'", ",", "None", ")", "if", "(", "config", "is", "None", ")", ":", "config", "=", "load_default_config", "(", ")", "config", ".", "InteractiveShellEmbed", "=", "config", ".", "TerminalInteractiveShell", "kwargs", "[", "'config'", "]", "=", "config", "ps1", "=", "None", "ps2", "=", "None", "try", ":", "ps1", "=", "sys", ".", "ps1", "ps2", "=", "sys", ".", "ps2", "except", "AttributeError", ":", "pass", "saved_shell_instance", "=", "InteractiveShell", ".", "_instance", "if", "(", "saved_shell_instance", "is", "not", "None", ")", ":", "cls", "=", "type", "(", "saved_shell_instance", ")", "cls", ".", "clear_instance", "(", ")", "frame", "=", "sys", ".", "_getframe", "(", "1", ")", "shell", "=", "InteractiveShellEmbed", ".", "instance", "(", "_call_location_id", "=", "(", "'%s:%s'", "%", "(", "frame", ".", "f_code", ".", "co_filename", ",", "frame", ".", "f_lineno", ")", ")", ",", "**", "kwargs", ")", "shell", "(", "header", "=", "header", ",", "stack_depth", "=", "2", ",", "compile_flags", "=", "compile_flags", ")", "InteractiveShellEmbed", ".", "clear_instance", "(", ")", "if", "(", "saved_shell_instance", "is", "not", "None", ")", ":", "cls", "=", "type", "(", "saved_shell_instance", ")", "cls", ".", "clear_instance", "(", ")", "for", "subclass", "in", "cls", ".", "_walk_mro", "(", ")", ":", "subclass", ".", "_instance", "=", "saved_shell_instance", "if", "(", "ps1", "is", "not", "None", ")", ":", "sys", ".", "ps1", "=", "ps1", "sys", ".", "ps2", "=", "ps2" ]
embeds existing plotly figure in ipython notebook plotly uniquely identifies figures with a file_owner/file_id pair .
train
false
42,708
def get_proxy_bypass(network_service='Ethernet'): if (__grains__['os'] == 'Windows'): reg_val = __salt__['reg.read_value']('HKEY_CURRENT_USER', 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings', 'ProxyOverride') bypass_servers = reg_val['vdata'].replace('<local>', '').split(';') return bypass_servers out = __salt__['cmd.run']('networksetup -getproxybypassdomains {0}'.format(network_service)) return out.split('\n')
[ "def", "get_proxy_bypass", "(", "network_service", "=", "'Ethernet'", ")", ":", "if", "(", "__grains__", "[", "'os'", "]", "==", "'Windows'", ")", ":", "reg_val", "=", "__salt__", "[", "'reg.read_value'", "]", "(", "'HKEY_CURRENT_USER'", ",", "'SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet Settings'", ",", "'ProxyOverride'", ")", "bypass_servers", "=", "reg_val", "[", "'vdata'", "]", ".", "replace", "(", "'<local>'", ",", "''", ")", ".", "split", "(", "';'", ")", "return", "bypass_servers", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'networksetup -getproxybypassdomains {0}'", ".", "format", "(", "network_service", ")", ")", "return", "out", ".", "split", "(", "'\\n'", ")" ]
returns the current domains that can bypass the proxy network_service the network service to get the bypass domains from .
train
false
42,709
def cache_hostinfo(environ): hostinfo = {} if (environ.get('HTTPS') or (environ.get('wsgi.url_scheme') == 'https') or (environ.get('HTTP_X_FORWARDED_PROTO') == 'https')): hostinfo['protocol'] = 'https' else: hostinfo['protocol'] = 'http' if environ.get('HTTP_X_FORWARDED_HOST'): hostinfo['host'] = environ['HTTP_X_FORWARDED_HOST'] elif environ.get('HTTP_HOST'): hostinfo['host'] = environ['HTTP_HOST'] else: hostinfo['host'] = environ['SERVER_NAME'] if (environ.get('wsgi.url_scheme') == 'https'): if (environ['SERVER_PORT'] != '443'): hostinfo['host'] += (':' + environ['SERVER_PORT']) elif (environ['SERVER_PORT'] != '80'): hostinfo['host'] += (':' + environ['SERVER_PORT']) environ['routes.cached_hostinfo'] = hostinfo return hostinfo
[ "def", "cache_hostinfo", "(", "environ", ")", ":", "hostinfo", "=", "{", "}", "if", "(", "environ", ".", "get", "(", "'HTTPS'", ")", "or", "(", "environ", ".", "get", "(", "'wsgi.url_scheme'", ")", "==", "'https'", ")", "or", "(", "environ", ".", "get", "(", "'HTTP_X_FORWARDED_PROTO'", ")", "==", "'https'", ")", ")", ":", "hostinfo", "[", "'protocol'", "]", "=", "'https'", "else", ":", "hostinfo", "[", "'protocol'", "]", "=", "'http'", "if", "environ", ".", "get", "(", "'HTTP_X_FORWARDED_HOST'", ")", ":", "hostinfo", "[", "'host'", "]", "=", "environ", "[", "'HTTP_X_FORWARDED_HOST'", "]", "elif", "environ", ".", "get", "(", "'HTTP_HOST'", ")", ":", "hostinfo", "[", "'host'", "]", "=", "environ", "[", "'HTTP_HOST'", "]", "else", ":", "hostinfo", "[", "'host'", "]", "=", "environ", "[", "'SERVER_NAME'", "]", "if", "(", "environ", ".", "get", "(", "'wsgi.url_scheme'", ")", "==", "'https'", ")", ":", "if", "(", "environ", "[", "'SERVER_PORT'", "]", "!=", "'443'", ")", ":", "hostinfo", "[", "'host'", "]", "+=", "(", "':'", "+", "environ", "[", "'SERVER_PORT'", "]", ")", "elif", "(", "environ", "[", "'SERVER_PORT'", "]", "!=", "'80'", ")", ":", "hostinfo", "[", "'host'", "]", "+=", "(", "':'", "+", "environ", "[", "'SERVER_PORT'", "]", ")", "environ", "[", "'routes.cached_hostinfo'", "]", "=", "hostinfo", "return", "hostinfo" ]
processes the host information and stores a copy this work was previously done but wasnt stored in environ .
train
false
42,711
@singledispatch def assert_student_view_invalid_html(block, html): assert False, 'student_view should produce valid html'
[ "@", "singledispatch", "def", "assert_student_view_invalid_html", "(", "block", ",", "html", ")", ":", "assert", "False", ",", "'student_view should produce valid html'" ]
asserts that the html generated by the student_view view is correct for the supplied block .
train
false
42,712
@with_setup(step_runner_environ)
def test_doesnt_ignore_case():
    """Lettuce honours case with ``ignore_case=False``: steps whose text
    differs only by letter case must remain undefined."""
    feature = Feature.from_string(FEATURE3)
    result = feature.run(ignore_case=False).scenario_results[0]
    assert_equals(len(result.steps_passed), 1)
    assert_equals(len(result.steps_undefined), 2)
    assert_equals(result.total_steps, 3)
    assert not all(step.has_definition for step in result.scenario.steps)
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_doesnt_ignore_case", "(", ")", ":", "f", "=", "Feature", ".", "from_string", "(", "FEATURE3", ")", "feature_result", "=", "f", ".", "run", "(", "ignore_case", "=", "False", ")", "scenario_result", "=", "feature_result", ".", "scenario_results", "[", "0", "]", "assert_equals", "(", "len", "(", "scenario_result", ".", "steps_passed", ")", ",", "1", ")", "assert_equals", "(", "len", "(", "scenario_result", ".", "steps_undefined", ")", ",", "2", ")", "assert_equals", "(", "scenario_result", ".", "total_steps", ",", "3", ")", "assert", "(", "not", "all", "(", "[", "s", ".", "has_definition", "for", "s", "in", "scenario_result", ".", "scenario", ".", "steps", "]", ")", ")" ]
lettuce can be configured not to ignore case when matching step definitions .
train
false
42,716
@block_user_agents
@require_GET
def documents(request, tag=None):
    """List wiki documents, optionally restricted to a tag.

    The tag lookup is case-insensitive, but among the candidates the tag
    whose name equals *tag* (ignoring case) is preferred.
    """
    tag_obj = None
    if tag:
        candidates = get_list_or_404(DocumentTag, name__iexact=tag)
        lowered = tag.lower()
        for candidate in candidates:
            if candidate.name.lower() == lowered:
                tag_obj = candidate
                break
    docs = Document.objects.filter_for_list(locale=request.LANGUAGE_CODE,
                                            tag=tag_obj)
    page = paginate(request, docs, per_page=DOCUMENTS_PER_PAGE)
    context = {
        'documents': page,
        'count': docs.count(),
        'tag': tag,
    }
    return render(request, 'wiki/list/documents.html', context)
[ "@", "block_user_agents", "@", "require_GET", "def", "documents", "(", "request", ",", "tag", "=", "None", ")", ":", "tag_obj", "=", "None", "if", "tag", ":", "matching_tags", "=", "get_list_or_404", "(", "DocumentTag", ",", "name__iexact", "=", "tag", ")", "for", "matching_tag", "in", "matching_tags", ":", "if", "(", "matching_tag", ".", "name", ".", "lower", "(", ")", "==", "tag", ".", "lower", "(", ")", ")", ":", "tag_obj", "=", "matching_tag", "break", "docs", "=", "Document", ".", "objects", ".", "filter_for_list", "(", "locale", "=", "request", ".", "LANGUAGE_CODE", ",", "tag", "=", "tag_obj", ")", "paginated_docs", "=", "paginate", "(", "request", ",", "docs", ",", "per_page", "=", "DOCUMENTS_PER_PAGE", ")", "context", "=", "{", "'documents'", ":", "paginated_docs", ",", "'count'", ":", "docs", ".", "count", "(", ")", ",", "'tag'", ":", "tag", "}", "return", "render", "(", "request", ",", "'wiki/list/documents.html'", ",", "context", ")" ]
list wiki documents depending on the optionally given tag .
train
false
42,718
def test_post_save(topic, user):
    """Saving a Post creates it, saving again applies an edit, and the
    topic/forum counters and last-post pointer stay consistent."""
    post = Post(content='Test Content')
    post.save(topic=topic, user=user)
    assert post.content == 'Test Content'

    post.content = 'Test Edit Content'
    post.save()
    assert post.content == 'Test Edit Content'

    # Counts are 2: presumably the topic fixture already holds one post.
    assert topic.user.post_count == 2
    assert topic.post_count == 2
    assert topic.last_post == post
    assert topic.forum.post_count == 2
[ "def", "test_post_save", "(", "topic", ",", "user", ")", ":", "post", "=", "Post", "(", "content", "=", "'Test Content'", ")", "post", ".", "save", "(", "topic", "=", "topic", ",", "user", "=", "user", ")", "assert", "(", "post", ".", "content", "==", "'Test Content'", ")", "post", ".", "content", "=", "'Test Edit Content'", "post", ".", "save", "(", ")", "assert", "(", "post", ".", "content", "==", "'Test Edit Content'", ")", "assert", "(", "topic", ".", "user", ".", "post_count", "==", "2", ")", "assert", "(", "topic", ".", "post_count", "==", "2", ")", "assert", "(", "topic", ".", "last_post", "==", "post", ")", "assert", "(", "topic", ".", "forum", ".", "post_count", "==", "2", ")" ]
tests the save post method .
train
false
42,719
def get_user_pubkeys(users):
    """Fetch public SSH keys from GitHub for each listed user.

    Each entry of *users* is either a login name or a one-key dict mapping
    a login name to a list of key ids; when ids are given, only those keys
    are included in the result.
    """
    if not isinstance(users, list):
        return {'Error': 'A list of users is expected'}
    ret = {}
    for entry in users:
        wanted_ids = []
        user = entry
        if isinstance(entry, dict):
            user = next(six.iterkeys(entry))
            wanted_ids = entry[user]
        url = 'https://api.github.com/users/{0}/keys'.format(user)
        response = salt.utils.http.query(url, 'GET', decode=False, text=True)
        ret[user] = {}
        for key in json.loads(response['text']):
            # An empty id list means "take every key".
            if wanted_ids and str(key['id']) not in wanted_ids:
                continue
            ret[user][key['id']] = key['key']
    return ret
[ "def", "get_user_pubkeys", "(", "users", ")", ":", "if", "(", "not", "isinstance", "(", "users", ",", "list", ")", ")", ":", "return", "{", "'Error'", ":", "'A list of users is expected'", "}", "ret", "=", "{", "}", "for", "user", "in", "users", ":", "key_ids", "=", "[", "]", "if", "isinstance", "(", "user", ",", "dict", ")", ":", "tmp_user", "=", "next", "(", "six", ".", "iterkeys", "(", "user", ")", ")", "key_ids", "=", "user", "[", "tmp_user", "]", "user", "=", "tmp_user", "url", "=", "'https://api.github.com/users/{0}/keys'", ".", "format", "(", "user", ")", "result", "=", "salt", ".", "utils", ".", "http", ".", "query", "(", "url", ",", "'GET'", ",", "decode", "=", "False", ",", "text", "=", "True", ")", "keys", "=", "json", ".", "loads", "(", "result", "[", "'text'", "]", ")", "ret", "[", "user", "]", "=", "{", "}", "for", "key", "in", "keys", ":", "if", "(", "len", "(", "key_ids", ")", ">", "0", ")", ":", "if", "(", "str", "(", "key", "[", "'id'", "]", ")", "in", "key_ids", ")", ":", "ret", "[", "user", "]", "[", "key", "[", "'id'", "]", "]", "=", "key", "[", "'key'", "]", "else", ":", "ret", "[", "user", "]", "[", "key", "[", "'id'", "]", "]", "=", "key", "[", "'key'", "]", "return", "ret" ]
retrieve a set of public keys from github for the specified list of users .
train
true
42,720
def gf_from_int_poly(f, p):
    """Create a ``GF(p)[x]`` polynomial from a ``Z[x]`` coefficient list.

    Simply reduces every integer coefficient of *f* modulo *p* via
    ``gf_trunc``.
    """
    return gf_trunc(f, p)
[ "def", "gf_from_int_poly", "(", "f", ",", "p", ")", ":", "return", "gf_trunc", "(", "f", ",", "p", ")" ]
create a gf(p)[x] polynomial from z[x] .
train
false
42,724
def latestVersion():
    # availableVersions() is presumably sorted newest-first; take its head.
    return availableVersions()[0]
[ "def", "latestVersion", "(", ")", ":", "return", "availableVersions", "(", ")", "[", "0", "]" ]
returns the most recent version available on github .
train
false
42,725
def storeNewTicket(masterKey, ticket, bridge):
    """Persist a freshly received session ticket with its master key.

    Tickets live in a YAML mapping keyed by the bridge's string form;
    each value records the receipt time, the master key and the raw
    ticket for later redemption.
    """
    assert len(masterKey) == const.MASTER_KEY_LENGTH
    assert len(ticket) == const.TICKET_LENGTH

    ticketFile = const.STATE_LOCATION + const.CLIENT_TICKET_FILE
    log.debug("Storing newly received ticket in `%s'." % ticketFile)

    content = util.readFromFile(ticketFile)
    # Start from the stored mapping when the file has content.
    if content is not None and len(content) > 0:
        tickets = yaml.safe_load(content)
    else:
        tickets = dict()
    tickets[str(bridge)] = [int(time.time()), masterKey, ticket]
    util.writeToFile(yaml.dump(tickets), ticketFile)
[ "def", "storeNewTicket", "(", "masterKey", ",", "ticket", ",", "bridge", ")", ":", "assert", "(", "len", "(", "masterKey", ")", "==", "const", ".", "MASTER_KEY_LENGTH", ")", "assert", "(", "len", "(", "ticket", ")", "==", "const", ".", "TICKET_LENGTH", ")", "ticketFile", "=", "(", "const", ".", "STATE_LOCATION", "+", "const", ".", "CLIENT_TICKET_FILE", ")", "log", ".", "debug", "(", "(", "\"Storing newly received ticket in `%s'.\"", "%", "ticketFile", ")", ")", "tickets", "=", "dict", "(", ")", "content", "=", "util", ".", "readFromFile", "(", "ticketFile", ")", "if", "(", "(", "content", "is", "not", "None", ")", "and", "(", "len", "(", "content", ")", ">", "0", ")", ")", ":", "tickets", "=", "yaml", ".", "safe_load", "(", "content", ")", "tickets", "[", "str", "(", "bridge", ")", "]", "=", "[", "int", "(", "time", ".", "time", "(", ")", ")", ",", "masterKey", ",", "ticket", "]", "util", ".", "writeToFile", "(", "yaml", ".", "dump", "(", "tickets", ")", ",", "ticketFile", ")" ]
store a new session ticket and the according master key for future use .
train
false
42,726
def last(seq):
    """Return the last item of *seq* (delegates to ``tail(1, seq)``)."""
    return tail(1, seq)[0]
[ "def", "last", "(", "seq", ")", ":", "return", "tail", "(", "1", ",", "seq", ")", "[", "0", "]" ]
returns the last item in a list .
train
false
42,728
def _bind_for_search(anonymous=False, opts=None):
    """Bind with binddn/bindpw purely for searching LDAP.

    Returns an LDAP connection when a non-anonymous bind is requested and
    both ``binddn`` and ``bindpw`` are configured; otherwise falls through
    and returns None implicitly.
    """
    connargs = {}
    # Config keys that must be present vs. ones that may be absent.
    params = {
        'mandatory': ['uri', 'server', 'port', 'tls', 'no_verify',
                      'anonymous', 'accountattributename', 'activedirectory'],
        'additional': ['binddn', 'bindpw', 'filter', 'groupclass',
                       'auth_by_group_membership_only'],
    }
    paramvalues = {}
    for param in params['mandatory']:
        paramvalues[param] = _config(param, opts=opts)
    for param in params['additional']:
        paramvalues[param] = _config(param, mandatory=False, opts=opts)
    # The caller's choice overrides any configured 'anonymous' value.
    paramvalues['anonymous'] = anonymous
    if paramvalues['binddn']:
        connargs['binddn'] = paramvalues['binddn']
        if paramvalues['bindpw']:
            # bindpw becomes mandatory once a binddn is configured.
            params['mandatory'].append('bindpw')
    for name in params['mandatory']:
        connargs[name] = paramvalues[name]
    if not paramvalues['anonymous']:
        if paramvalues['binddn'] and paramvalues['bindpw']:
            return _LDAPConnection(**connargs).ldap
[ "def", "_bind_for_search", "(", "anonymous", "=", "False", ",", "opts", "=", "None", ")", ":", "connargs", "=", "{", "}", "params", "=", "{", "'mandatory'", ":", "[", "'uri'", ",", "'server'", ",", "'port'", ",", "'tls'", ",", "'no_verify'", ",", "'anonymous'", ",", "'accountattributename'", ",", "'activedirectory'", "]", ",", "'additional'", ":", "[", "'binddn'", ",", "'bindpw'", ",", "'filter'", ",", "'groupclass'", ",", "'auth_by_group_membership_only'", "]", "}", "paramvalues", "=", "{", "}", "for", "param", "in", "params", "[", "'mandatory'", "]", ":", "paramvalues", "[", "param", "]", "=", "_config", "(", "param", ",", "opts", "=", "opts", ")", "for", "param", "in", "params", "[", "'additional'", "]", ":", "paramvalues", "[", "param", "]", "=", "_config", "(", "param", ",", "mandatory", "=", "False", ",", "opts", "=", "opts", ")", "paramvalues", "[", "'anonymous'", "]", "=", "anonymous", "if", "paramvalues", "[", "'binddn'", "]", ":", "connargs", "[", "'binddn'", "]", "=", "paramvalues", "[", "'binddn'", "]", "if", "paramvalues", "[", "'bindpw'", "]", ":", "params", "[", "'mandatory'", "]", ".", "append", "(", "'bindpw'", ")", "for", "name", "in", "params", "[", "'mandatory'", "]", ":", "connargs", "[", "name", "]", "=", "paramvalues", "[", "name", "]", "if", "(", "not", "paramvalues", "[", "'anonymous'", "]", ")", ":", "if", "(", "paramvalues", "[", "'binddn'", "]", "and", "paramvalues", "[", "'bindpw'", "]", ")", ":", "return", "_LDAPConnection", "(", "**", "connargs", ")", ".", "ldap" ]
bind with binddn and bindpw only for searching ldap .
train
true
42,730
def filename_to_utf8(name):
    """Return *name* encoded as UTF-8 bytes.

    Unicode input is encoded directly; byte input is first decoded with
    the platform codec (cp1252 on Windows, utf8 elsewhere), replacing any
    undecodable bytes.
    """
    if isinstance(name, unicode):
        return name.encode('utf8')
    source_codec = 'cp1252' if iswindows else 'utf8'
    return name.decode(source_codec, 'replace').encode('utf8')
[ "def", "filename_to_utf8", "(", "name", ")", ":", "if", "isinstance", "(", "name", ",", "unicode", ")", ":", "return", "name", ".", "encode", "(", "'utf8'", ")", "codec", "=", "(", "'cp1252'", "if", "iswindows", "else", "'utf8'", ")", "return", "name", ".", "decode", "(", "codec", ",", "'replace'", ")", ".", "encode", "(", "'utf8'", ")" ]
return c{name} encoded in utf8 .
train
false
42,731
def rgw_pools_create(**kwargs):
    """Create the pools required by an RGW (Rados Gateway).

    All keyword arguments are passed straight through to
    ``ceph_cfg.rgw_pools_create``.
    """
    return ceph_cfg.rgw_pools_create(**kwargs)
[ "def", "rgw_pools_create", "(", "**", "kwargs", ")", ":", "return", "ceph_cfg", ".", "rgw_pools_create", "(", "**", "kwargs", ")" ]
create pools for rgw cli example: .
train
false
42,732
def purge_pending_jobs(event=None, site=None, queue=None):
    """Delete queued background jobs, optionally filtered by site/event.

    With a filter, matching jobs are deleted individually; without any
    filter each queue is emptied wholesale.  Returns the number of jobs
    purged.
    """
    purged = 0
    for queue_name in get_queue_list(queue):
        q = get_queue(queue_name)
        if not (site or event):
            # No filter: drop everything in the queue at once.
            purged += q.count
            q.empty()
            continue
        for job in q.jobs:
            site_ok = site is None or job.kwargs[u'site'] == site
            event_ok = event is None or job.kwargs[u'event'] == event
            if site_ok and event_ok:
                job.delete()
                purged += 1
    return purged
[ "def", "purge_pending_jobs", "(", "event", "=", "None", ",", "site", "=", "None", ",", "queue", "=", "None", ")", ":", "purged_task_count", "=", "0", "for", "queue", "in", "get_queue_list", "(", "queue", ")", ":", "q", "=", "get_queue", "(", "queue", ")", "for", "job", "in", "q", ".", "jobs", ":", "if", "(", "site", "and", "event", ")", ":", "if", "(", "(", "job", ".", "kwargs", "[", "u'site'", "]", "==", "site", ")", "and", "(", "job", ".", "kwargs", "[", "u'event'", "]", "==", "event", ")", ")", ":", "job", ".", "delete", "(", ")", "purged_task_count", "+=", "1", "elif", "site", ":", "if", "(", "job", ".", "kwargs", "[", "u'site'", "]", "==", "site", ")", ":", "job", ".", "delete", "(", ")", "purged_task_count", "+=", "1", "elif", "event", ":", "if", "(", "job", ".", "kwargs", "[", "u'event'", "]", "==", "event", ")", ":", "job", ".", "delete", "(", ")", "purged_task_count", "+=", "1", "else", ":", "purged_task_count", "+=", "q", ".", "count", "q", ".", "empty", "(", ")", "return", "purged_task_count" ]
purge tasks of the event event type .
train
false
42,733
def setup_wizard_visible_panes(shop):
    """Return True if the setup wizard has any panes visible for *shop*.

    Truthiness of the pane list replaces the non-idiomatic
    ``len(...) > 0`` comparison; behavior is unchanged.
    """
    return bool(load_setup_wizard_panes(shop))
[ "def", "setup_wizard_visible_panes", "(", "shop", ")", ":", "return", "(", "len", "(", "load_setup_wizard_panes", "(", "shop", ")", ")", ">", "0", ")" ]
check if shop wizard has visible panes that require merchant configuration .
train
false
42,734
def delete_task_from_mem(task_id):
    """Remove *task_id* and its status from the in-memory task table.

    The status lock is held via a ``with`` block so it is released even if
    the deletion raises (the original acquire/release pair could leak the
    lock on error).  Unknown ids are ignored.
    """
    logging.info("Deleting task '{0}' from memory.".format(task_id))
    with TASK_STATUS_LOCK:
        # dict.pop with a default replaces the membership-test + del pair.
        TASK_STATUS.pop(task_id, None)
[ "def", "delete_task_from_mem", "(", "task_id", ")", ":", "logging", ".", "info", "(", "\"Deleting task '{0}' from memory.\"", ".", "format", "(", "task_id", ")", ")", "TASK_STATUS_LOCK", ".", "acquire", "(", "True", ")", "if", "(", "task_id", "in", "TASK_STATUS", ".", "keys", "(", ")", ")", ":", "del", "TASK_STATUS", "[", "task_id", "]", "TASK_STATUS_LOCK", ".", "release", "(", ")" ]
deletes a task and its status from memory .
train
false
42,735
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def subgraph_centrality_exp(G):
    """Return the subgraph centrality of each node of G.

    The centralities are the diagonal entries of ``expm(A)`` where A is
    the binarised (0/1) adjacency matrix of G.
    """
    import scipy.linalg
    nodes = list(G)
    adjacency = nx.to_numpy_matrix(G, nodes)
    adjacency[adjacency != 0.0] = 1  # drop edge weights: plain adjacency
    exp_adjacency = scipy.linalg.expm(adjacency.A)
    return {node: float(value)
            for node, value in zip(nodes, exp_adjacency.diagonal())}
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "subgraph_centrality_exp", "(", "G", ")", ":", "import", "scipy", ".", "linalg", "nodelist", "=", "list", "(", "G", ")", "A", "=", "nx", ".", "to_numpy_matrix", "(", "G", ",", "nodelist", ")", "A", "[", "(", "A", "!=", "0.0", ")", "]", "=", "1", "expA", "=", "scipy", ".", "linalg", ".", "expm", "(", "A", ".", "A", ")", "sc", "=", "dict", "(", "zip", "(", "nodelist", ",", "map", "(", "float", ",", "expA", ".", "diagonal", "(", ")", ")", ")", ")", "return", "sc" ]
return the subgraph centrality for each node of g .
train
false
42,736
def register_contributor(request):
    """Register a new user, pre-flagged as a contributor."""
    return register(request, contributor=True)
[ "def", "register_contributor", "(", "request", ")", ":", "return", "register", "(", "request", ",", "contributor", "=", "True", ")" ]
register a new user from the superheroes page .
train
false
42,738
def init(mpstate):
    """Module entry point: construct the SerialModule bound to *mpstate*."""
    return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise the module: construct and return a serialmodule for the given mpstate .
train
false
42,739
def _is_small_course(course_key):
    """Return True when the course's enrollment does not exceed the
    configured ``MAX_ENROLLMENT_INSTR_BUTTONS`` limit.

    When no limit is configured the course is never considered small.
    """
    enrollment = CourseEnrollment.objects.num_enrolled_in(course_key)
    limit = settings.FEATURES.get('MAX_ENROLLMENT_INSTR_BUTTONS')
    return limit is not None and enrollment <= limit
[ "def", "_is_small_course", "(", "course_key", ")", ":", "is_small_course", "=", "False", "enrollment_count", "=", "CourseEnrollment", ".", "objects", ".", "num_enrolled_in", "(", "course_key", ")", "max_enrollment_for_buttons", "=", "settings", ".", "FEATURES", ".", "get", "(", "'MAX_ENROLLMENT_INSTR_BUTTONS'", ")", "if", "(", "max_enrollment_for_buttons", "is", "not", "None", ")", ":", "is_small_course", "=", "(", "enrollment_count", "<=", "max_enrollment_for_buttons", ")", "return", "is_small_course" ]
compares against max_enrollment_instr_buttons to determine if course enrollment is considered small .
train
false
42,741
def RemoveSurroundingQuotes(text):
    """Strip one pair of surrounding double quotes from *text*.

    Quotes are removed only when the string is long enough to have a
    distinct opening and closing quote; a lone ``"`` (which the old code
    collapsed to an empty string) is now returned unchanged.  Falsy input
    (empty string, None) is returned as-is.
    """
    if text and len(text) >= 2 and text[0] == '"' and text[-1] == '"':
        return text[1:-1]
    return text
[ "def", "RemoveSurroundingQuotes", "(", "text", ")", ":", "if", "text", ":", "if", "(", "(", "text", "[", "0", "]", "==", "'\"'", ")", "and", "(", "text", "[", "(", "-", "1", ")", "]", "==", "'\"'", ")", ")", ":", "text", "=", "text", "[", "1", ":", "(", "-", "1", ")", "]", "return", "text" ]
removes outer quotation marks .
train
false
42,744
def ofp_msg_from_jsondict(dp, jsondict):
    """Instantiate the OpenFlow message class named by the single key of
    *jsondict*, feeding the value to its ``from_jsondict`` constructor."""
    parser = dp.ofproto_parser
    assert len(jsondict) == 1
    (name, body), = jsondict.items()
    cls = getattr(parser, name)
    assert issubclass(cls, MsgBase)
    return cls.from_jsondict(body, datapath=dp)
[ "def", "ofp_msg_from_jsondict", "(", "dp", ",", "jsondict", ")", ":", "parser", "=", "dp", ".", "ofproto_parser", "assert", "(", "len", "(", "jsondict", ")", "==", "1", ")", "for", "(", "k", ",", "v", ")", "in", "jsondict", ".", "items", "(", ")", ":", "cls", "=", "getattr", "(", "parser", ",", "k", ")", "assert", "issubclass", "(", "cls", ",", "MsgBase", ")", "return", "cls", ".", "from_jsondict", "(", "v", ",", "datapath", "=", "dp", ")" ]
this function instanticates an appropriate openflow message class from the given json style dictionary .
train
true
42,745
def test_argument_eval_order():
    """Function arguments must be evaluated left to right, before the call."""
    trace = [1]
    def noop(a, b, c):
        pass
    noop(trace.append(2), trace.append(3), trace.append(4))
    AreEqual(trace, [1, 2, 3, 4])
[ "def", "test_argument_eval_order", "(", ")", ":", "x", "=", "[", "1", "]", "def", "noop", "(", "a", ",", "b", ",", "c", ")", ":", "pass", "noop", "(", "x", ".", "append", "(", "2", ")", ",", "x", ".", "append", "(", "3", ")", ",", "x", ".", "append", "(", "4", ")", ")", "AreEqual", "(", "x", ",", "[", "1", ",", "2", ",", "3", ",", "4", "]", ")" ]
check order of evaluation of function arguments .
train
false
42,746
def step2(system, X0=None, T=None, N=None, **kwargs):
    """Step response of a continuous-time system, computed via ``lsim2``.

    *system* may be an ``lti`` instance or a tuple accepted by the ``lti``
    constructor; discrete-time systems are rejected.  Returns the time
    vector and the output.
    """
    if isinstance(system, lti):
        sys = system._as_ss()
    elif isinstance(system, dlti):
        raise AttributeError('step2 can only be used with '
                             'continuous-time systems.')
    else:
        sys = lti(*system)._as_ss()
    if N is None:
        N = 100
    T = _default_response_times(sys.A, N) if T is None else asarray(T)
    # Drive the system with a unit step.
    U = ones(T.shape, sys.A.dtype)
    vals = lsim2(sys, U, T, X0=X0, **kwargs)
    return vals[0], vals[1]
[ "def", "step2", "(", "system", ",", "X0", "=", "None", ",", "T", "=", "None", ",", "N", "=", "None", ",", "**", "kwargs", ")", ":", "if", "isinstance", "(", "system", ",", "lti", ")", ":", "sys", "=", "system", ".", "_as_ss", "(", ")", "elif", "isinstance", "(", "system", ",", "dlti", ")", ":", "raise", "AttributeError", "(", "'step2 can only be used with continuous-time systems.'", ")", "else", ":", "sys", "=", "lti", "(", "*", "system", ")", ".", "_as_ss", "(", ")", "if", "(", "N", "is", "None", ")", ":", "N", "=", "100", "if", "(", "T", "is", "None", ")", ":", "T", "=", "_default_response_times", "(", "sys", ".", "A", ",", "N", ")", "else", ":", "T", "=", "asarray", "(", "T", ")", "U", "=", "ones", "(", "T", ".", "shape", ",", "sys", ".", "A", ".", "dtype", ")", "vals", "=", "lsim2", "(", "sys", ",", "U", ",", "T", ",", "X0", "=", "X0", ",", "**", "kwargs", ")", "return", "(", "vals", "[", "0", "]", ",", "vals", "[", "1", "]", ")" ]
step response of continuous-time system .
train
false
42,747
def get_agent_service_types():
    """Return the configurable service type names (keys of _SERVICE_TYPES)."""
    return _SERVICE_TYPES.keys()
[ "def", "get_agent_service_types", "(", ")", ":", "return", "_SERVICE_TYPES", ".", "keys", "(", ")" ]
get the sysservices types that can be configured .
train
false
42,748
def flagsimap2keywords(flagstring):
    """Return the non-standard keywords contained in an IMAP flag string.

    The surrounding parentheses are dropped, the contents split on
    whitespace, and every standard flag listed in ``flagmap`` filtered out.
    """
    present = set(flagstring[1:-1].split())
    standard = {flag for (flag, _) in flagmap}
    return present - standard
[ "def", "flagsimap2keywords", "(", "flagstring", ")", ":", "imapflagset", "=", "set", "(", "flagstring", "[", "1", ":", "(", "-", "1", ")", "]", ".", "split", "(", ")", ")", "serverflagset", "=", "set", "(", "[", "flag", "for", "(", "flag", ",", "c", ")", "in", "flagmap", "]", ")", "return", "(", "imapflagset", "-", "serverflagset", ")" ]
convert string into a keyword set .
train
false
42,750
def test_raise_attrerror():
    """Raising AttributeError from __getattr__ must not break str()/repr().

    The explicitly defined __repr__/__str__ is still used, and the missing
    counterpart falls back to the default behaviour (Python 2 old-style
    classes: str() falls back to __repr__ for A, while repr(B) yields the
    default "<... B instance ...>" form).
    """
    class A:
        def __getattr__(self, name):
            raise AttributeError, 'get outta here'
        def __repr__(self):
            return 'foo'
    class B:
        def __getattr__(self, name):
            raise AttributeError, 'get outta here'
        def __str__(self):
            return 'foo'
    AreEqual(str(A()), 'foo')
    AreEqual(repr(A()), 'foo')
    AreEqual(str(B()), 'foo')
    Assert((repr(B()).find('B instance') != (-1)))
[ "def", "test_raise_attrerror", "(", ")", ":", "class", "A", ":", "def", "__getattr__", "(", "self", ",", "name", ")", ":", "raise", "AttributeError", ",", "'get outta here'", "def", "__repr__", "(", "self", ")", ":", "return", "'foo'", "class", "B", ":", "def", "__getattr__", "(", "self", ",", "name", ")", ":", "raise", "AttributeError", ",", "'get outta here'", "def", "__str__", "(", "self", ")", ":", "return", "'foo'", "AreEqual", "(", "str", "(", "A", "(", ")", ")", ",", "'foo'", ")", "AreEqual", "(", "repr", "(", "A", "(", ")", ")", ",", "'foo'", ")", "AreEqual", "(", "str", "(", "B", "(", ")", ")", ",", "'foo'", ")", "Assert", "(", "(", "repr", "(", "B", "(", ")", ")", ".", "find", "(", "'B instance'", ")", "!=", "(", "-", "1", ")", ")", ")" ]
raising attributeerror from __getattr__ should be ok .
train
false
42,751
def test_cert_validation_sensitivity(monkeypatch):
    """Dotted bucket names must degrade to OrdinaryCallingFormat (with the
    region endpoint) even though they are legal for subdomain addressing;
    dotless names keep SubdomainCallingFormat."""
    monkeypatch.setenv('AWS_REGION', 'us-east-1')
    for bucket_name in SUBDOMAIN_OK:
        cinfo = calling_format.from_store_name(bucket_name)
        if '.' in bucket_name:
            assert cinfo.calling_format == connection.OrdinaryCallingFormat
            assert cinfo.region == 'us-east-1'
            assert cinfo.ordinary_endpoint == 's3.amazonaws.com'
        else:
            assert (cinfo.calling_format ==
                    boto.s3.connection.SubdomainCallingFormat)
[ "def", "test_cert_validation_sensitivity", "(", "monkeypatch", ")", ":", "monkeypatch", ".", "setenv", "(", "'AWS_REGION'", ",", "'us-east-1'", ")", "for", "bn", "in", "SUBDOMAIN_OK", ":", "if", "(", "'.'", "not", "in", "bn", ")", ":", "cinfo", "=", "calling_format", ".", "from_store_name", "(", "bn", ")", "assert", "(", "cinfo", ".", "calling_format", "==", "boto", ".", "s3", ".", "connection", ".", "SubdomainCallingFormat", ")", "else", ":", "assert", "(", "'.'", "in", "bn", ")", "cinfo", "=", "calling_format", ".", "from_store_name", "(", "bn", ")", "assert", "(", "cinfo", ".", "calling_format", "==", "connection", ".", "OrdinaryCallingFormat", ")", "assert", "(", "cinfo", ".", "region", "==", "'us-east-1'", ")", "assert", "(", "cinfo", ".", "ordinary_endpoint", "==", "'s3.amazonaws.com'", ")" ]
test degradation of dotted bucket names to ordinarycallingformat although legal bucket names with subdomaincallingformat .
train
false
42,752
def test_difficult_univariate():
    """density() should handle nontrivial transforms of a single normal
    random variable: cube, exp of square, and log."""
    x = Normal('x', 0, 1)
    for expr in (x ** 3, exp(x ** 2), log(x)):
        assert density(expr)
[ "def", "test_difficult_univariate", "(", ")", ":", "x", "=", "Normal", "(", "'x'", ",", "0", ",", "1", ")", "assert", "density", "(", "(", "x", "**", "3", ")", ")", "assert", "density", "(", "exp", "(", "(", "x", "**", "2", ")", ")", ")", "assert", "density", "(", "log", "(", "x", ")", ")" ]
since using solve in place of deltaintegrate were able to perform substantially more complex density computations on single continuous random variables .
train
false
42,754
def get_tours(index=None):
    """Return the available tours (thin alias for ``get_tour``)."""
    return get_tour(index)
[ "def", "get_tours", "(", "index", "=", "None", ")", ":", "return", "get_tour", "(", "index", ")" ]
get the list of available tours .
train
false
42,756
def containsAtLeastOneWord(name, words):
    """Return the first word from *words* found in *name* as a whole word,
    or False when none matches.

    Words must be delimited by non-word characters (or string boundaries)
    inside *name*; matching is case-insensitive.  *words* may be an
    iterable or a single comma-separated string.
    """
    if isinstance(words, basestring):
        words = words.split(u',')
    for word in words:
        stripped = word.strip()
        pattern = re.compile(u'(^|[\\W_])%s($|[\\W_])' % re.escape(stripped),
                             re.I)
        if pattern.search(name):
            return stripped
    return False
[ "def", "containsAtLeastOneWord", "(", "name", ",", "words", ")", ":", "if", "isinstance", "(", "words", ",", "basestring", ")", ":", "words", "=", "words", ".", "split", "(", "u','", ")", "items", "=", "[", "(", "re", ".", "compile", "(", "(", "u'(^|[\\\\W_])%s($|[\\\\W_])'", "%", "re", ".", "escape", "(", "word", ".", "strip", "(", ")", ")", ")", ",", "re", ".", "I", ")", ",", "word", ".", "strip", "(", ")", ")", "for", "word", "in", "words", "]", "for", "(", "regexp", ",", "word", ")", "in", "items", ":", "if", "regexp", ".", "search", "(", "name", ")", ":", "return", "word", "return", "False" ]
filters out results based on filter_words name: name to check words : string of words separated by a .
train
false
42,757
def oid_diff(git, oid, filename=None):
    """Return the diff introduced by commit *oid*, optionally limited to
    *filename*.

    Falls back to ``git show oid^!`` when ``git diff oid~ oid`` fails,
    which handles commits that have no parent.
    """
    opts = common_diff_opts()
    args = [oid + u'~', oid]
    _add_filename(args, filename)
    status, out, err = git.diff(*args, **opts)
    if status != 0:
        # Probably a root commit: show the commit itself instead.
        args = [oid + u'^!']
        _add_filename(args, filename)
        status, out, err = git.show(pretty=u'format:', *args, **opts)
        out = out.lstrip()
    return out
[ "def", "oid_diff", "(", "git", ",", "oid", ",", "filename", "=", "None", ")", ":", "args", "=", "[", "(", "oid", "+", "u'~'", ")", ",", "oid", "]", "opts", "=", "common_diff_opts", "(", ")", "_add_filename", "(", "args", ",", "filename", ")", "(", "status", ",", "out", ",", "err", ")", "=", "git", ".", "diff", "(", "*", "args", ",", "**", "opts", ")", "if", "(", "status", "!=", "0", ")", ":", "args", "=", "[", "(", "oid", "+", "u'^!'", ")", "]", "_add_filename", "(", "args", ",", "filename", ")", "(", "status", ",", "out", ",", "err", ")", "=", "git", ".", "show", "(", "pretty", "=", "u'format:'", ",", "*", "args", ",", "**", "opts", ")", "out", "=", "out", ".", "lstrip", "(", ")", "return", "out" ]
return the diff for an oid .
train
false
42,758
def _invalidates_cache(f): def inner_func(self, *args, **kwargs): rv = f(self, *args, **kwargs) self._invalidate_cache() return rv return inner_func
[ "def", "_invalidates_cache", "(", "f", ")", ":", "def", "inner_func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "rv", "=", "f", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "self", ".", "_invalidate_cache", "(", ")", "return", "rv", "return", "inner_func" ]
decorator for rruleset methods which may invalidate the cached length .
train
true
42,759
def on_request_success(request_type, name, response_time, response_length):
    """Event handler fired on every successful request; accumulates the
    total response payload size into the module-level ``stats`` dict."""
    stats['content-length'] += response_length
[ "def", "on_request_success", "(", "request_type", ",", "name", ",", "response_time", ",", "response_length", ")", ":", "stats", "[", "'content-length'", "]", "+=", "response_length" ]
event handler that get triggered on every successful request .
train
false
42,761
def _find_keypair(cs, keypair):
    """Look up a keypair by name/id via the generic resource finder."""
    return utils.find_resource(cs.keypairs, keypair)
[ "def", "_find_keypair", "(", "cs", ",", "keypair", ")", ":", "return", "utils", ".", "find_resource", "(", "cs", ".", "keypairs", ",", "keypair", ")" ]
get a keypair by name .
train
false
42,762
def coalesce_options(options, types):
    """Coalesce each option value whose key appears in *types*.

    Values with a declared type are run through ``coalesce_option_value``;
    all other entries are copied through untouched.
    """
    return {
        key: (coalesce_option_value(value, types[key], key)
              if key in types else value)
        for key, value in options.items()
    }
[ "def", "coalesce_options", "(", "options", ",", "types", ")", ":", "out", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "options", ".", "items", "(", ")", ":", "if", "(", "key", "in", "types", ")", ":", "out", "[", "key", "]", "=", "coalesce_option_value", "(", "value", ",", "types", "[", "key", "]", ",", "key", ")", "else", ":", "out", "[", "key", "]", "=", "value", "return", "out" ]
coalesce options dictionary according to types dictionary .
train
false
42,764
@receiver(SignalHandler.item_deleted)
def handle_item_deleted(**kwargs):
    """Clean up gating milestone data when an XBlock is deleted in Studio.

    For the deleted block and every dynamic descendant, the prerequisite
    milestone and required-content records are removed.
    """
    usage_key = kwargs.get('usage_key')
    if not usage_key:
        return
    usage_key = usage_key.for_branch(None)  # strip branch information
    course_key = usage_key.course_key
    deleted_module = modulestore().get_item(usage_key)
    descendants = yield_dynamic_descriptor_descendants(
        deleted_module, kwargs.get('user_id'))
    for module in descendants:
        gating_api.remove_prerequisite(module.location)
        gating_api.set_required_content(course_key, module.location, None, None)
[ "@", "receiver", "(", "SignalHandler", ".", "item_deleted", ")", "def", "handle_item_deleted", "(", "**", "kwargs", ")", ":", "usage_key", "=", "kwargs", ".", "get", "(", "'usage_key'", ")", "if", "usage_key", ":", "usage_key", "=", "usage_key", ".", "for_branch", "(", "None", ")", "course_key", "=", "usage_key", ".", "course_key", "deleted_module", "=", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", "for", "module", "in", "yield_dynamic_descriptor_descendants", "(", "deleted_module", ",", "kwargs", ".", "get", "(", "'user_id'", ")", ")", ":", "gating_api", ".", "remove_prerequisite", "(", "module", ".", "location", ")", "gating_api", ".", "set_required_content", "(", "course_key", ",", "module", ".", "location", ",", "None", ",", "None", ")" ]
receives the item_deleted signal sent by studio when an xblock is removed from the course structure and removes any gating milestone data associated with it or its descendants .
train
false
42,765
def unregister_pkg(name, conn=None):
    """Remove *name* from the package database.

    A connection is created via ``init()`` when none is supplied.
    """
    conn = conn if conn is not None else init()
    conn.execute('DELETE FROM packages WHERE package=?', (name,))
[ "def", "unregister_pkg", "(", "name", ",", "conn", "=", "None", ")", ":", "if", "(", "conn", "is", "None", ")", ":", "conn", "=", "init", "(", ")", "conn", ".", "execute", "(", "'DELETE FROM packages WHERE package=?'", ",", "(", "name", ",", ")", ")" ]
unregister a package from the package database .
train
false
42,769
@require_admin_context
def qos_specs_get_all(context, filters=None, marker=None, limit=None,
                      offset=None, sort_keys=None, sort_dirs=None):
    """Return all qos_specs matching the filters, paginated and sorted.

    Returns [] when the paginate-query builder produces no query;
    otherwise every row is converted to a dict including its specs.
    """
    session = get_session()
    with session.begin():
        query = _generate_paginate_query(context, session, marker, limit,
                                         sort_keys, sort_dirs, filters,
                                         offset, models.QualityOfServiceSpecs)
        if query is None:
            return []
        rows = query.all()
        return _dict_with_qos_specs(rows)
[ "@", "require_admin_context", "def", "qos_specs_get_all", "(", "context", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "offset", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ")", ":", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "query", "=", "_generate_paginate_query", "(", "context", ",", "session", ",", "marker", ",", "limit", ",", "sort_keys", ",", "sort_dirs", ",", "filters", ",", "offset", ",", "models", ".", "QualityOfServiceSpecs", ")", "if", "(", "query", "is", "None", ")", ":", "return", "[", "]", "rows", "=", "query", ".", "all", "(", ")", "return", "_dict_with_qos_specs", "(", "rows", ")" ]
returns a list of all qos_specs .
train
false
42,772
def screen():
    """Add the current contents of the screen to the PRNG state, via
    OpenSSL's ``RAND_screen``."""
    _lib.RAND_screen()
[ "def", "screen", "(", ")", ":", "_lib", ".", "RAND_screen", "(", ")" ]
add the current contents of the screen to the prng state .
train
false
42,773
def pad_lines_after_first(prefix, s):
    """Prefix each line of *s* after the first with *prefix*.

    Lines are re-joined with plain newlines; ``splitlines`` drops any
    trailing newline, so one will not be re-added.
    """
    separator = '\n' + prefix
    return separator.join(s.splitlines())
[ "def", "pad_lines_after_first", "(", "prefix", ",", "s", ")", ":", "return", "(", "'\\n'", "+", "prefix", ")", ".", "join", "(", "s", ".", "splitlines", "(", ")", ")" ]
apply a prefix to each line in s after the first .
train
false
42,774
def mssql_encode(t):
    """Encode *t* as an MS SQL ``CHAR(..)+CHAR(..)`` expression, one CHAR
    call per character code point."""
    codes = [str(ord(ch)) for ch in t]
    return 'CHAR(%s)' % ')+CHAR('.join(codes)
[ "def", "mssql_encode", "(", "t", ")", ":", "return", "(", "'CHAR(%s)'", "%", "')+CHAR('", ".", "join", "(", "(", "str", "(", "ord", "(", "c", ")", ")", "for", "c", "in", "t", ")", ")", ")" ]
convert the text to a char-like ms sql command .
train
false
42,777
@task def test_travis_osf(ctx): flake(ctx) jshint(ctx) test_osf(ctx) test_addons(ctx) test_osf_models(ctx)
[ "@", "task", "def", "test_travis_osf", "(", "ctx", ")", ":", "flake", "(", "ctx", ")", "jshint", "(", "ctx", ")", "test_osf", "(", "ctx", ")", "test_addons", "(", "ctx", ")", "test_osf_models", "(", "ctx", ")" ]
run half of the tests to help travis go faster .
train
false
42,778
def utcoffset(time=_time, localtime=_time.localtime): if localtime().tm_isdst: return (time.altzone // 3600) return (time.timezone // 3600)
[ "def", "utcoffset", "(", "time", "=", "_time", ",", "localtime", "=", "_time", ".", "localtime", ")", ":", "if", "localtime", "(", ")", ".", "tm_isdst", ":", "return", "(", "time", ".", "altzone", "//", "3600", ")", "return", "(", "time", ".", "timezone", "//", "3600", ")" ]
return the current offset to utc in hours .
train
false
42,779
def runX11(node, cmd): (_display, tunnel) = tunnelX11(node) if (_display is None): return [] popen = node.popen(cmd) return [tunnel, popen]
[ "def", "runX11", "(", "node", ",", "cmd", ")", ":", "(", "_display", ",", "tunnel", ")", "=", "tunnelX11", "(", "node", ")", "if", "(", "_display", "is", "None", ")", ":", "return", "[", "]", "popen", "=", "node", ".", "popen", "(", "cmd", ")", "return", "[", "tunnel", ",", "popen", "]" ]
run an x11 client on a node .
train
false
42,780
def BuildCGIRequest(base_env_dict, request, dev_appserver): if (request.headers is None): request.headers = {} request.headers['Content-Type'] = 'application/json' url = (SPI_ROOT_FORMAT % (request.port, request.path)) base_env_dict['REQUEST_METHOD'] = 'POST' header_outfile = cStringIO.StringIO() body_outfile = cStringIO.StringIO() WriteHeaders(request.headers, header_outfile, len(request.body)) body_outfile.write(request.body) header_outfile.seek(0) body_outfile.seek(0) return dev_appserver.AppServerRequest(url, None, mimetools.Message(header_outfile), body_outfile)
[ "def", "BuildCGIRequest", "(", "base_env_dict", ",", "request", ",", "dev_appserver", ")", ":", "if", "(", "request", ".", "headers", "is", "None", ")", ":", "request", ".", "headers", "=", "{", "}", "request", ".", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "url", "=", "(", "SPI_ROOT_FORMAT", "%", "(", "request", ".", "port", ",", "request", ".", "path", ")", ")", "base_env_dict", "[", "'REQUEST_METHOD'", "]", "=", "'POST'", "header_outfile", "=", "cStringIO", ".", "StringIO", "(", ")", "body_outfile", "=", "cStringIO", ".", "StringIO", "(", ")", "WriteHeaders", "(", "request", ".", "headers", ",", "header_outfile", ",", "len", "(", "request", ".", "body", ")", ")", "body_outfile", ".", "write", "(", "request", ".", "body", ")", "header_outfile", ".", "seek", "(", "0", ")", "body_outfile", ".", "seek", "(", "0", ")", "return", "dev_appserver", ".", "AppServerRequest", "(", "url", ",", "None", ",", "mimetools", ".", "Message", "(", "header_outfile", ")", ",", "body_outfile", ")" ]
build a cgi request to call a method on an spi backend .
train
false
42,781
def trycmd(*args, **kwargs): discard_warnings = kwargs.pop('discard_warnings', False) try: (out, err) = execute(*args, **kwargs) failed = False except exception.ProcessExecutionError as exn: (out, err) = ('', str(exn)) LOG.debug(err) failed = True if ((not failed) and discard_warnings and err): LOG.debug(err) err = '' return (out, err)
[ "def", "trycmd", "(", "*", "args", ",", "**", "kwargs", ")", ":", "discard_warnings", "=", "kwargs", ".", "pop", "(", "'discard_warnings'", ",", "False", ")", "try", ":", "(", "out", ",", "err", ")", "=", "execute", "(", "*", "args", ",", "**", "kwargs", ")", "failed", "=", "False", "except", "exception", ".", "ProcessExecutionError", "as", "exn", ":", "(", "out", ",", "err", ")", "=", "(", "''", ",", "str", "(", "exn", ")", ")", "LOG", ".", "debug", "(", "err", ")", "failed", "=", "True", "if", "(", "(", "not", "failed", ")", "and", "discard_warnings", "and", "err", ")", ":", "LOG", ".", "debug", "(", "err", ")", "err", "=", "''", "return", "(", "out", ",", "err", ")" ]
a wrapper around execute() to more easily handle warnings and errors .
train
false
42,783
def CDLINNECK(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLINNECK)
[ "def", "CDLINNECK", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLINNECK", ")" ]
in-neck pattern .
train
false
42,784
def n_queens(queen_count): cols = range(queen_count) for vec in permutations(cols): if (queen_count == len(set(((vec[i] + i) for i in cols))) == len(set(((vec[i] - i) for i in cols)))): (yield vec)
[ "def", "n_queens", "(", "queen_count", ")", ":", "cols", "=", "range", "(", "queen_count", ")", "for", "vec", "in", "permutations", "(", "cols", ")", ":", "if", "(", "queen_count", "==", "len", "(", "set", "(", "(", "(", "vec", "[", "i", "]", "+", "i", ")", "for", "i", "in", "cols", ")", ")", ")", "==", "len", "(", "set", "(", "(", "(", "vec", "[", "i", "]", "-", "i", ")", "for", "i", "in", "cols", ")", ")", ")", ")", ":", "(", "yield", "vec", ")" ]
n-queens solver .
train
true
42,786
def _clean_rerp_input(X, data, reject, flat, decim, info, tstep): has_val = np.unique(X.nonzero()[0]) if (reject is not None): (_, inds) = _reject_data_segments(data, reject, flat, decim=None, info=info, tstep=tstep) for (t0, t1) in inds: has_val = np.setdiff1d(has_val, range(t0, t1)) return (X.tocsr()[has_val], data[:, has_val])
[ "def", "_clean_rerp_input", "(", "X", ",", "data", ",", "reject", ",", "flat", ",", "decim", ",", "info", ",", "tstep", ")", ":", "has_val", "=", "np", ".", "unique", "(", "X", ".", "nonzero", "(", ")", "[", "0", "]", ")", "if", "(", "reject", "is", "not", "None", ")", ":", "(", "_", ",", "inds", ")", "=", "_reject_data_segments", "(", "data", ",", "reject", ",", "flat", ",", "decim", "=", "None", ",", "info", "=", "info", ",", "tstep", "=", "tstep", ")", "for", "(", "t0", ",", "t1", ")", "in", "inds", ":", "has_val", "=", "np", ".", "setdiff1d", "(", "has_val", ",", "range", "(", "t0", ",", "t1", ")", ")", "return", "(", "X", ".", "tocsr", "(", ")", "[", "has_val", "]", ",", "data", "[", ":", ",", "has_val", "]", ")" ]
remove empty and contaminated points from data & predictor matrices .
train
false
42,787
def get_user_permission_level(user, site): if (not user.is_authenticated()): raise NoPermissionsException if (user.is_superuser or (not get_cms_setting('PERMISSION'))): return ROOT_USER_LEVEL has_global_perms = GlobalPagePermission.objects.get_with_change_permissions(user, site.pk).exists() if has_global_perms: return ROOT_USER_LEVEL try: permission = PagePermission.objects.get_with_change_permissions(user, site).order_by('page__path')[0] except IndexError: raise NoPermissionsException return permission.page.depth
[ "def", "get_user_permission_level", "(", "user", ",", "site", ")", ":", "if", "(", "not", "user", ".", "is_authenticated", "(", ")", ")", ":", "raise", "NoPermissionsException", "if", "(", "user", ".", "is_superuser", "or", "(", "not", "get_cms_setting", "(", "'PERMISSION'", ")", ")", ")", ":", "return", "ROOT_USER_LEVEL", "has_global_perms", "=", "GlobalPagePermission", ".", "objects", ".", "get_with_change_permissions", "(", "user", ",", "site", ".", "pk", ")", ".", "exists", "(", ")", "if", "has_global_perms", ":", "return", "ROOT_USER_LEVEL", "try", ":", "permission", "=", "PagePermission", ".", "objects", ".", "get_with_change_permissions", "(", "user", ",", "site", ")", ".", "order_by", "(", "'page__path'", ")", "[", "0", "]", "except", "IndexError", ":", "raise", "NoPermissionsException", "return", "permission", ".", "page", ".", "depth" ]
returns highest user level from the page/permission hierarchy on which user haves can_change_permission .
train
false
42,788
def _mne_root_problem(mne_root): if (mne_root is None): return 'MNE_ROOT is not set.' elif (not os.path.exists(mne_root)): return ('MNE_ROOT (%s) does not exist.' % mne_root) else: test_dir = os.path.join(mne_root, 'share', 'mne', 'mne_analyze') if (not os.path.exists(test_dir)): return ('MNE_ROOT (%s) is missing files. If this is your MNE installation, consider reinstalling.' % mne_root)
[ "def", "_mne_root_problem", "(", "mne_root", ")", ":", "if", "(", "mne_root", "is", "None", ")", ":", "return", "'MNE_ROOT is not set.'", "elif", "(", "not", "os", ".", "path", ".", "exists", "(", "mne_root", ")", ")", ":", "return", "(", "'MNE_ROOT (%s) does not exist.'", "%", "mne_root", ")", "else", ":", "test_dir", "=", "os", ".", "path", ".", "join", "(", "mne_root", ",", "'share'", ",", "'mne'", ",", "'mne_analyze'", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "test_dir", ")", ")", ":", "return", "(", "'MNE_ROOT (%s) is missing files. If this is your MNE installation, consider reinstalling.'", "%", "mne_root", ")" ]
check mne_root path .
train
false
42,789
def fold_arguments(pysig, args, kws, normal_handler, default_handler, stararg_handler): ba = pysig.bind(*args, **kws) defargs = [] for (i, param) in enumerate(pysig.parameters.values()): name = param.name default = param.default if (param.kind == param.VAR_POSITIONAL): ba.arguments[name] = stararg_handler(i, param, ba.arguments.get(name, ())) elif (name in ba.arguments): ba.arguments[name] = normal_handler(i, param, ba.arguments[name]) else: assert (default is not param.empty) ba.arguments[name] = default_handler(i, param, default) if ba.kwargs: raise NotImplementedError(('unhandled keyword argument: %s' % list(ba.kwargs))) args = tuple((ba.arguments[param.name] for param in pysig.parameters.values())) return args
[ "def", "fold_arguments", "(", "pysig", ",", "args", ",", "kws", ",", "normal_handler", ",", "default_handler", ",", "stararg_handler", ")", ":", "ba", "=", "pysig", ".", "bind", "(", "*", "args", ",", "**", "kws", ")", "defargs", "=", "[", "]", "for", "(", "i", ",", "param", ")", "in", "enumerate", "(", "pysig", ".", "parameters", ".", "values", "(", ")", ")", ":", "name", "=", "param", ".", "name", "default", "=", "param", ".", "default", "if", "(", "param", ".", "kind", "==", "param", ".", "VAR_POSITIONAL", ")", ":", "ba", ".", "arguments", "[", "name", "]", "=", "stararg_handler", "(", "i", ",", "param", ",", "ba", ".", "arguments", ".", "get", "(", "name", ",", "(", ")", ")", ")", "elif", "(", "name", "in", "ba", ".", "arguments", ")", ":", "ba", ".", "arguments", "[", "name", "]", "=", "normal_handler", "(", "i", ",", "param", ",", "ba", ".", "arguments", "[", "name", "]", ")", "else", ":", "assert", "(", "default", "is", "not", "param", ".", "empty", ")", "ba", ".", "arguments", "[", "name", "]", "=", "default_handler", "(", "i", ",", "param", ",", "default", ")", "if", "ba", ".", "kwargs", ":", "raise", "NotImplementedError", "(", "(", "'unhandled keyword argument: %s'", "%", "list", "(", "ba", ".", "kwargs", ")", ")", ")", "args", "=", "tuple", "(", "(", "ba", ".", "arguments", "[", "param", ".", "name", "]", "for", "param", "in", "pysig", ".", "parameters", ".", "values", "(", ")", ")", ")", "return", "args" ]
given the signature *pysig* .
train
false
42,790
@newrelic.agent.function_trace() @allow_CORS_GET def _document_raw(request, doc, doc_html, rendering_params): response = HttpResponse(doc_html) response['X-Frame-Options'] = 'Allow' response['X-Robots-Tag'] = 'noindex' absolute_url = urlunquote_plus(doc.get_absolute_url()) if (absolute_url in config.KUMA_CUSTOM_SAMPLE_CSS_PATH): response['Content-Type'] = 'text/css; charset=utf-8' elif doc.is_template: response['Content-Type'] = 'text/plain; charset=utf-8' return _set_common_headers(doc, rendering_params['section'], response)
[ "@", "newrelic", ".", "agent", ".", "function_trace", "(", ")", "@", "allow_CORS_GET", "def", "_document_raw", "(", "request", ",", "doc", ",", "doc_html", ",", "rendering_params", ")", ":", "response", "=", "HttpResponse", "(", "doc_html", ")", "response", "[", "'X-Frame-Options'", "]", "=", "'Allow'", "response", "[", "'X-Robots-Tag'", "]", "=", "'noindex'", "absolute_url", "=", "urlunquote_plus", "(", "doc", ".", "get_absolute_url", "(", ")", ")", "if", "(", "absolute_url", "in", "config", ".", "KUMA_CUSTOM_SAMPLE_CSS_PATH", ")", ":", "response", "[", "'Content-Type'", "]", "=", "'text/css; charset=utf-8'", "elif", "doc", ".", "is_template", ":", "response", "[", "'Content-Type'", "]", "=", "'text/plain; charset=utf-8'", "return", "_set_common_headers", "(", "doc", ",", "rendering_params", "[", "'section'", "]", ",", "response", ")" ]
display a raw document .
train
false
42,791
def _app_show(): pyplot = sys.modules['matplotlib.pyplot'] pyplot.show()
[ "def", "_app_show", "(", ")", ":", "pyplot", "=", "sys", ".", "modules", "[", "'matplotlib.pyplot'", "]", "pyplot", ".", "show", "(", ")" ]
block the gui .
train
false
42,792
def set_lock_status(use_lock): get_lock.lock_is_enabled = use_lock
[ "def", "set_lock_status", "(", "use_lock", ")", ":", "get_lock", ".", "lock_is_enabled", "=", "use_lock" ]
enable or disable the lock on the compilation directory .
train
false
42,794
def expose_api_raw_anonymous(func): return expose_api(func, to_json=False, user_required=False)
[ "def", "expose_api_raw_anonymous", "(", "func", ")", ":", "return", "expose_api", "(", "func", ",", "to_json", "=", "False", ",", "user_required", "=", "False", ")" ]
expose this function via the api but dont dump the results to json .
train
false
42,796
def _get_func(func, ps='sdzc'): for p in ps: f = getattr(fblas, (p + func), None) if (f is None): continue (yield f)
[ "def", "_get_func", "(", "func", ",", "ps", "=", "'sdzc'", ")", ":", "for", "p", "in", "ps", ":", "f", "=", "getattr", "(", "fblas", ",", "(", "p", "+", "func", ")", ",", "None", ")", "if", "(", "f", "is", "None", ")", ":", "continue", "(", "yield", "f", ")" ]
just a helper: return a specified blas function w/typecode .
train
false
42,797
def ss2zpk(A, B, C, D, input=0): return tf2zpk(*ss2tf(A, B, C, D, input=input))
[ "def", "ss2zpk", "(", "A", ",", "B", ",", "C", ",", "D", ",", "input", "=", "0", ")", ":", "return", "tf2zpk", "(", "*", "ss2tf", "(", "A", ",", "B", ",", "C", ",", "D", ",", "input", "=", "input", ")", ")" ]
state-space representation to zero-pole-gain representation .
train
false
42,798
def _strip_to_integer(trigger): return int(''.join([x for x in trigger if x.isdigit()]))
[ "def", "_strip_to_integer", "(", "trigger", ")", ":", "return", "int", "(", "''", ".", "join", "(", "[", "x", "for", "x", "in", "trigger", "if", "x", ".", "isdigit", "(", ")", "]", ")", ")" ]
return only the integer part of a string .
train
false
42,800
@handle_response_format @treeio_login_required def sla_edit(request, sla_id, response_format='html'): sla = get_object_or_404(ServiceLevelAgreement, pk=sla_id) if (not request.user.profile.has_permission(sla, mode='w')): return user_denied(request, message="You don't have access to this Service Level Agreement") if request.POST: if ('cancel' not in request.POST): form = ServiceLevelAgreementForm(request.user.profile, request.POST, instance=sla) if form.is_valid(): sla = form.save() return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id])) else: return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id])) else: form = ServiceLevelAgreementForm(request.user.profile, instance=sla) context = _get_default_context(request) context.update({'sla': sla, 'form': form}) return render_to_response('services/sla_edit', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "sla_edit", "(", "request", ",", "sla_id", ",", "response_format", "=", "'html'", ")", ":", "sla", "=", "get_object_or_404", "(", "ServiceLevelAgreement", ",", "pk", "=", "sla_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "sla", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Service Level Agreement\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "ServiceLevelAgreementForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "sla", ")", "if", "form", ".", "is_valid", "(", ")", ":", "sla", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_sla_view'", ",", "args", "=", "[", "sla", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'services_sla_view'", ",", "args", "=", "[", "sla", ".", "id", "]", ")", ")", "else", ":", "form", "=", "ServiceLevelAgreementForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "sla", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'sla'", ":", "sla", ",", "'form'", ":", "form", "}", ")", "return", "render_to_response", "(", "'services/sla_edit'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
servicelevelagreement edit .
train
false
42,801
def bytes2NativeString(x, encoding='utf-8'): if (isinstance(x, bytes) and (str != bytes)): return x.decode(encoding) return x
[ "def", "bytes2NativeString", "(", "x", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "(", "isinstance", "(", "x", ",", "bytes", ")", "and", "(", "str", "!=", "bytes", ")", ")", ":", "return", "x", ".", "decode", "(", "encoding", ")", "return", "x" ]
convert c{bytes} to a native c{str} .
train
true
42,802
def _ethernet(src, dst, protocol, payload): return (((dst + src) + _H(protocol)) + payload)
[ "def", "_ethernet", "(", "src", ",", "dst", ",", "protocol", ",", "payload", ")", ":", "return", "(", "(", "(", "dst", "+", "src", ")", "+", "_H", "(", "protocol", ")", ")", "+", "payload", ")" ]
construct an ethernet frame .
train
false
42,804
@permission_required('questions.tag_question') @require_POST def add_tag_async(request, question_id): try: (question, canonical_name) = _add_tag(request, question_id) except Tag.DoesNotExist: return HttpResponse(json.dumps({'error': unicode(UNAPPROVED_TAG)}), content_type='application/json', status=400) if canonical_name: question.clear_cached_tags() tag = Tag.objects.get(name=canonical_name) tag_url = urlparams(reverse('questions.list', args=[question.product_slug]), tagged=tag.slug) data = {'canonicalName': canonical_name, 'tagUrl': tag_url} return HttpResponse(json.dumps(data), content_type='application/json') return HttpResponse(json.dumps({'error': unicode(NO_TAG)}), content_type='application/json', status=400)
[ "@", "permission_required", "(", "'questions.tag_question'", ")", "@", "require_POST", "def", "add_tag_async", "(", "request", ",", "question_id", ")", ":", "try", ":", "(", "question", ",", "canonical_name", ")", "=", "_add_tag", "(", "request", ",", "question_id", ")", "except", "Tag", ".", "DoesNotExist", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'error'", ":", "unicode", "(", "UNAPPROVED_TAG", ")", "}", ")", ",", "content_type", "=", "'application/json'", ",", "status", "=", "400", ")", "if", "canonical_name", ":", "question", ".", "clear_cached_tags", "(", ")", "tag", "=", "Tag", ".", "objects", ".", "get", "(", "name", "=", "canonical_name", ")", "tag_url", "=", "urlparams", "(", "reverse", "(", "'questions.list'", ",", "args", "=", "[", "question", ".", "product_slug", "]", ")", ",", "tagged", "=", "tag", ".", "slug", ")", "data", "=", "{", "'canonicalName'", ":", "canonical_name", ",", "'tagUrl'", ":", "tag_url", "}", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "data", ")", ",", "content_type", "=", "'application/json'", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'error'", ":", "unicode", "(", "NO_TAG", ")", "}", ")", ",", "content_type", "=", "'application/json'", ",", "status", "=", "400", ")" ]
add a tag to question asyncronously .
train
false
42,805
def check_db_locks(func): def new_func(app, *args, **kwargs): retval = func(app, *args, **kwargs) temp_session = app.session_factory() temp_session.execute('CREATE TABLE dummy (foo INT)') temp_session.execute('DROP TABLE dummy') temp_session.close() return retval return new_func
[ "def", "check_db_locks", "(", "func", ")", ":", "def", "new_func", "(", "app", ",", "*", "args", ",", "**", "kwargs", ")", ":", "retval", "=", "func", "(", "app", ",", "*", "args", ",", "**", "kwargs", ")", "temp_session", "=", "app", ".", "session_factory", "(", ")", "temp_session", ".", "execute", "(", "'CREATE TABLE dummy (foo INT)'", ")", "temp_session", ".", "execute", "(", "'DROP TABLE dummy'", ")", "temp_session", ".", "close", "(", ")", "return", "retval", "return", "new_func" ]
decorator for test functions that verifies no locks are held on the applications database upon exit by creating and dropping a dummy table .
train
false
42,806
@pytest.mark.parametrize(u'rotation, expectedlatlon', [((0 * u.deg), ([0, 1] * u.deg)), ((180 * u.deg), ([0, (-1)] * u.deg)), ((90 * u.deg), ([(-1), 0] * u.deg)), (((-90) * u.deg), ([1, 0] * u.deg))]) def test_skycoord_skyoffset_frame_rotation(rotation, expectedlatlon): origin = SkyCoord((45 * u.deg), (45 * u.deg)) target = SkyCoord((45 * u.deg), (46 * u.deg)) aframe = origin.skyoffset_frame(rotation=rotation) trans = target.transform_to(aframe) assert_allclose([trans.lon.wrap_at((180 * u.deg)), trans.lat], expectedlatlon, atol=(1e-10 * u.deg))
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'rotation, expectedlatlon'", ",", "[", "(", "(", "0", "*", "u", ".", "deg", ")", ",", "(", "[", "0", ",", "1", "]", "*", "u", ".", "deg", ")", ")", ",", "(", "(", "180", "*", "u", ".", "deg", ")", ",", "(", "[", "0", ",", "(", "-", "1", ")", "]", "*", "u", ".", "deg", ")", ")", ",", "(", "(", "90", "*", "u", ".", "deg", ")", ",", "(", "[", "(", "-", "1", ")", ",", "0", "]", "*", "u", ".", "deg", ")", ")", ",", "(", "(", "(", "-", "90", ")", "*", "u", ".", "deg", ")", ",", "(", "[", "1", ",", "0", "]", "*", "u", ".", "deg", ")", ")", "]", ")", "def", "test_skycoord_skyoffset_frame_rotation", "(", "rotation", ",", "expectedlatlon", ")", ":", "origin", "=", "SkyCoord", "(", "(", "45", "*", "u", ".", "deg", ")", ",", "(", "45", "*", "u", ".", "deg", ")", ")", "target", "=", "SkyCoord", "(", "(", "45", "*", "u", ".", "deg", ")", ",", "(", "46", "*", "u", ".", "deg", ")", ")", "aframe", "=", "origin", ".", "skyoffset_frame", "(", "rotation", "=", "rotation", ")", "trans", "=", "target", ".", "transform_to", "(", "aframe", ")", "assert_allclose", "(", "[", "trans", ".", "lon", ".", "wrap_at", "(", "(", "180", "*", "u", ".", "deg", ")", ")", ",", "trans", ".", "lat", "]", ",", "expectedlatlon", ",", "atol", "=", "(", "1e-10", "*", "u", ".", "deg", ")", ")" ]
test if passing a rotation argument via skycoord works .
train
false
42,809
@pytest.mark.parametrize(u'st', spacetimes) def test_atciqd_aticq(st): (t, pos) = st (jd1, jd2) = get_jd12(t, u'tdb') (astrom, _) = erfa.apci13(jd1, jd2) (ra, dec) = pos ra = ra.value dec = dec.value assert_allclose(erfa.atciqd(ra, dec, astrom), atciqd(ra, dec, astrom)) assert_allclose(erfa.aticq(ra, dec, astrom), aticq(ra, dec, astrom))
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "u'st'", ",", "spacetimes", ")", "def", "test_atciqd_aticq", "(", "st", ")", ":", "(", "t", ",", "pos", ")", "=", "st", "(", "jd1", ",", "jd2", ")", "=", "get_jd12", "(", "t", ",", "u'tdb'", ")", "(", "astrom", ",", "_", ")", "=", "erfa", ".", "apci13", "(", "jd1", ",", "jd2", ")", "(", "ra", ",", "dec", ")", "=", "pos", "ra", "=", "ra", ".", "value", "dec", "=", "dec", ".", "value", "assert_allclose", "(", "erfa", ".", "atciqd", "(", "ra", ",", "dec", ",", "astrom", ")", ",", "atciqd", "(", "ra", ",", "dec", ",", "astrom", ")", ")", "assert_allclose", "(", "erfa", ".", "aticq", "(", "ra", ",", "dec", ",", "astrom", ")", ",", "aticq", "(", "ra", ",", "dec", ",", "astrom", ")", ")" ]
check replacements against erfa versions for consistency .
train
false
42,810
def jobconf_from_dict(jobconf, name, default=None): if (name in jobconf): return jobconf[name] for alternative in _JOBCONF_MAP.get(name, {}).values(): if (alternative in jobconf): return jobconf[alternative] return default
[ "def", "jobconf_from_dict", "(", "jobconf", ",", "name", ",", "default", "=", "None", ")", ":", "if", "(", "name", "in", "jobconf", ")", ":", "return", "jobconf", "[", "name", "]", "for", "alternative", "in", "_JOBCONF_MAP", ".", "get", "(", "name", ",", "{", "}", ")", ".", "values", "(", ")", ":", "if", "(", "alternative", "in", "jobconf", ")", ":", "return", "jobconf", "[", "alternative", "]", "return", "default" ]
get the value of a jobconf variable from the given dictionary .
train
false
42,812
def p_ignorable_statement(s): if (s.sy == 'BEGIN_STRING'): pos = s.position() string_node = p_atom(s) s.expect_newline('Syntax error in string', ignore_semicolon=True) return Nodes.ExprStatNode(pos, expr=string_node) return None
[ "def", "p_ignorable_statement", "(", "s", ")", ":", "if", "(", "s", ".", "sy", "==", "'BEGIN_STRING'", ")", ":", "pos", "=", "s", ".", "position", "(", ")", "string_node", "=", "p_atom", "(", "s", ")", "s", ".", "expect_newline", "(", "'Syntax error in string'", ",", "ignore_semicolon", "=", "True", ")", "return", "Nodes", ".", "ExprStatNode", "(", "pos", ",", "expr", "=", "string_node", ")", "return", "None" ]
parses any kind of ignorable statement that is allowed in .
train
false
42,813
def find_empty_alt(content, path): imgs = re.compile(u'\n (?:\n # src before alt\n <img\n [^\\>]*\n src=([\'"])(.*?)\\1\n [^\\>]*\n alt=([\'"])\\3\n )|(?:\n # alt before src\n <img\n [^\\>]*\n alt=([\'"])\\4\n [^\\>]*\n src=([\'"])(.*?)\\5\n )\n ', re.X) for match in re.findall(imgs, content): logger.warning(u'Empty alt attribute for image %s in %s', os.path.basename((match[1] + match[5])), path, extra={u'limit_msg': u'Other images have empty alt attributes'})
[ "def", "find_empty_alt", "(", "content", ",", "path", ")", ":", "imgs", "=", "re", ".", "compile", "(", "u'\\n (?:\\n # src before alt\\n <img\\n [^\\\\>]*\\n src=([\\'\"])(.*?)\\\\1\\n [^\\\\>]*\\n alt=([\\'\"])\\\\3\\n )|(?:\\n # alt before src\\n <img\\n [^\\\\>]*\\n alt=([\\'\"])\\\\4\\n [^\\\\>]*\\n src=([\\'\"])(.*?)\\\\5\\n )\\n '", ",", "re", ".", "X", ")", "for", "match", "in", "re", ".", "findall", "(", "imgs", ",", "content", ")", ":", "logger", ".", "warning", "(", "u'Empty alt attribute for image %s in %s'", ",", "os", ".", "path", ".", "basename", "(", "(", "match", "[", "1", "]", "+", "match", "[", "5", "]", ")", ")", ",", "path", ",", "extra", "=", "{", "u'limit_msg'", ":", "u'Other images have empty alt attributes'", "}", ")" ]
find images with empty alt create warnings for all images with empty alt .
train
false
42,814
def toPyModel(model_ptr): if (bool(model_ptr) == False): raise ValueError('Null pointer') m = model_ptr.contents m.__createfrom__ = 'C' return m
[ "def", "toPyModel", "(", "model_ptr", ")", ":", "if", "(", "bool", "(", "model_ptr", ")", "==", "False", ")", ":", "raise", "ValueError", "(", "'Null pointer'", ")", "m", "=", "model_ptr", ".", "contents", "m", ".", "__createfrom__", "=", "'C'", "return", "m" ]
topymodel -> svm_model convert a ctypes pointer to a python svm_model .
train
true
42,816
def get_date(): ret = salt.utils.mac_utils.execute_return_result('systemsetup -getdate') return salt.utils.mac_utils.parse_return(ret)
[ "def", "get_date", "(", ")", ":", "ret", "=", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_result", "(", "'systemsetup -getdate'", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "parse_return", "(", "ret", ")" ]
displays the current date :return: the system date :rtype: str cli example: .
train
true