id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
42,251
def _ssh_read_next_string(data): if (len(data) < 4): raise ValueError('Key is not in the proper format') (str_len,) = struct.unpack('>I', data[:4]) if (len(data) < (str_len + 4)): raise ValueError('Key is not in the proper format') return (data[4:(4 + str_len)], data[(4 + str_len):])
[ "def", "_ssh_read_next_string", "(", "data", ")", ":", "if", "(", "len", "(", "data", ")", "<", "4", ")", ":", "raise", "ValueError", "(", "'Key is not in the proper format'", ")", "(", "str_len", ",", ")", "=", "struct", ".", "unpack", "(", "'>I'", ",", "data", "[", ":", "4", "]", ")", "if", "(", "len", "(", "data", ")", "<", "(", "str_len", "+", "4", ")", ")", ":", "raise", "ValueError", "(", "'Key is not in the proper format'", ")", "return", "(", "data", "[", "4", ":", "(", "4", "+", "str_len", ")", "]", ",", "data", "[", "(", "4", "+", "str_len", ")", ":", "]", ")" ]
retrieves the next rfc 4251 string value from the data .
train
false
42,253
def _create_vocab(captions):
    """Build a word->id Vocabulary from tokenized captions.

    Counts token frequencies, keeps words occurring at least
    ``FLAGS.min_word_count`` times (most frequent first), writes the
    "<word> <count>" pairs to ``FLAGS.word_counts_output_file``, and
    returns a ``Vocabulary`` whose unknown-word id is one past the last
    known word.  ``captions`` is assumed to be an iterable of token
    sequences — confirm against the caller.
    """
    print('Creating vocabulary.')
    counter = Counter()
    for caption in captions:
        counter.update(caption)
    print('Total words:', len(counter))
    # Keep only sufficiently frequent words, ordered by descending count.
    word_counts = [pair for pair in counter.items() if pair[1] >= FLAGS.min_word_count]
    word_counts.sort(key=lambda pair: pair[1], reverse=True)
    print('Words in vocabulary:', len(word_counts))
    with tf.gfile.FastGFile(FLAGS.word_counts_output_file, 'w') as f:
        f.write('\n'.join(('%s %d' % pair) for pair in word_counts))
    print('Wrote vocabulary file:', FLAGS.word_counts_output_file)
    reverse_vocab = [pair[0] for pair in word_counts]
    # The id just past the known words serves as the <unknown> id.
    unk_id = len(reverse_vocab)
    vocab_dict = {word: index for (index, word) in enumerate(reverse_vocab)}
    return Vocabulary(vocab_dict, unk_id)
[ "def", "_create_vocab", "(", "captions", ")", ":", "print", "(", "'Creating vocabulary.'", ")", "counter", "=", "Counter", "(", ")", "for", "c", "in", "captions", ":", "counter", ".", "update", "(", "c", ")", "print", "(", "'Total words:'", ",", "len", "(", "counter", ")", ")", "word_counts", "=", "[", "x", "for", "x", "in", "counter", ".", "items", "(", ")", "if", "(", "x", "[", "1", "]", ">=", "FLAGS", ".", "min_word_count", ")", "]", "word_counts", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", "[", "1", "]", ")", ",", "reverse", "=", "True", ")", "print", "(", "'Words in vocabulary:'", ",", "len", "(", "word_counts", ")", ")", "with", "tf", ".", "gfile", ".", "FastGFile", "(", "FLAGS", ".", "word_counts_output_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'\\n'", ".", "join", "(", "[", "(", "'%s %d'", "%", "(", "w", ",", "c", ")", ")", "for", "(", "w", ",", "c", ")", "in", "word_counts", "]", ")", ")", "print", "(", "'Wrote vocabulary file:'", ",", "FLAGS", ".", "word_counts_output_file", ")", "reverse_vocab", "=", "[", "x", "[", "0", "]", "for", "x", "in", "word_counts", "]", "unk_id", "=", "len", "(", "reverse_vocab", ")", "vocab_dict", "=", "dict", "(", "[", "(", "x", ",", "y", ")", "for", "(", "y", ",", "x", ")", "in", "enumerate", "(", "reverse_vocab", ")", "]", ")", "vocab", "=", "Vocabulary", "(", "vocab_dict", ",", "unk_id", ")", "return", "vocab" ]
creates the vocabulary of word to word_id .
train
false
42,254
def get_network_str(layer, get_network=True, incomings=False, outgoings=False):
    """Return a table-formatted string describing the network under *layer*.

    When ``get_network`` is True, *layer* is expanded via
    ``get_all_layers``; otherwise *layer* is assumed to already be the
    list of layers.  ``incomings``/``outgoings`` add adjacency columns.
    """
    network = get_all_layers(layer) if get_network else layer
    rows = _insert_header(deque([]), incomings=incomings, outgoings=outgoings)
    if incomings or outgoings:
        ins, outs = _get_adjacency_lists(network)
    for index, current in enumerate(network):
        row = deque([])
        if incomings:
            row.append(ins[index])
        row.append(index)
        if outgoings:
            row.append(outs[index])
        row.append(str(current))
        rows.append(row)
    return _get_table_str(rows)
[ "def", "get_network_str", "(", "layer", ",", "get_network", "=", "True", ",", "incomings", "=", "False", ",", "outgoings", "=", "False", ")", ":", "if", "get_network", ":", "network", "=", "get_all_layers", "(", "layer", ")", "else", ":", "network", "=", "layer", "network_str", "=", "deque", "(", "[", "]", ")", "network_str", "=", "_insert_header", "(", "network_str", ",", "incomings", "=", "incomings", ",", "outgoings", "=", "outgoings", ")", "if", "(", "incomings", "or", "outgoings", ")", ":", "(", "ins", ",", "outs", ")", "=", "_get_adjacency_lists", "(", "network", ")", "for", "(", "i", ",", "current_layer", ")", "in", "enumerate", "(", "network", ")", ":", "layer_str", "=", "deque", "(", "[", "]", ")", "if", "incomings", ":", "layer_str", ".", "append", "(", "ins", "[", "i", "]", ")", "layer_str", ".", "append", "(", "i", ")", "if", "outgoings", ":", "layer_str", ".", "append", "(", "outs", "[", "i", "]", ")", "layer_str", ".", "append", "(", "str", "(", "current_layer", ")", ")", "network_str", ".", "append", "(", "layer_str", ")", "return", "_get_table_str", "(", "network_str", ")" ]
returns a string representation of the entire network contained under this layer .
train
false
42,255
def dh_shared_key(key, b):
    """Return the Diffie-Hellman shared key ``x**b mod p``.

    ``key`` is a ``(p, _, x)`` triple (prime modulus, unused middle
    element, public value).  ``b`` must satisfy ``1 < b < p`` or a
    ``ValueError`` is raised.
    """
    p, _, x = key
    if not 1 < b < p:
        raise ValueError(filldedent(('\n Value of b should be greater 1 and less\n than prime %s.' % p)))
    return pow(x, b, p)
[ "def", "dh_shared_key", "(", "key", ",", "b", ")", ":", "(", "p", ",", "_", ",", "x", ")", "=", "key", "if", "(", "(", "1", ">=", "b", ")", "or", "(", "b", ">=", "p", ")", ")", ":", "raise", "ValueError", "(", "filldedent", "(", "(", "'\\n Value of b should be greater 1 and less\\n than prime %s.'", "%", "p", ")", ")", ")", "return", "pow", "(", "x", ",", "b", ",", "p", ")" ]
return an integer that is the shared key .
train
false
42,256
def _parse_current_network_settings():
    """Parse /etc/default/networking and return the current configuration.

    Returns an OrderedDict with keys ``networking`` (value of the
    CONFIGURE_INTERFACES setting, '' if absent), ``hostname``,
    ``domainname`` and ``searchdomain`` (each from its helper parser).
    """
    opts = salt.utils.odict.OrderedDict()
    opts['networking'] = ''
    if os.path.isfile(_DEB_NETWORKING_FILE):
        with salt.utils.fopen(_DEB_NETWORKING_FILE) as contents:
            for line in contents:
                # Skip comments; pick up the CONFIGURE_INTERFACES=... line.
                if line.startswith('#'):
                    continue
                if line.startswith('CONFIGURE_INTERFACES'):
                    opts['networking'] = line.split('=', 1)[1].strip()
    opts['hostname'] = _parse_hostname()
    opts['domainname'] = _parse_domainname()
    opts['searchdomain'] = _parse_searchdomain()
    return opts
[ "def", "_parse_current_network_settings", "(", ")", ":", "opts", "=", "salt", ".", "utils", ".", "odict", ".", "OrderedDict", "(", ")", "opts", "[", "'networking'", "]", "=", "''", "if", "os", ".", "path", ".", "isfile", "(", "_DEB_NETWORKING_FILE", ")", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "_DEB_NETWORKING_FILE", ")", "as", "contents", ":", "for", "line", "in", "contents", ":", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "elif", "line", ".", "startswith", "(", "'CONFIGURE_INTERFACES'", ")", ":", "opts", "[", "'networking'", "]", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "[", "1", "]", ".", "strip", "(", ")", "hostname", "=", "_parse_hostname", "(", ")", "domainname", "=", "_parse_domainname", "(", ")", "searchdomain", "=", "_parse_searchdomain", "(", ")", "opts", "[", "'hostname'", "]", "=", "hostname", "opts", "[", "'domainname'", "]", "=", "domainname", "opts", "[", "'searchdomain'", "]", "=", "searchdomain", "return", "opts" ]
parse /etc/default/networking and return current configuration .
train
true
42,257
def is_event_loop_running_qt4(app=None):
    """Report whether the Qt4 event loop is running.

    Inside IPython the answer comes from IPython's record of the active
    GUI event loop; otherwise the (possibly newly created) Qt app's
    ``_in_event_loop`` flag is consulted, defaulting to False.
    """
    ip = get_ipython()
    if ip is not None:
        return ip.active_eventloop and ip.active_eventloop.startswith('qt')
    if app is None:
        app = get_app_qt4([''])
    # _in_event_loop is presumably set by the event-loop starter — confirm.
    return app._in_event_loop if hasattr(app, '_in_event_loop') else False
[ "def", "is_event_loop_running_qt4", "(", "app", "=", "None", ")", ":", "ip", "=", "get_ipython", "(", ")", "if", "(", "ip", "is", "not", "None", ")", ":", "return", "(", "ip", ".", "active_eventloop", "and", "ip", ".", "active_eventloop", ".", "startswith", "(", "'qt'", ")", ")", "if", "(", "app", "is", "None", ")", ":", "app", "=", "get_app_qt4", "(", "[", "''", "]", ")", "if", "hasattr", "(", "app", ",", "'_in_event_loop'", ")", ":", "return", "app", ".", "_in_event_loop", "else", ":", "return", "False" ]
is the qt4 event loop running .
train
false
42,259
def bisect_left(a, x, lo=0, hi=None):
    """Return the leftmost index at which *x* can be inserted into sorted *a*.

    Equivalent to the stdlib ``bisect.bisect_left``: all entries left of
    the result are strictly less than *x*.  ``lo``/``hi`` bound the slice
    searched; a negative ``lo`` raises ``ValueError``.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    while lo < hi:
        middle = (lo + hi) // 2
        if x <= a[middle]:
            # a[middle] is not smaller than x, so the answer is at or left of it.
            hi = middle
        else:
            lo = middle + 1
    return lo
[ "def", "bisect_left", "(", "a", ",", "x", ",", "lo", "=", "0", ",", "hi", "=", "None", ")", ":", "if", "(", "lo", "<", "0", ")", ":", "raise", "ValueError", "(", "'lo must be non-negative'", ")", "if", "(", "hi", "is", "None", ")", ":", "hi", "=", "len", "(", "a", ")", "while", "(", "lo", "<", "hi", ")", ":", "mid", "=", "(", "(", "lo", "+", "hi", ")", "//", "2", ")", "if", "(", "a", "[", "mid", "]", "<", "x", ")", ":", "lo", "=", "(", "mid", "+", "1", ")", "else", ":", "hi", "=", "mid", "return", "lo" ]
return the index where to insert item x in list a .
train
true
42,260
def sequentially(changes, sleep_when_empty=timedelta(seconds=60)):
    """Run a series of changes in sequence.

    If every change is a ``NoOp`` (including the empty case), collapse
    them into a single ``NoOp`` whose sleep is the minimum of the
    members' sleeps, or ``sleep_when_empty`` when there are none.
    """
    if not all(isinstance(change, NoOp) for change in changes):
        return _Sequentially(changes=changes)
    if changes:
        return NoOp(sleep=min(change.sleep for change in changes))
    return NoOp(sleep=sleep_when_empty)
[ "def", "sequentially", "(", "changes", ",", "sleep_when_empty", "=", "timedelta", "(", "seconds", "=", "60", ")", ")", ":", "if", "all", "(", "(", "isinstance", "(", "c", ",", "NoOp", ")", "for", "c", "in", "changes", ")", ")", ":", "sleep", "=", "(", "min", "(", "(", "c", ".", "sleep", "for", "c", "in", "changes", ")", ")", "if", "changes", "else", "sleep_when_empty", ")", "return", "NoOp", "(", "sleep", "=", "sleep", ")", "return", "_Sequentially", "(", "changes", "=", "changes", ")" ]
run a series of changes in sequence .
train
false
42,261
@utils.arg('tenant', metavar='<tenant_id>', help='UUID of tenant to list the default quotas for.')
@utils.service_type('monitor')
def do_quota_defaults(cs, args):
    """List the default quotas for a tenant.

    ``cs`` is the service client and ``args.tenant`` the tenant UUID;
    the fetched defaults are rendered by ``_quota_show``.
    """
    _quota_show(cs.quotas.defaults(args.tenant))
[ "@", "utils", ".", "arg", "(", "'tenant'", ",", "metavar", "=", "'<tenant_id>'", ",", "help", "=", "'UUID of tenant to list the default quotas for.'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_quota_defaults", "(", "cs", ",", "args", ")", ":", "_quota_show", "(", "cs", ".", "quotas", ".", "defaults", "(", "args", ".", "tenant", ")", ")" ]
list the default quotas for a tenant .
train
false
42,262
def identify_names(code):
    """Build a summary of names used in *code*, resolved to their modules.

    Parses the source with ``ast`` and, for every mapped name, splits
    its fully-qualified form into module and attribute.  Returns a dict
    of ``{name: {'name', 'module', 'module_short'}}``; an unparseable
    snippet yields ``{}``.
    """
    finder = NameFinder()
    try:
        finder.visit(ast.parse(code))
    except SyntaxError:
        return {}
    summary = {}
    for name, full_name in finder.get_mapping():
        parts = full_name.rsplit('.', 1)
        if len(parts) == 1:
            # No module qualifier; nothing to resolve.
            continue
        module, attribute = parts
        summary[name] = {
            'name': attribute,
            'module': module,
            'module_short': get_short_module_name(module, attribute),
        }
    return summary
[ "def", "identify_names", "(", "code", ")", ":", "finder", "=", "NameFinder", "(", ")", "try", ":", "finder", ".", "visit", "(", "ast", ".", "parse", "(", "code", ")", ")", "except", "SyntaxError", ":", "return", "{", "}", "example_code_obj", "=", "{", "}", "for", "(", "name", ",", "full_name", ")", "in", "finder", ".", "get_mapping", "(", ")", ":", "splitted", "=", "full_name", ".", "rsplit", "(", "'.'", ",", "1", ")", "if", "(", "len", "(", "splitted", ")", "==", "1", ")", ":", "continue", "(", "module", ",", "attribute", ")", "=", "splitted", "module_short", "=", "get_short_module_name", "(", "module", ",", "attribute", ")", "cobj", "=", "{", "'name'", ":", "attribute", ",", "'module'", ":", "module", ",", "'module_short'", ":", "module_short", "}", "example_code_obj", "[", "name", "]", "=", "cobj", "return", "example_code_obj" ]
builds a codeobj summary by identifying and resolving used names .
train
true
42,264
def clear_from_fields(item, fields, is_subobject=False):
    """Reset *item*'s attributes to the defaults declared in *fields*.

    Each entry's default lives at index 2 for subobjects, else index 1;
    entries whose name starts with ``*`` are skipped.  String defaults
    of the form ``SETTINGS:key`` are resolved against ``item.settings``.
    System-type items additionally get their interfaces cleared.
    (Python 2 code: uses ``basestring``.)
    """
    for elems in fields:
        if elems[0].startswith('*'):
            continue
        val = elems[2] if is_subobject else elems[1]
        if isinstance(val, basestring) and val.startswith('SETTINGS:'):
            setkey = val.split(':')[-1]
            val = getattr(item.settings, setkey)
        setattr(item, elems[0], val)
    if item.COLLECTION_TYPE == 'system':
        item.interfaces = {}
[ "def", "clear_from_fields", "(", "item", ",", "fields", ",", "is_subobject", "=", "False", ")", ":", "for", "elems", "in", "fields", ":", "if", "elems", "[", "0", "]", ".", "startswith", "(", "'*'", ")", ":", "continue", "if", "is_subobject", ":", "val", "=", "elems", "[", "2", "]", "else", ":", "val", "=", "elems", "[", "1", "]", "if", "isinstance", "(", "val", ",", "basestring", ")", ":", "if", "val", ".", "startswith", "(", "'SETTINGS:'", ")", ":", "setkey", "=", "val", ".", "split", "(", "':'", ")", "[", "(", "-", "1", ")", "]", "val", "=", "getattr", "(", "item", ".", "settings", ",", "setkey", ")", "setattr", "(", "item", ",", "elems", "[", "0", "]", ",", "val", ")", "if", "(", "item", ".", "COLLECTION_TYPE", "==", "'system'", ")", ":", "item", ".", "interfaces", "=", "{", "}" ]
used by various item_* .
train
false
42,265
@pytest.mark.network
def test_uninstall_with_scripts(script):
    """Uninstall an easy_installed package with scripts.

    Installs PyLogo via easy_install, checks it was recorded in
    easy-install.pth, then pip-uninstalls it and verifies the
    filesystem is restored (build dir, cache and the .pth file are
    expected to differ).
    """
    result = script.run('easy_install', 'PyLogo', expect_stderr=True)
    easy_install_pth = (script.site_packages / 'easy-install.pth')
    # NOTE(review): presumably the project name is lowercased on Windows — confirm.
    pylogo = (((sys.platform == 'win32') and 'pylogo') or 'PyLogo')
    assert (pylogo in result.files_updated[easy_install_pth].bytes)
    result2 = script.pip('uninstall', 'pylogo', '-y')
    assert_all_changes(result, result2, [(script.venv / 'build'), 'cache', easy_install_pth])
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_uninstall_with_scripts", "(", "script", ")", ":", "result", "=", "script", ".", "run", "(", "'easy_install'", ",", "'PyLogo'", ",", "expect_stderr", "=", "True", ")", "easy_install_pth", "=", "(", "script", ".", "site_packages", "/", "'easy-install.pth'", ")", "pylogo", "=", "(", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "and", "'pylogo'", ")", "or", "'PyLogo'", ")", "assert", "(", "pylogo", "in", "result", ".", "files_updated", "[", "easy_install_pth", "]", ".", "bytes", ")", "result2", "=", "script", ".", "pip", "(", "'uninstall'", ",", "'pylogo'", ",", "'-y'", ")", "assert_all_changes", "(", "result", ",", "result2", ",", "[", "(", "script", ".", "venv", "/", "'build'", ")", ",", "'cache'", ",", "easy_install_pth", "]", ")" ]
uninstall an easy_installed package with scripts .
train
false
42,270
def setBEGINLIBPATH():
    """Append the lib-dynload directory to the BEGINLIBPATH env variable.

    The OS/2 EMX port keeps optional extension DLLs under
    ``sys.prefix/Lib/lib-dynload``; this adds that directory to the
    ``;``-separated BEGINLIBPATH, reusing a trailing empty slot if the
    variable ends with ``;``.
    """
    dllpath = os.path.join(sys.prefix, 'Lib', 'lib-dynload')
    entries = os.environ['BEGINLIBPATH'].split(';')
    if entries[-1]:
        entries.append(dllpath)
    else:
        # A trailing ';' left an empty final slot — fill it instead of appending.
        entries[-1] = dllpath
    os.environ['BEGINLIBPATH'] = ';'.join(entries)
[ "def", "setBEGINLIBPATH", "(", ")", ":", "dllpath", "=", "os", ".", "path", ".", "join", "(", "sys", ".", "prefix", ",", "'Lib'", ",", "'lib-dynload'", ")", "libpath", "=", "os", ".", "environ", "[", "'BEGINLIBPATH'", "]", ".", "split", "(", "';'", ")", "if", "libpath", "[", "(", "-", "1", ")", "]", ":", "libpath", ".", "append", "(", "dllpath", ")", "else", ":", "libpath", "[", "(", "-", "1", ")", "]", "=", "dllpath", "os", ".", "environ", "[", "'BEGINLIBPATH'", "]", "=", "';'", ".", "join", "(", "libpath", ")" ]
the os/2 emx port has optional extension modules that do double duty as dlls for other extensions .
train
true
42,271
def test_install_curdir(script, data):
    """Test installing the current directory ('.') with pip.

    Runs ``pip install .`` from the FSPkg fixture directory (after
    removing any stale egg-info) and asserts both the package folder
    and its egg-info appear in site-packages.
    """
    run_from = data.packages.join('FSPkg')
    egg_info = join(run_from, 'FSPkg.egg-info')
    # A leftover egg-info from a previous run would skew files_created.
    if os.path.isdir(egg_info):
        rmtree(egg_info)
    result = script.pip('install', curdir, cwd=run_from, expect_error=False)
    fspkg_folder = (script.site_packages / 'fspkg')
    egg_info_folder = ((script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info') % pyversion)
    assert (fspkg_folder in result.files_created), str(result.stdout)
    assert (egg_info_folder in result.files_created), str(result)
[ "def", "test_install_curdir", "(", "script", ",", "data", ")", ":", "run_from", "=", "data", ".", "packages", ".", "join", "(", "'FSPkg'", ")", "egg_info", "=", "join", "(", "run_from", ",", "'FSPkg.egg-info'", ")", "if", "os", ".", "path", ".", "isdir", "(", "egg_info", ")", ":", "rmtree", "(", "egg_info", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "curdir", ",", "cwd", "=", "run_from", ",", "expect_error", "=", "False", ")", "fspkg_folder", "=", "(", "script", ".", "site_packages", "/", "'fspkg'", ")", "egg_info_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'FSPkg-0.1.dev0-py%s.egg-info'", ")", "%", "pyversion", ")", "assert", "(", "fspkg_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ".", "stdout", ")", "assert", "(", "egg_info_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")" ]
test installing current directory ( .
train
false
42,272
@world.absorb
def select_option(name, value, wait_time=GLOBAL_WAIT_FOR_TIMEOUT):
    """Select a <select> option by name/value, then wait for it to take effect.

    Clicks the ``option[value=...]`` nested inside ``select[name=...]``
    and blocks until the select reports the chosen value (for
    synchronization).  Always returns True.
    """
    select_css = "select[name='{}']".format(name)
    option_css = "option[value='{}']".format(value)
    css_click(css_selector='{} {}'.format(select_css, option_css), wait_time=wait_time)
    wait_for(lambda _: css_has_value(select_css, value))
    return True
[ "@", "world", ".", "absorb", "def", "select_option", "(", "name", ",", "value", ",", "wait_time", "=", "GLOBAL_WAIT_FOR_TIMEOUT", ")", ":", "select_css", "=", "\"select[name='{}']\"", ".", "format", "(", "name", ")", "option_css", "=", "\"option[value='{}']\"", ".", "format", "(", "value", ")", "css_selector", "=", "'{} {}'", ".", "format", "(", "select_css", ",", "option_css", ")", "css_click", "(", "css_selector", "=", "css_selector", ",", "wait_time", "=", "wait_time", ")", "wait_for", "(", "(", "lambda", "_", ":", "css_has_value", "(", "select_css", ",", "value", ")", ")", ")", "return", "True" ]
a method to select an option then for synchronization purposes .
train
false
42,273
def _encode(s):
    """Percent-encode *s* for use in a URL.

    Python 2 code (uses ``unicode`` and ``urllib.quote``).  Unicode
    input first has each character in URL_CHARACTERS substituted by its
    replacement, then is UTF-8 encoded (dropping unencodable chars)
    before quoting.  URL_CHARACTERS is defined elsewhere — presumably a
    char->replacement mapping; confirm at its definition.
    """
    if isinstance(s, unicode):
        for (char, repl) in URL_CHARACTERS.items():
            s = s.replace(char, repl)
        s = s.encode('utf8', 'ignore')
    return urllib.quote(s)
[ "def", "_encode", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "for", "(", "char", ",", "repl", ")", "in", "URL_CHARACTERS", ".", "items", "(", ")", ":", "s", "=", "s", ".", "replace", "(", "char", ",", "repl", ")", "s", "=", "s", ".", "encode", "(", "'utf8'", ",", "'ignore'", ")", "return", "urllib", ".", "quote", "(", "s", ")" ]
encode a single character .
train
false
42,274
def addLoopToPixelTable(loop, pixelDictionary, width):
    """Rasterize a closed loop of points into the pixel table.

    Adds one segment per consecutive point pair, wrapping from the last
    point back to the first, via ``addValueSegmentToPixelTable``.
    """
    for pointIndex, pointBegin in enumerate(loop):
        # Modulo wraps the final segment back to the start, closing the loop.
        pointEnd = loop[(pointIndex + 1) % len(loop)]
        addValueSegmentToPixelTable(pointBegin, pointEnd, pixelDictionary, None, width)
[ "def", "addLoopToPixelTable", "(", "loop", ",", "pixelDictionary", ",", "width", ")", ":", "for", "pointIndex", "in", "xrange", "(", "len", "(", "loop", ")", ")", ":", "pointBegin", "=", "loop", "[", "pointIndex", "]", "pointEnd", "=", "loop", "[", "(", "(", "pointIndex", "+", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "addValueSegmentToPixelTable", "(", "pointBegin", ",", "pointEnd", ",", "pixelDictionary", ",", "None", ",", "width", ")" ]
add loop to the pixel table .
train
false
42,275
def gnm_random_graph(n, m, seed=None, directed=False):
    """Return a G(n,m) random graph: n nodes and m uniformly chosen edges.

    With ``directed`` a DiGraph is built instead of a Graph.  ``seed``
    seeds the module-level ``random`` generator, so the exact sequence
    of ``random.choice`` calls below is part of the reproducible
    behavior — do not reorder them.
    """
    if directed:
        G = nx.DiGraph()
    else:
        G = nx.Graph()
    G.add_nodes_from(range(n))
    G.name = ('gnm_random_graph(%s,%s)' % (n, m))
    if (seed is not None):
        random.seed(seed)
    if (n == 1):
        # A single node admits no edges.
        return G
    max_edges = (n * (n - 1))
    if (not directed):
        # Undirected graphs have half as many possible edges (float on py2).
        max_edges /= 2.0
    if (m >= max_edges):
        # Requested at least the maximum: just return the complete graph.
        return complete_graph(n, create_using=G)
    nlist = list(G)
    edge_count = 0
    # Rejection-sample endpoint pairs until m distinct non-loop edges exist.
    while (edge_count < m):
        u = random.choice(nlist)
        v = random.choice(nlist)
        if ((u == v) or G.has_edge(u, v)):
            continue
        else:
            G.add_edge(u, v)
            edge_count = (edge_count + 1)
    return G
[ "def", "gnm_random_graph", "(", "n", ",", "m", ",", "seed", "=", "None", ",", "directed", "=", "False", ")", ":", "if", "directed", ":", "G", "=", "nx", ".", "DiGraph", "(", ")", "else", ":", "G", "=", "nx", ".", "Graph", "(", ")", "G", ".", "add_nodes_from", "(", "range", "(", "n", ")", ")", "G", ".", "name", "=", "(", "'gnm_random_graph(%s,%s)'", "%", "(", "n", ",", "m", ")", ")", "if", "(", "seed", "is", "not", "None", ")", ":", "random", ".", "seed", "(", "seed", ")", "if", "(", "n", "==", "1", ")", ":", "return", "G", "max_edges", "=", "(", "n", "*", "(", "n", "-", "1", ")", ")", "if", "(", "not", "directed", ")", ":", "max_edges", "/=", "2.0", "if", "(", "m", ">=", "max_edges", ")", ":", "return", "complete_graph", "(", "n", ",", "create_using", "=", "G", ")", "nlist", "=", "list", "(", "G", ")", "edge_count", "=", "0", "while", "(", "edge_count", "<", "m", ")", ":", "u", "=", "random", ".", "choice", "(", "nlist", ")", "v", "=", "random", ".", "choice", "(", "nlist", ")", "if", "(", "(", "u", "==", "v", ")", "or", "G", ".", "has_edge", "(", "u", ",", "v", ")", ")", ":", "continue", "else", ":", "G", ".", "add_edge", "(", "u", ",", "v", ")", "edge_count", "=", "(", "edge_count", "+", "1", ")", "return", "G" ]
returns a g_{n,m} random graph : n nodes and m uniformly chosen edges .
train
false
42,276
def programme_hours():
    """Volunteer programme hours controller — used for imports & reports.

    Web2py/S3 controller: relies on request-scoped globals (``session``,
    ``auth``, ``s3``, ``resourcename``).  The prep hook denies access
    whenever an hrm mode is set in the session.
    """
    mode = session.s3.hrm.mode
    def prep(r):
        # Non-None mode means restricted personal mode — deny access.
        if (mode is not None):
            auth.permission.fail()
        return True
    s3.prep = prep
    return s3_rest_controller('hrm', resourcename, csv_stylesheet=('hrm', 'programme_hours.xsl'), csv_template=('hrm', 'programme_hours'))
[ "def", "programme_hours", "(", ")", ":", "mode", "=", "session", ".", "s3", ".", "hrm", ".", "mode", "def", "prep", "(", "r", ")", ":", "if", "(", "mode", "is", "not", "None", ")", ":", "auth", ".", "permission", ".", "fail", "(", ")", "return", "True", "s3", ".", "prep", "=", "prep", "return", "s3_rest_controller", "(", "'hrm'", ",", "resourcename", ",", "csv_stylesheet", "=", "(", "'hrm'", ",", "'programme_hours.xsl'", ")", ",", "csv_template", "=", "(", "'hrm'", ",", "'programme_hours'", ")", ")" ]
volunteer programme hours controller - used for imports & reports .
train
false
42,277
def _encode_mapping(name, value, check_keys, opts):
    """Encode a mapping type as a BSON embedded document element.

    Raw document instances are passed through unchanged; otherwise each
    key/value pair is encoded and wrapped with the 0x03 type byte, the
    int32 total length, and the trailing NUL.
    """
    if _raw_document_class(value):
        # Already-serialized document: splice its raw bytes in directly.
        return '\x03' + name + value.raw
    data = ''.join(_element_to_bson(key, val, check_keys, opts) for (key, val) in iteritems(value))
    # +5 accounts for the 4-byte length prefix plus the terminating NUL.
    return '\x03' + name + _PACK_INT(len(data) + 5) + data + '\x00'
[ "def", "_encode_mapping", "(", "name", ",", "value", ",", "check_keys", ",", "opts", ")", ":", "if", "_raw_document_class", "(", "value", ")", ":", "return", "(", "(", "'\\x03'", "+", "name", ")", "+", "value", ".", "raw", ")", "data", "=", "''", ".", "join", "(", "[", "_element_to_bson", "(", "key", ",", "val", ",", "check_keys", ",", "opts", ")", "for", "(", "key", ",", "val", ")", "in", "iteritems", "(", "value", ")", "]", ")", "return", "(", "(", "(", "(", "'\\x03'", "+", "name", ")", "+", "_PACK_INT", "(", "(", "len", "(", "data", ")", "+", "5", ")", ")", ")", "+", "data", ")", "+", "'\\x00'", ")" ]
encode a mapping type .
train
false
42,278
def _orm_annotate(element, exclude=None):
    """Deep-copy *element* with the ``_orm_adapt`` annotation applied.

    Delegates to ``sql_util._deep_annotate``; elements matched by
    *exclude* are left unannotated.
    """
    annotations = {'_orm_adapt': True}
    return sql_util._deep_annotate(element, annotations, exclude)
[ "def", "_orm_annotate", "(", "element", ",", "exclude", "=", "None", ")", ":", "return", "sql_util", ".", "_deep_annotate", "(", "element", ",", "{", "'_orm_adapt'", ":", "True", "}", ",", "exclude", ")" ]
deep copy the given clauseelement .
train
false
42,280
def getNewRepository():
    """Construct and return a fresh ExportRepository instance."""
    repository = ExportRepository()
    return repository
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
42,281
@aborts
def test_require_mixed_state_keys():
    """When given mixed-state keys, require() should abort.

    The @aborts decorator presumably asserts the wrapped call aborts —
    confirm at its definition.
    """
    require('foo', 'version')
[ "@", "aborts", "def", "test_require_mixed_state_keys", "(", ")", ":", "require", "(", "'foo'", ",", "'version'", ")" ]
when given mixed-state keys .
train
false
42,283
def load_pcoa_files(pcoa_dir):
    """Load PCoA result files from *pcoa_dir*.

    Every non-hidden file in the directory is parsed with
    ``parse_coords``; the first file (in ``listdir`` order) is also
    parsed separately as the "master" ordination.  Returns
    ``(master_pcoa, support_pcoas)``.  On an IOError while reading a
    support file, an error is written to stderr and the process exits
    with status 1.  (Parse errors other than IOError still propagate,
    as in the original.)
    """
    support_pcoas = []
    # Skip hidden files such as .DS_Store.
    pcoa_filenames = [fname for fname in listdir(pcoa_dir)
                      if (not fname.startswith('.'))]
    # NOTE(review): an empty directory raises IndexError here, as before.
    # Fix: close the master file handle (the original leaked it).
    with open(join(pcoa_dir, pcoa_filenames[0]), 'U') as master_file:
        master_pcoa = parse_coords(master_file)
    for fname in pcoa_filenames:
        try:
            # Fix: `with` guarantees the handle is closed even if
            # parse_coords raises (the original only closed on success).
            with open(join(pcoa_dir, fname), 'U') as f:
                support_pcoas.append(parse_coords(f))
        except IOError:
            stderr.write((('error loading support pcoa ' + fname) + '\n'))
            exit(1)
    return (master_pcoa, support_pcoas)
[ "def", "load_pcoa_files", "(", "pcoa_dir", ")", ":", "support_pcoas", "=", "[", "]", "pcoa_filenames", "=", "listdir", "(", "pcoa_dir", ")", "pcoa_filenames", "=", "[", "fname", "for", "fname", "in", "pcoa_filenames", "if", "(", "not", "fname", ".", "startswith", "(", "'.'", ")", ")", "]", "master_pcoa", "=", "open", "(", "join", "(", "pcoa_dir", ",", "pcoa_filenames", "[", "0", "]", ")", ",", "'U'", ")", "master_pcoa", "=", "parse_coords", "(", "master_pcoa", ")", "for", "fname", "in", "pcoa_filenames", ":", "try", ":", "f", "=", "open", "(", "join", "(", "pcoa_dir", ",", "fname", ")", ",", "'U'", ")", "pcoa_res", "=", "parse_coords", "(", "f", ")", "support_pcoas", ".", "append", "(", "pcoa_res", ")", "f", ".", "close", "(", ")", "except", "IOError", "as", "err", ":", "stderr", ".", "write", "(", "(", "(", "'error loading support pcoa '", "+", "fname", ")", "+", "'\\n'", ")", ")", "exit", "(", "1", ")", "return", "(", "master_pcoa", ",", "support_pcoas", ")" ]
loads pcoa files from filepaths .
train
false
42,284
def connection_from_url(url, **kw):
    """Given a URL, return the appropriate connection pool for it.

    Parses scheme/host/port out of *url* and builds an
    ``HTTPSConnectionPool`` for https URLs, otherwise an
    ``HTTPConnectionPool``; extra keyword arguments are forwarded.
    """
    scheme, host, port = get_host(url)
    pool_cls = HTTPSConnectionPool if scheme == 'https' else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)
[ "def", "connection_from_url", "(", "url", ",", "**", "kw", ")", ":", "(", "scheme", ",", "host", ",", "port", ")", "=", "get_host", "(", "url", ")", "if", "(", "scheme", "==", "'https'", ")", ":", "return", "HTTPSConnectionPool", "(", "host", ",", "port", "=", "port", ",", "**", "kw", ")", "else", ":", "return", "HTTPConnectionPool", "(", "host", ",", "port", "=", "port", ",", "**", "kw", ")" ]
given a url .
train
true
42,285
def _untranslate_snapshot_summary_view(context, snapshot): d = {} d['id'] = snapshot.id d['status'] = snapshot.status d['progress'] = snapshot.progress d['size'] = snapshot.size d['created_at'] = snapshot.created_at d['display_name'] = snapshot.display_name d['display_description'] = snapshot.display_description d['volume_id'] = snapshot.volume_id d['project_id'] = snapshot.project_id d['volume_size'] = snapshot.size return d
[ "def", "_untranslate_snapshot_summary_view", "(", "context", ",", "snapshot", ")", ":", "d", "=", "{", "}", "d", "[", "'id'", "]", "=", "snapshot", ".", "id", "d", "[", "'status'", "]", "=", "snapshot", ".", "status", "d", "[", "'progress'", "]", "=", "snapshot", ".", "progress", "d", "[", "'size'", "]", "=", "snapshot", ".", "size", "d", "[", "'created_at'", "]", "=", "snapshot", ".", "created_at", "d", "[", "'display_name'", "]", "=", "snapshot", ".", "display_name", "d", "[", "'display_description'", "]", "=", "snapshot", ".", "display_description", "d", "[", "'volume_id'", "]", "=", "snapshot", ".", "volume_id", "d", "[", "'project_id'", "]", "=", "snapshot", ".", "project_id", "d", "[", "'volume_size'", "]", "=", "snapshot", ".", "size", "return", "d" ]
maps keys for snapshots summary view .
train
false
42,287
def is_enabled_and_bind_by_default():
    """Check if the host has IPv6 support enabled and binds it by default.

    Reads /proc/sys/net/ipv6/conf/default/disable_ipv6 ("0" means
    enabled); a missing file is treated as no IPv6 support.  The result
    is cached in the module-level ``_IS_IPV6_ENABLED`` global, so the
    file is only probed on the first call.  Logs an informational
    message when IPv6 is unavailable.
    """
    global _IS_IPV6_ENABLED
    if (_IS_IPV6_ENABLED is None):
        # First call: probe the kernel's default-interface setting.
        disabled_ipv6_path = '/proc/sys/net/ipv6/conf/default/disable_ipv6'
        if os.path.exists(disabled_ipv6_path):
            with open(disabled_ipv6_path, 'r') as f:
                disabled = f.read().strip()
            _IS_IPV6_ENABLED = (disabled == '0')
        else:
            # No /proc entry: treat IPv6 as unavailable.
            _IS_IPV6_ENABLED = False
        if (not _IS_IPV6_ENABLED):
            LOG.info(_LI('IPv6 not present or configured not to bind to new interfaces on this system. Please ensure IPv6 is enabled and /proc/sys/net/ipv6/conf/default/disable_ipv6 is set to 0 to enable IPv6.'))
    return _IS_IPV6_ENABLED
[ "def", "is_enabled_and_bind_by_default", "(", ")", ":", "global", "_IS_IPV6_ENABLED", "if", "(", "_IS_IPV6_ENABLED", "is", "None", ")", ":", "disabled_ipv6_path", "=", "'/proc/sys/net/ipv6/conf/default/disable_ipv6'", "if", "os", ".", "path", ".", "exists", "(", "disabled_ipv6_path", ")", ":", "with", "open", "(", "disabled_ipv6_path", ",", "'r'", ")", "as", "f", ":", "disabled", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "_IS_IPV6_ENABLED", "=", "(", "disabled", "==", "'0'", ")", "else", ":", "_IS_IPV6_ENABLED", "=", "False", "if", "(", "not", "_IS_IPV6_ENABLED", ")", ":", "LOG", ".", "info", "(", "_LI", "(", "'IPv6 not present or configured not to bind to new interfaces on this system. Please ensure IPv6 is enabled and /proc/sys/net/ipv6/conf/default/disable_ipv6 is set to 0 to enable IPv6.'", ")", ")", "return", "_IS_IPV6_ENABLED" ]
check if host has the ipv6 support and is configured to bind ipv6 address to new interfaces by default .
train
false
42,289
def encipher_affine(msg, key, symbols=None, _inverse=False):
    """Perform affine-cipher encryption of *msg* with ``key = (a, b)``.

    Each symbol at alphabet index ``i`` maps to index ``(a*i + b) % N``.
    ``a`` must be coprime to the alphabet size ``N`` (asserted).  With
    ``_inverse`` the key is replaced by its decryption counterpart
    ``(a^-1, -b*a^-1) mod N``.
    """
    msg, _, A = _prep(msg, '', symbols)
    N = len(A)
    a, b = key
    assert gcd(a, N) == 1
    if _inverse:
        a_inv = mod_inverse(a, N)
        a, b = a_inv, (-b) * a_inv
    # Build the substitution alphabet by applying the affine map to each index.
    B = ''.join(A[(a * i + b) % N] for i in range(N))
    return translate(msg, A, B)
[ "def", "encipher_affine", "(", "msg", ",", "key", ",", "symbols", "=", "None", ",", "_inverse", "=", "False", ")", ":", "(", "msg", ",", "_", ",", "A", ")", "=", "_prep", "(", "msg", ",", "''", ",", "symbols", ")", "N", "=", "len", "(", "A", ")", "(", "a", ",", "b", ")", "=", "key", "assert", "(", "gcd", "(", "a", ",", "N", ")", "==", "1", ")", "if", "_inverse", ":", "c", "=", "mod_inverse", "(", "a", ",", "N", ")", "d", "=", "(", "(", "-", "b", ")", "*", "c", ")", "(", "a", ",", "b", ")", "=", "(", "c", ",", "d", ")", "B", "=", "''", ".", "join", "(", "[", "A", "[", "(", "(", "(", "a", "*", "i", ")", "+", "b", ")", "%", "N", ")", "]", "for", "i", "in", "range", "(", "N", ")", "]", ")", "return", "translate", "(", "msg", ",", "A", ",", "B", ")" ]
performs the affine cipher encryption on plaintext msg .
train
false
42,290
def run_as_root(command, *args, **kwargs):
    """Run a remote command as the root user.

    Uses plain ``run`` when already connected as root, otherwise
    ``sudo``; all arguments are forwarded unchanged.
    """
    runner = run if env.user == 'root' else sudo
    return runner(command, *args, **kwargs)
[ "def", "run_as_root", "(", "command", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "env", ".", "user", "==", "'root'", ")", ":", "func", "=", "run", "else", ":", "func", "=", "sudo", "return", "func", "(", "command", ",", "*", "args", ",", "**", "kwargs", ")" ]
run a remote command as the root user .
train
false
42,291
def safe_ref(target, on_delete=None):
    """Return a *safe* weak reference to a callable *target*.

    Python 2 bound methods (detected via ``im_self``) are handled by
    ``get_bound_method_weakref``; anything else gets a plain
    ``weakref.ref``, with *on_delete* installed as the callback when it
    is callable.
    """
    if getattr(target, 'im_self', None) is not None:
        # Bound method (Python 2): a plain weakref would die immediately.
        assert hasattr(target, 'im_func'), ('safe_ref target %r has im_self, but no im_func, " "don\'t know how to create reference' % (target,))
        return get_bound_method_weakref(target=target, on_delete=on_delete)
    if callable(on_delete):
        return weakref.ref(target, on_delete)
    return weakref.ref(target)
[ "def", "safe_ref", "(", "target", ",", "on_delete", "=", "None", ")", ":", "if", "(", "getattr", "(", "target", ",", "'im_self'", ",", "None", ")", "is", "not", "None", ")", ":", "assert", "hasattr", "(", "target", ",", "'im_func'", ")", ",", "(", "'safe_ref target %r has im_self, but no im_func, \" \"don\\'t know how to create reference'", "%", "(", "target", ",", ")", ")", "return", "get_bound_method_weakref", "(", "target", "=", "target", ",", "on_delete", "=", "on_delete", ")", "if", "callable", "(", "on_delete", ")", ":", "return", "weakref", ".", "ref", "(", "target", ",", "on_delete", ")", "else", ":", "return", "weakref", ".", "ref", "(", "target", ")" ]
return a *safe* weak reference to a callable target .
train
false
42,292
def breaks_cusumolsresid(olsresidual, ddof=0): resid = olsresidual.ravel() nobs = len(resid) nobssigma2 = (resid ** 2).sum() if (ddof > 0): nobssigma2 = ((nobssigma2 / (nobs - ddof)) * nobs) B = (resid.cumsum() / np.sqrt(nobssigma2)) sup_b = np.abs(B).max() crit = [(1, 1.63), (5, 1.36), (10, 1.22)] pval = stats.kstwobign.sf(sup_b) return (sup_b, pval, crit)
[ "def", "breaks_cusumolsresid", "(", "olsresidual", ",", "ddof", "=", "0", ")", ":", "resid", "=", "olsresidual", ".", "ravel", "(", ")", "nobs", "=", "len", "(", "resid", ")", "nobssigma2", "=", "(", "resid", "**", "2", ")", ".", "sum", "(", ")", "if", "(", "ddof", ">", "0", ")", ":", "nobssigma2", "=", "(", "(", "nobssigma2", "/", "(", "nobs", "-", "ddof", ")", ")", "*", "nobs", ")", "B", "=", "(", "resid", ".", "cumsum", "(", ")", "/", "np", ".", "sqrt", "(", "nobssigma2", ")", ")", "sup_b", "=", "np", ".", "abs", "(", "B", ")", ".", "max", "(", ")", "crit", "=", "[", "(", "1", ",", "1.63", ")", ",", "(", "5", ",", "1.36", ")", ",", "(", "10", ",", "1.22", ")", "]", "pval", "=", "stats", ".", "kstwobign", ".", "sf", "(", "sup_b", ")", "return", "(", "sup_b", ",", "pval", ",", "crit", ")" ]
cusum test for parameter stability based on ols residuals parameters olsresiduals : ndarray array of residuals from an ols estimation ddof : int number of parameters in the ols estimation .
train
false
42,293
def sendStayAwake(): return False
[ "def", "sendStayAwake", "(", ")", ":", "return", "False" ]
sends a signal to your system to indicate that the computer is in use and should not sleep .
train
false
42,294
def test_hive_site_null_hs2krb(): tmpdir = tempfile.mkdtemp() saved = None try: class Getter(object, ): def get(self): return tmpdir xml = hive_site_xml(is_local=True, use_sasl=False, hs2_kerberos_principal=None) file(os.path.join(tmpdir, 'hive-site.xml'), 'w').write(xml) beeswax.hive_site.reset() saved = beeswax.conf.HIVE_CONF_DIR beeswax.conf.HIVE_CONF_DIR = Getter() assert_equal(beeswax.hive_site.get_conf()['hive.metastore.warehouse.dir'], u'/abc') assert_equal(beeswax.hive_site.get_hiveserver2_kerberos_principal('localhost'), None) assert_equal(beeswax.hive_site.get_hiveserver2_authentication(), 'NOSASL') finally: beeswax.hive_site.reset() if (saved is not None): beeswax.conf.HIVE_CONF_DIR = saved shutil.rmtree(tmpdir)
[ "def", "test_hive_site_null_hs2krb", "(", ")", ":", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "saved", "=", "None", "try", ":", "class", "Getter", "(", "object", ",", ")", ":", "def", "get", "(", "self", ")", ":", "return", "tmpdir", "xml", "=", "hive_site_xml", "(", "is_local", "=", "True", ",", "use_sasl", "=", "False", ",", "hs2_kerberos_principal", "=", "None", ")", "file", "(", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'hive-site.xml'", ")", ",", "'w'", ")", ".", "write", "(", "xml", ")", "beeswax", ".", "hive_site", ".", "reset", "(", ")", "saved", "=", "beeswax", ".", "conf", ".", "HIVE_CONF_DIR", "beeswax", ".", "conf", ".", "HIVE_CONF_DIR", "=", "Getter", "(", ")", "assert_equal", "(", "beeswax", ".", "hive_site", ".", "get_conf", "(", ")", "[", "'hive.metastore.warehouse.dir'", "]", ",", "u'/abc'", ")", "assert_equal", "(", "beeswax", ".", "hive_site", ".", "get_hiveserver2_kerberos_principal", "(", "'localhost'", ")", ",", "None", ")", "assert_equal", "(", "beeswax", ".", "hive_site", ".", "get_hiveserver2_authentication", "(", ")", ",", "'NOSASL'", ")", "finally", ":", "beeswax", ".", "hive_site", ".", "reset", "(", ")", "if", "(", "saved", "is", "not", "None", ")", ":", "beeswax", ".", "conf", ".", "HIVE_CONF_DIR", "=", "saved", "shutil", ".", "rmtree", "(", "tmpdir", ")" ]
test hive-site parsing with null hs2 kerberos principal .
train
false
42,295
def rooted_glob(root, glob): return remove_root(root, sorted(glob2.glob('{root}/{glob}'.format(root=root, glob=glob))))
[ "def", "rooted_glob", "(", "root", ",", "glob", ")", ":", "return", "remove_root", "(", "root", ",", "sorted", "(", "glob2", ".", "glob", "(", "'{root}/{glob}'", ".", "format", "(", "root", "=", "root", ",", "glob", "=", "glob", ")", ")", ")", ")" ]
returns the results of running glob rooted in the directory root .
train
false
42,297
def _lg_directed(G, create_using=None): if (create_using is None): L = G.__class__() else: L = create_using get_edges = _edge_func(G) for from_node in get_edges(): L.add_node(from_node) for to_node in get_edges(from_node[1]): L.add_edge(from_node, to_node) return L
[ "def", "_lg_directed", "(", "G", ",", "create_using", "=", "None", ")", ":", "if", "(", "create_using", "is", "None", ")", ":", "L", "=", "G", ".", "__class__", "(", ")", "else", ":", "L", "=", "create_using", "get_edges", "=", "_edge_func", "(", "G", ")", "for", "from_node", "in", "get_edges", "(", ")", ":", "L", ".", "add_node", "(", "from_node", ")", "for", "to_node", "in", "get_edges", "(", "from_node", "[", "1", "]", ")", ":", "L", ".", "add_edge", "(", "from_node", ",", "to_node", ")", "return", "L" ]
return the line graph l of the digraph g .
train
false
42,298
def diagnose_configurator_problem(cfg_type, requested, plugins): if requested: if (requested not in plugins): msg = 'The requested {0} plugin does not appear to be installed'.format(requested) else: msg = 'The {0} plugin is not working; there may be problems with your existing configuration.\nThe error was: {1!r}'.format(requested, plugins[requested].problem) elif (cfg_type == 'installer'): from certbot.cli import cli_command msg = 'Certbot doesn\'t know how to automatically configure the web server on this system. However, it can still get a certificate for you. Please run "{0} certonly" to do so. You\'ll need to manually configure your web server to use the resulting certificate.'.format(cli_command) else: msg = '{0} could not be determined or is not installed'.format(cfg_type) raise errors.PluginSelectionError(msg)
[ "def", "diagnose_configurator_problem", "(", "cfg_type", ",", "requested", ",", "plugins", ")", ":", "if", "requested", ":", "if", "(", "requested", "not", "in", "plugins", ")", ":", "msg", "=", "'The requested {0} plugin does not appear to be installed'", ".", "format", "(", "requested", ")", "else", ":", "msg", "=", "'The {0} plugin is not working; there may be problems with your existing configuration.\\nThe error was: {1!r}'", ".", "format", "(", "requested", ",", "plugins", "[", "requested", "]", ".", "problem", ")", "elif", "(", "cfg_type", "==", "'installer'", ")", ":", "from", "certbot", ".", "cli", "import", "cli_command", "msg", "=", "'Certbot doesn\\'t know how to automatically configure the web server on this system. However, it can still get a certificate for you. Please run \"{0} certonly\" to do so. You\\'ll need to manually configure your web server to use the resulting certificate.'", ".", "format", "(", "cli_command", ")", "else", ":", "msg", "=", "'{0} could not be determined or is not installed'", ".", "format", "(", "cfg_type", ")", "raise", "errors", ".", "PluginSelectionError", "(", "msg", ")" ]
raise the most helpful error message about a plugin being unavailable .
train
false
42,300
def getRotationMatrixByPolar(arrayDictionary, polar, polarLength): polar /= polarLength arrayDictionary['_arrayRotation'] = math.degrees(math.atan2(polar.imag, polar.real)) return matrix.Matrix(matrix.getDiagonalSwitchedTetragridByPolar([0, 1], polar))
[ "def", "getRotationMatrixByPolar", "(", "arrayDictionary", ",", "polar", ",", "polarLength", ")", ":", "polar", "/=", "polarLength", "arrayDictionary", "[", "'_arrayRotation'", "]", "=", "math", ".", "degrees", "(", "math", ".", "atan2", "(", "polar", ".", "imag", ",", "polar", ".", "real", ")", ")", "return", "matrix", ".", "Matrix", "(", "matrix", ".", "getDiagonalSwitchedTetragridByPolar", "(", "[", "0", ",", "1", "]", ",", "polar", ")", ")" ]
get rotationmatrix by polar and polarlength .
train
false
42,301
def set_defaults(): global dataset_conf_path, dataset_data_path, root_conf_path, root_data_path, user_conf_path, super_powers root_conf_path = '/etc/pylearn/' root_data_path = '/usr/share/pylearn/dataset/' user_conf_path = os.path.join(os.environ['HOME'], '.local/share/pylearn/') user_data_path = os.path.join(os.environ['HOME'], '.local/share/pylearn/dataset/') if has_super_powers(): dataset_conf_path = root_conf_path dataset_data_path = root_data_path super_powers = True else: dataset_conf_path = user_conf_path dataset_data_path = user_data_path super_powers = False if (not os.path.exists(dataset_conf_path)): os.makedirs(dataset_conf_path) if (not os.path.exists(os.path.join(dataset_conf_path, dataset_sources))): atomic_update(os.path.join(dataset_web, dataset_sources), os.path.join(dataset_conf_path, dataset_sources), progress_bar) if (not os.path.exists(dataset_data_path)): os.makedirs(dataset_data_path) read_packages_sources() read_installed_packages_list()
[ "def", "set_defaults", "(", ")", ":", "global", "dataset_conf_path", ",", "dataset_data_path", ",", "root_conf_path", ",", "root_data_path", ",", "user_conf_path", ",", "super_powers", "root_conf_path", "=", "'/etc/pylearn/'", "root_data_path", "=", "'/usr/share/pylearn/dataset/'", "user_conf_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "environ", "[", "'HOME'", "]", ",", "'.local/share/pylearn/'", ")", "user_data_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "environ", "[", "'HOME'", "]", ",", "'.local/share/pylearn/dataset/'", ")", "if", "has_super_powers", "(", ")", ":", "dataset_conf_path", "=", "root_conf_path", "dataset_data_path", "=", "root_data_path", "super_powers", "=", "True", "else", ":", "dataset_conf_path", "=", "user_conf_path", "dataset_data_path", "=", "user_data_path", "super_powers", "=", "False", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "dataset_conf_path", ")", ")", ":", "os", ".", "makedirs", "(", "dataset_conf_path", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "dataset_conf_path", ",", "dataset_sources", ")", ")", ")", ":", "atomic_update", "(", "os", ".", "path", ".", "join", "(", "dataset_web", ",", "dataset_sources", ")", ",", "os", ".", "path", ".", "join", "(", "dataset_conf_path", ",", "dataset_sources", ")", ",", "progress_bar", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "dataset_data_path", ")", ")", ":", "os", ".", "makedirs", "(", "dataset_data_path", ")", "read_packages_sources", "(", ")", "read_installed_packages_list", "(", ")" ]
update options with default values .
train
false
42,302
def _check_size(size): if (not isinstance(size, (list, tuple))): raise ValueError('Size must be a tuple') if (len(size) != 2): raise ValueError('Size must be a tuple of length 2') if ((size[0] < 0) or (size[1] < 0)): raise ValueError('Width and Height must be => 0') return True
[ "def", "_check_size", "(", "size", ")", ":", "if", "(", "not", "isinstance", "(", "size", ",", "(", "list", ",", "tuple", ")", ")", ")", ":", "raise", "ValueError", "(", "'Size must be a tuple'", ")", "if", "(", "len", "(", "size", ")", "!=", "2", ")", ":", "raise", "ValueError", "(", "'Size must be a tuple of length 2'", ")", "if", "(", "(", "size", "[", "0", "]", "<", "0", ")", "or", "(", "size", "[", "1", "]", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'Width and Height must be => 0'", ")", "return", "True" ]
common check to enforce type and sanity check on size tuples .
train
false
42,303
def pid_invoked_with_cmdline(pid, expected_cmd): cmd = get_cmdline_from_pid(pid) return cmd_matches_expected(cmd, expected_cmd)
[ "def", "pid_invoked_with_cmdline", "(", "pid", ",", "expected_cmd", ")", ":", "cmd", "=", "get_cmdline_from_pid", "(", "pid", ")", "return", "cmd_matches_expected", "(", "cmd", ",", "expected_cmd", ")" ]
validate process with given pid is running with provided parameters .
train
false
42,304
def cc_binary(name, srcs=[], deps=[], warning='yes', defs=[], incs=[], embed_version=True, optimize=[], dynamic_link=False, extra_cppflags=[], extra_linkflags=[], export_dynamic=False, **kwargs): cc_binary_target = CcBinary(name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, extra_cppflags, extra_linkflags, export_dynamic, blade.blade, kwargs) blade.blade.register_target(cc_binary_target)
[ "def", "cc_binary", "(", "name", ",", "srcs", "=", "[", "]", ",", "deps", "=", "[", "]", ",", "warning", "=", "'yes'", ",", "defs", "=", "[", "]", ",", "incs", "=", "[", "]", ",", "embed_version", "=", "True", ",", "optimize", "=", "[", "]", ",", "dynamic_link", "=", "False", ",", "extra_cppflags", "=", "[", "]", ",", "extra_linkflags", "=", "[", "]", ",", "export_dynamic", "=", "False", ",", "**", "kwargs", ")", ":", "cc_binary_target", "=", "CcBinary", "(", "name", ",", "srcs", ",", "deps", ",", "warning", ",", "defs", ",", "incs", ",", "embed_version", ",", "optimize", ",", "dynamic_link", ",", "extra_cppflags", ",", "extra_linkflags", ",", "export_dynamic", ",", "blade", ".", "blade", ",", "kwargs", ")", "blade", ".", "blade", ".", "register_target", "(", "cc_binary_target", ")" ]
cc_binary target .
train
false
42,307
def test_simple_two_model_class_compose_1d(): S1 = (Shift | Scale) assert issubclass(S1, Model) assert (S1.n_inputs == 1) assert (S1.n_outputs == 1) s1 = S1(2, 3) assert (s1(1) == 9.0) S2 = (Scale | Shift) assert issubclass(S2, Model) assert (S2.n_inputs == 1) assert (S2.n_outputs == 1) s2 = S2(2, 3) assert (s2(1) == 5.0) assert_array_equal(s2([1, 2, 3]), [5.0, 7.0, 9.0])
[ "def", "test_simple_two_model_class_compose_1d", "(", ")", ":", "S1", "=", "(", "Shift", "|", "Scale", ")", "assert", "issubclass", "(", "S1", ",", "Model", ")", "assert", "(", "S1", ".", "n_inputs", "==", "1", ")", "assert", "(", "S1", ".", "n_outputs", "==", "1", ")", "s1", "=", "S1", "(", "2", ",", "3", ")", "assert", "(", "s1", "(", "1", ")", "==", "9.0", ")", "S2", "=", "(", "Scale", "|", "Shift", ")", "assert", "issubclass", "(", "S2", ",", "Model", ")", "assert", "(", "S2", ".", "n_inputs", "==", "1", ")", "assert", "(", "S2", ".", "n_outputs", "==", "1", ")", "s2", "=", "S2", "(", "2", ",", "3", ")", "assert", "(", "s2", "(", "1", ")", "==", "5.0", ")", "assert_array_equal", "(", "s2", "(", "[", "1", ",", "2", ",", "3", "]", ")", ",", "[", "5.0", ",", "7.0", ",", "9.0", "]", ")" ]
shift and scale are two of the simplest models to test model composition with .
train
false
42,309
def arma_generate_sample(ar, ma, nsample, sigma=1, distrvs=np.random.randn, burnin=0): eta = (sigma * distrvs((nsample + burnin))) return signal.lfilter(ma, ar, eta)[burnin:]
[ "def", "arma_generate_sample", "(", "ar", ",", "ma", ",", "nsample", ",", "sigma", "=", "1", ",", "distrvs", "=", "np", ".", "random", ".", "randn", ",", "burnin", "=", "0", ")", ":", "eta", "=", "(", "sigma", "*", "distrvs", "(", "(", "nsample", "+", "burnin", ")", ")", ")", "return", "signal", ".", "lfilter", "(", "ma", ",", "ar", ",", "eta", ")", "[", "burnin", ":", "]" ]
generate a random sample of an arma process parameters ar : array_like .
train
false
42,310
def _get_logical(source_lines, result, logical_start, logical_end): row = (result[u'line'] - 1) col = (result[u'column'] - 1) ls = None le = None for i in range(0, len(logical_start), 1): assert logical_end x = logical_end[i] if ((x[0] > row) or ((x[0] == row) and (x[1] > col))): le = x ls = logical_start[i] break if (ls is None): return None original = source_lines[ls[0]:(le[0] + 1)] return (ls, le, original)
[ "def", "_get_logical", "(", "source_lines", ",", "result", ",", "logical_start", ",", "logical_end", ")", ":", "row", "=", "(", "result", "[", "u'line'", "]", "-", "1", ")", "col", "=", "(", "result", "[", "u'column'", "]", "-", "1", ")", "ls", "=", "None", "le", "=", "None", "for", "i", "in", "range", "(", "0", ",", "len", "(", "logical_start", ")", ",", "1", ")", ":", "assert", "logical_end", "x", "=", "logical_end", "[", "i", "]", "if", "(", "(", "x", "[", "0", "]", ">", "row", ")", "or", "(", "(", "x", "[", "0", "]", "==", "row", ")", "and", "(", "x", "[", "1", "]", ">", "col", ")", ")", ")", ":", "le", "=", "x", "ls", "=", "logical_start", "[", "i", "]", "break", "if", "(", "ls", "is", "None", ")", ":", "return", "None", "original", "=", "source_lines", "[", "ls", "[", "0", "]", ":", "(", "le", "[", "0", "]", "+", "1", ")", "]", "return", "(", "ls", ",", "le", ",", "original", ")" ]
return the logical line corresponding to the result .
train
true
42,312
def update_available(name): return (name in _get_available())
[ "def", "update_available", "(", "name", ")", ":", "return", "(", "name", "in", "_get_available", "(", ")", ")" ]
check whether or not an update is available with a given name .
train
false
42,317
def _lazyselect(condlist, choicelist, arrays, default=0): arrays = np.broadcast_arrays(*arrays) tcode = np.mintypecode([a.dtype.char for a in arrays]) out = _valarray(np.shape(arrays[0]), value=default, typecode=tcode) for index in range(len(condlist)): (func, cond) = (choicelist[index], condlist[index]) if np.all((cond is False)): continue (cond, _) = np.broadcast_arrays(cond, arrays[0]) temp = tuple((np.extract(cond, arr) for arr in arrays)) np.place(out, cond, func(*temp)) return out
[ "def", "_lazyselect", "(", "condlist", ",", "choicelist", ",", "arrays", ",", "default", "=", "0", ")", ":", "arrays", "=", "np", ".", "broadcast_arrays", "(", "*", "arrays", ")", "tcode", "=", "np", ".", "mintypecode", "(", "[", "a", ".", "dtype", ".", "char", "for", "a", "in", "arrays", "]", ")", "out", "=", "_valarray", "(", "np", ".", "shape", "(", "arrays", "[", "0", "]", ")", ",", "value", "=", "default", ",", "typecode", "=", "tcode", ")", "for", "index", "in", "range", "(", "len", "(", "condlist", ")", ")", ":", "(", "func", ",", "cond", ")", "=", "(", "choicelist", "[", "index", "]", ",", "condlist", "[", "index", "]", ")", "if", "np", ".", "all", "(", "(", "cond", "is", "False", ")", ")", ":", "continue", "(", "cond", ",", "_", ")", "=", "np", ".", "broadcast_arrays", "(", "cond", ",", "arrays", "[", "0", "]", ")", "temp", "=", "tuple", "(", "(", "np", ".", "extract", "(", "cond", ",", "arr", ")", "for", "arr", "in", "arrays", ")", ")", "np", ".", "place", "(", "out", ",", "cond", ",", "func", "(", "*", "temp", ")", ")", "return", "out" ]
mimic np .
train
false
42,318
def knapsack_rep(weights, values, W): k = ([0] * (W + 1)) for w in range(1, (W + 1)): k[w] = max([((k[(w - i)] + values[i]) if (weights[i] <= w) else 0) for i in range(len(weights))]) return k[(-1)]
[ "def", "knapsack_rep", "(", "weights", ",", "values", ",", "W", ")", ":", "k", "=", "(", "[", "0", "]", "*", "(", "W", "+", "1", ")", ")", "for", "w", "in", "range", "(", "1", ",", "(", "W", "+", "1", ")", ")", ":", "k", "[", "w", "]", "=", "max", "(", "[", "(", "(", "k", "[", "(", "w", "-", "i", ")", "]", "+", "values", "[", "i", "]", ")", "if", "(", "weights", "[", "i", "]", "<=", "w", ")", "else", "0", ")", "for", "i", "in", "range", "(", "len", "(", "weights", ")", ")", "]", ")", "return", "k", "[", "(", "-", "1", ")", "]" ]
knapsack with repetition .
train
false
42,320
def test_render_data_uri(Chart): chart = Chart(fill=True) chart.add(u('\xc3\xa9\xc3\xa9\xc3\xa9'), [1, 2, 3]) chart.add(u('\xc3\xa8\xc3\xa8\xc3\xa8'), [10, 21, 5]) assert chart.render_data_uri().startswith('data:image/svg+xml;charset=utf-8;base64,')
[ "def", "test_render_data_uri", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", "fill", "=", "True", ")", "chart", ".", "add", "(", "u", "(", "'\\xc3\\xa9\\xc3\\xa9\\xc3\\xa9'", ")", ",", "[", "1", ",", "2", ",", "3", "]", ")", "chart", ".", "add", "(", "u", "(", "'\\xc3\\xa8\\xc3\\xa8\\xc3\\xa8'", ")", ",", "[", "10", ",", "21", ",", "5", "]", ")", "assert", "chart", ".", "render_data_uri", "(", ")", ".", "startswith", "(", "'data:image/svg+xml;charset=utf-8;base64,'", ")" ]
test the render data uri .
train
false
42,322
def _filer_file_from_upload(model, request, path, upload_data, sha1=None): if sha1: upload = model.objects.filter(sha1=sha1).first() if upload: return upload file_form_cls = modelform_factory(model=model, fields=('original_filename', 'owner', 'file')) upload_form = file_form_cls(data={'original_filename': upload_data.name, 'owner': (request.user.pk if (request and (not request.user.is_anonymous())) else None)}, files={'file': upload_data}) upload = upload_form.save(commit=False) upload.is_public = True if isinstance(path, Folder): upload.folder = path else: upload.folder = filer_folder_from_path(path) upload.save() return upload
[ "def", "_filer_file_from_upload", "(", "model", ",", "request", ",", "path", ",", "upload_data", ",", "sha1", "=", "None", ")", ":", "if", "sha1", ":", "upload", "=", "model", ".", "objects", ".", "filter", "(", "sha1", "=", "sha1", ")", ".", "first", "(", ")", "if", "upload", ":", "return", "upload", "file_form_cls", "=", "modelform_factory", "(", "model", "=", "model", ",", "fields", "=", "(", "'original_filename'", ",", "'owner'", ",", "'file'", ")", ")", "upload_form", "=", "file_form_cls", "(", "data", "=", "{", "'original_filename'", ":", "upload_data", ".", "name", ",", "'owner'", ":", "(", "request", ".", "user", ".", "pk", "if", "(", "request", "and", "(", "not", "request", ".", "user", ".", "is_anonymous", "(", ")", ")", ")", "else", "None", ")", "}", ",", "files", "=", "{", "'file'", ":", "upload_data", "}", ")", "upload", "=", "upload_form", ".", "save", "(", "commit", "=", "False", ")", "upload", ".", "is_public", "=", "True", "if", "isinstance", "(", "path", ",", "Folder", ")", ":", "upload", ".", "folder", "=", "path", "else", ":", "upload", ".", "folder", "=", "filer_folder_from_path", "(", "path", ")", "upload", ".", "save", "(", ")", "return", "upload" ]
create some sort of filer file from the given upload data .
train
false
42,323
def to_hexstr(str_): return ''.join([('\\x%02x' % ord(i)) for i in bytes_iterator(str_)])
[ "def", "to_hexstr", "(", "str_", ")", ":", "return", "''", ".", "join", "(", "[", "(", "'\\\\x%02x'", "%", "ord", "(", "i", ")", ")", "for", "i", "in", "bytes_iterator", "(", "str_", ")", "]", ")" ]
convert a binary string to hex escape format .
train
false
42,324
def make_histogram(points, bucket_size): return Counter((bucketize(point, bucket_size) for point in points))
[ "def", "make_histogram", "(", "points", ",", "bucket_size", ")", ":", "return", "Counter", "(", "(", "bucketize", "(", "point", ",", "bucket_size", ")", "for", "point", "in", "points", ")", ")" ]
buckets the points and counts how many in each bucket .
train
false
42,325
def even_ext(x, n, axis=(-1)): if (n < 1): return x if (n > (x.shape[axis] - 1)): raise ValueError((('The extension length n (%d) is too big. ' + 'It must not exceed x.shape[axis]-1, which is %d.') % (n, (x.shape[axis] - 1)))) left_ext = axis_slice(x, start=n, stop=0, step=(-1), axis=axis) right_ext = axis_slice(x, start=(-2), stop=(- (n + 2)), step=(-1), axis=axis) ext = np.concatenate((left_ext, x, right_ext), axis=axis) return ext
[ "def", "even_ext", "(", "x", ",", "n", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "if", "(", "n", "<", "1", ")", ":", "return", "x", "if", "(", "n", ">", "(", "x", ".", "shape", "[", "axis", "]", "-", "1", ")", ")", ":", "raise", "ValueError", "(", "(", "(", "'The extension length n (%d) is too big. '", "+", "'It must not exceed x.shape[axis]-1, which is %d.'", ")", "%", "(", "n", ",", "(", "x", ".", "shape", "[", "axis", "]", "-", "1", ")", ")", ")", ")", "left_ext", "=", "axis_slice", "(", "x", ",", "start", "=", "n", ",", "stop", "=", "0", ",", "step", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", "right_ext", "=", "axis_slice", "(", "x", ",", "start", "=", "(", "-", "2", ")", ",", "stop", "=", "(", "-", "(", "n", "+", "2", ")", ")", ",", "step", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")", "ext", "=", "np", ".", "concatenate", "(", "(", "left_ext", ",", "x", ",", "right_ext", ")", ",", "axis", "=", "axis", ")", "return", "ext" ]
create an ndarray that is an even extension of x along an axis .
train
false
42,327
def diff_pos(string1, string2): for (count, c) in enumerate(string1): if (len(string2) <= count): return count if (string2[count] != c): return count
[ "def", "diff_pos", "(", "string1", ",", "string2", ")", ":", "for", "(", "count", ",", "c", ")", "in", "enumerate", "(", "string1", ")", ":", "if", "(", "len", "(", "string2", ")", "<=", "count", ")", ":", "return", "count", "if", "(", "string2", "[", "count", "]", "!=", "c", ")", ":", "return", "count" ]
returns first position where string1 and string2 differ .
train
false
42,328
def parse_datetime(value): match = datetime_re.match(value) if match: kw = match.groupdict() if kw['microsecond']: kw['microsecond'] = kw['microsecond'].ljust(6, '0') tzinfo = kw.pop('tzinfo') if (tzinfo == 'Z'): tzinfo = utc elif (tzinfo is not None): offset_mins = (int(tzinfo[(-2):]) if (len(tzinfo) > 3) else 0) offset = ((60 * int(tzinfo[1:3])) + offset_mins) if (tzinfo[0] == '-'): offset = (- offset) tzinfo = get_fixed_timezone(offset) kw = {k: int(v) for (k, v) in kw.items() if (v is not None)} kw['tzinfo'] = tzinfo return datetime.datetime(**kw)
[ "def", "parse_datetime", "(", "value", ")", ":", "match", "=", "datetime_re", ".", "match", "(", "value", ")", "if", "match", ":", "kw", "=", "match", ".", "groupdict", "(", ")", "if", "kw", "[", "'microsecond'", "]", ":", "kw", "[", "'microsecond'", "]", "=", "kw", "[", "'microsecond'", "]", ".", "ljust", "(", "6", ",", "'0'", ")", "tzinfo", "=", "kw", ".", "pop", "(", "'tzinfo'", ")", "if", "(", "tzinfo", "==", "'Z'", ")", ":", "tzinfo", "=", "utc", "elif", "(", "tzinfo", "is", "not", "None", ")", ":", "offset_mins", "=", "(", "int", "(", "tzinfo", "[", "(", "-", "2", ")", ":", "]", ")", "if", "(", "len", "(", "tzinfo", ")", ">", "3", ")", "else", "0", ")", "offset", "=", "(", "(", "60", "*", "int", "(", "tzinfo", "[", "1", ":", "3", "]", ")", ")", "+", "offset_mins", ")", "if", "(", "tzinfo", "[", "0", "]", "==", "'-'", ")", ":", "offset", "=", "(", "-", "offset", ")", "tzinfo", "=", "get_fixed_timezone", "(", "offset", ")", "kw", "=", "{", "k", ":", "int", "(", "v", ")", "for", "(", "k", ",", "v", ")", "in", "kw", ".", "items", "(", ")", "if", "(", "v", "is", "not", "None", ")", "}", "kw", "[", "'tzinfo'", "]", "=", "tzinfo", "return", "datetime", ".", "datetime", "(", "**", "kw", ")" ]
parses a string and return a datetime .
train
true
42,329
def parse_streaming_media_json(json_object, room_from_url): streams = {} for group in json_object: for room in group['rooms']: match = _url_streaming_media_re.match(room['link']) if (not (match.group('room') == room_from_url)): continue for stream in room['streams']: if (stream['isTranslated'] == False): language = 'native' else: language = 'translated' hls_stream = stream['urls'].get('hls') if hls_stream: stream_url = hls_stream['url'] name = None if (language == 'native'): name = ('%sp' % stream['videoSize'][(-1)]) long_name = ('hls_%s_%sp' % ('native', stream['videoSize'][(-1)])) streams[name] = stream_url streams[long_name] = stream_url elif (language == 'translated'): long_name = ('hls_%s_%sp' % ('translated', stream['videoSize'][(-1)])) streams[long_name] = stream_url mp3_stream = stream['urls'].get('mp3') if mp3_stream: stream_url = mp3_stream['url'] name = ('audio_%s_mpeg' % language) streams[name] = stream_url opus_stream = stream['urls'].get('opus') if opus_stream: stream_url = opus_stream['url'] name = ('audio_%s_opus' % language) streams[name] = stream_url return streams
[ "def", "parse_streaming_media_json", "(", "json_object", ",", "room_from_url", ")", ":", "streams", "=", "{", "}", "for", "group", "in", "json_object", ":", "for", "room", "in", "group", "[", "'rooms'", "]", ":", "match", "=", "_url_streaming_media_re", ".", "match", "(", "room", "[", "'link'", "]", ")", "if", "(", "not", "(", "match", ".", "group", "(", "'room'", ")", "==", "room_from_url", ")", ")", ":", "continue", "for", "stream", "in", "room", "[", "'streams'", "]", ":", "if", "(", "stream", "[", "'isTranslated'", "]", "==", "False", ")", ":", "language", "=", "'native'", "else", ":", "language", "=", "'translated'", "hls_stream", "=", "stream", "[", "'urls'", "]", ".", "get", "(", "'hls'", ")", "if", "hls_stream", ":", "stream_url", "=", "hls_stream", "[", "'url'", "]", "name", "=", "None", "if", "(", "language", "==", "'native'", ")", ":", "name", "=", "(", "'%sp'", "%", "stream", "[", "'videoSize'", "]", "[", "(", "-", "1", ")", "]", ")", "long_name", "=", "(", "'hls_%s_%sp'", "%", "(", "'native'", ",", "stream", "[", "'videoSize'", "]", "[", "(", "-", "1", ")", "]", ")", ")", "streams", "[", "name", "]", "=", "stream_url", "streams", "[", "long_name", "]", "=", "stream_url", "elif", "(", "language", "==", "'translated'", ")", ":", "long_name", "=", "(", "'hls_%s_%sp'", "%", "(", "'translated'", ",", "stream", "[", "'videoSize'", "]", "[", "(", "-", "1", ")", "]", ")", ")", "streams", "[", "long_name", "]", "=", "stream_url", "mp3_stream", "=", "stream", "[", "'urls'", "]", ".", "get", "(", "'mp3'", ")", "if", "mp3_stream", ":", "stream_url", "=", "mp3_stream", "[", "'url'", "]", "name", "=", "(", "'audio_%s_mpeg'", "%", "language", ")", "streams", "[", "name", "]", "=", "stream_url", "opus_stream", "=", "stream", "[", "'urls'", "]", ".", "get", "(", "'opus'", ")", "if", "opus_stream", ":", "stream_url", "=", "opus_stream", "[", "'url'", "]", "name", "=", "(", "'audio_%s_opus'", "%", "language", ")", "streams", "[", "name", "]", "=", "stream_url", 
"return", "streams" ]
filter all availabe live streams for given json and room name .
train
false
42,330
def _PromptForAdminCookie(user, pwd, otp_entry): if (user is None): user = raw_input('Please enter admin username: ') else: print ('Username: %s' % user) if (pwd is None): pwd = getpass.getpass('Please enter admin password: ') if (otp_entry is None): otp_entry = int(getpass.getpass('Please enter OTP code: ')) return (user, pwd, otp_entry)
[ "def", "_PromptForAdminCookie", "(", "user", ",", "pwd", ",", "otp_entry", ")", ":", "if", "(", "user", "is", "None", ")", ":", "user", "=", "raw_input", "(", "'Please enter admin username: '", ")", "else", ":", "print", "(", "'Username: %s'", "%", "user", ")", "if", "(", "pwd", "is", "None", ")", ":", "pwd", "=", "getpass", ".", "getpass", "(", "'Please enter admin password: '", ")", "if", "(", "otp_entry", "is", "None", ")", ":", "otp_entry", "=", "int", "(", "getpass", ".", "getpass", "(", "'Please enter OTP code: '", ")", ")", "return", "(", "user", ",", "pwd", ",", "otp_entry", ")" ]
prompts the user to enter admin username / password and otp code .
train
false
42,331
def is_valid_locale(locale_n): try: locale.setlocale(locale.LC_ALL, locale_n) return True except locale.Error: return False
[ "def", "is_valid_locale", "(", "locale_n", ")", ":", "try", ":", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ",", "locale_n", ")", "return", "True", "except", "locale", ".", "Error", ":", "return", "False" ]
check if locale is valid .
train
false
42,332
@_built_in_directive def api_version(default=None, api_version=None, **kwargs): return api_version
[ "@", "_built_in_directive", "def", "api_version", "(", "default", "=", "None", ",", "api_version", "=", "None", ",", "**", "kwargs", ")", ":", "return", "api_version" ]
returns the current api_version as a directive for use in both request and not request handling code .
train
false
42,333
def ajax_editable_boolean_cell(item, attr, text=u'', override=None): if text: text = (u'&nbsp;(%s)' % text) if (override is not None): a = [django_boolean_icon(override, text), text] else: value = getattr(item, attr) a = [(u'<input type="checkbox" data-inplace data-inplace-id="%s" data-inplace-attribute="%s" %s>' % (item.pk, attr, (u'checked="checked"' if value else u'')))] a.insert(0, (u'<div id="wrap_%s_%d">' % (attr, item.pk))) a.append(u'</div>') return mark_safe(u''.join(a))
[ "def", "ajax_editable_boolean_cell", "(", "item", ",", "attr", ",", "text", "=", "u''", ",", "override", "=", "None", ")", ":", "if", "text", ":", "text", "=", "(", "u'&nbsp;(%s)'", "%", "text", ")", "if", "(", "override", "is", "not", "None", ")", ":", "a", "=", "[", "django_boolean_icon", "(", "override", ",", "text", ")", ",", "text", "]", "else", ":", "value", "=", "getattr", "(", "item", ",", "attr", ")", "a", "=", "[", "(", "u'<input type=\"checkbox\" data-inplace data-inplace-id=\"%s\" data-inplace-attribute=\"%s\" %s>'", "%", "(", "item", ".", "pk", ",", "attr", ",", "(", "u'checked=\"checked\"'", "if", "value", "else", "u''", ")", ")", ")", "]", "a", ".", "insert", "(", "0", ",", "(", "u'<div id=\"wrap_%s_%d\">'", "%", "(", "attr", ",", "item", ".", "pk", ")", ")", ")", "a", ".", "append", "(", "u'</div>'", ")", "return", "mark_safe", "(", "u''", ".", "join", "(", "a", ")", ")" ]
generate a html snippet for showing a boolean value on the admin page .
train
false
42,334
def get_state(app): assert ('sqlalchemy' in app.extensions), 'The sqlalchemy extension was not registered to the current application. Please make sure to call init_app() first.' return app.extensions['sqlalchemy']
[ "def", "get_state", "(", "app", ")", ":", "assert", "(", "'sqlalchemy'", "in", "app", ".", "extensions", ")", ",", "'The sqlalchemy extension was not registered to the current application. Please make sure to call init_app() first.'", "return", "app", ".", "extensions", "[", "'sqlalchemy'", "]" ]
gets the state for the application .
train
false
42,335
def import_class(import_str): (mod_str, _sep, class_str) = import_str.rpartition('.') try: __import__(mod_str) return getattr(sys.modules[mod_str], class_str) except (ValueError, AttributeError): raise ImportError(('Class %s cannot be found (%s)' % (class_str, traceback.format_exception(*sys.exc_info()))))
[ "def", "import_class", "(", "import_str", ")", ":", "(", "mod_str", ",", "_sep", ",", "class_str", ")", "=", "import_str", ".", "rpartition", "(", "'.'", ")", "try", ":", "__import__", "(", "mod_str", ")", "return", "getattr", "(", "sys", ".", "modules", "[", "mod_str", "]", ",", "class_str", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":", "raise", "ImportError", "(", "(", "'Class %s cannot be found (%s)'", "%", "(", "class_str", ",", "traceback", ".", "format_exception", "(", "*", "sys", ".", "exc_info", "(", ")", ")", ")", ")", ")" ]
returns a class from a string including module and class .
train
true
42,336
def _scan_plugins(): pd = os.path.dirname(__file__) config_files = glob(os.path.join(pd, '_plugins', '*.ini')) for filename in config_files: (name, meta_data) = _parse_config_file(filename) plugin_meta_data[name] = meta_data provides = [s.strip() for s in meta_data['provides'].split(',')] valid_provides = [p for p in provides if (p in plugin_store)] for p in provides: if (not (p in plugin_store)): print ('Plugin `%s` wants to provide non-existent `%s`. Ignoring.' % (name, p)) need_to_add_collection = (('imread_collection' not in valid_provides) and ('imread' in valid_provides)) if need_to_add_collection: valid_provides.append('imread_collection') plugin_provides[name] = valid_provides plugin_module_name[name] = os.path.basename(filename)[:(-4)]
[ "def", "_scan_plugins", "(", ")", ":", "pd", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "config_files", "=", "glob", "(", "os", ".", "path", ".", "join", "(", "pd", ",", "'_plugins'", ",", "'*.ini'", ")", ")", "for", "filename", "in", "config_files", ":", "(", "name", ",", "meta_data", ")", "=", "_parse_config_file", "(", "filename", ")", "plugin_meta_data", "[", "name", "]", "=", "meta_data", "provides", "=", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "meta_data", "[", "'provides'", "]", ".", "split", "(", "','", ")", "]", "valid_provides", "=", "[", "p", "for", "p", "in", "provides", "if", "(", "p", "in", "plugin_store", ")", "]", "for", "p", "in", "provides", ":", "if", "(", "not", "(", "p", "in", "plugin_store", ")", ")", ":", "print", "(", "'Plugin `%s` wants to provide non-existent `%s`. Ignoring.'", "%", "(", "name", ",", "p", ")", ")", "need_to_add_collection", "=", "(", "(", "'imread_collection'", "not", "in", "valid_provides", ")", "and", "(", "'imread'", "in", "valid_provides", ")", ")", "if", "need_to_add_collection", ":", "valid_provides", ".", "append", "(", "'imread_collection'", ")", "plugin_provides", "[", "name", "]", "=", "valid_provides", "plugin_module_name", "[", "name", "]", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "[", ":", "(", "-", "4", ")", "]" ]
scan the plugins directory for .
train
false
42,338
def test_param_endings(): sig = Script('def x(a, b=5, c=""): pass\n x(').call_signatures()[0] assert ([p.description for p in sig.params] == ['a', 'b=5', 'c=""'])
[ "def", "test_param_endings", "(", ")", ":", "sig", "=", "Script", "(", "'def x(a, b=5, c=\"\"): pass\\n x('", ")", ".", "call_signatures", "(", ")", "[", "0", "]", "assert", "(", "[", "p", ".", "description", "for", "p", "in", "sig", ".", "params", "]", "==", "[", "'a'", ",", "'b=5'", ",", "'c=\"\"'", "]", ")" ]
params should be represented without the comma and whitespace they have around them .
train
false
42,340
def ex_call(func, args): if isinstance(func, six.string_types): func = ex_rvalue(func) args = list(args) for i in range(len(args)): if (not isinstance(args[i], ast.expr)): args[i] = ex_literal(args[i]) if (sys.version_info[:2] < (3, 5)): return ast.Call(func, args, [], None, None) else: return ast.Call(func, args, [])
[ "def", "ex_call", "(", "func", ",", "args", ")", ":", "if", "isinstance", "(", "func", ",", "six", ".", "string_types", ")", ":", "func", "=", "ex_rvalue", "(", "func", ")", "args", "=", "list", "(", "args", ")", "for", "i", "in", "range", "(", "len", "(", "args", ")", ")", ":", "if", "(", "not", "isinstance", "(", "args", "[", "i", "]", ",", "ast", ".", "expr", ")", ")", ":", "args", "[", "i", "]", "=", "ex_literal", "(", "args", "[", "i", "]", ")", "if", "(", "sys", ".", "version_info", "[", ":", "2", "]", "<", "(", "3", ",", "5", ")", ")", ":", "return", "ast", ".", "Call", "(", "func", ",", "args", ",", "[", "]", ",", "None", ",", "None", ")", "else", ":", "return", "ast", ".", "Call", "(", "func", ",", "args", ",", "[", "]", ")" ]
a function-call expression with only positional parameters .
train
true
42,342
def sentencize(s): s = s.replace('\n', ' ').strip().split('.') s = (s[0] if len(s) else s) try: return ' '.join(s.split()) except AttributeError: return s
[ "def", "sentencize", "(", "s", ")", ":", "s", "=", "s", ".", "replace", "(", "'\\n'", ",", "' '", ")", ".", "strip", "(", ")", ".", "split", "(", "'.'", ")", "s", "=", "(", "s", "[", "0", "]", "if", "len", "(", "s", ")", "else", "s", ")", "try", ":", "return", "' '", ".", "join", "(", "s", ".", "split", "(", ")", ")", "except", "AttributeError", ":", "return", "s" ]
extract first sentence .
train
false
42,344
def is_scalar_zero_or_nan(builder, value): return _scalar_pred_against_zero(builder, value, functools.partial(builder.fcmp_unordered, '=='), '==')
[ "def", "is_scalar_zero_or_nan", "(", "builder", ",", "value", ")", ":", "return", "_scalar_pred_against_zero", "(", "builder", ",", "value", ",", "functools", ".", "partial", "(", "builder", ".", "fcmp_unordered", ",", "'=='", ")", ",", "'=='", ")" ]
return a predicate representing whether *value* is equal to either zero or nan .
train
false
42,346
def test_set_format_does_not_share_subfmt(): t = Time('+02000-02-03', format='fits', out_subfmt='longdate') t.format = 'isot' assert (t.out_subfmt == '*') assert (t.value == '2000-02-03T00:00:00.000') t.format = 'fits' assert (t.out_subfmt == '*') assert (t.value == '2000-02-03T00:00:00.000(UTC)')
[ "def", "test_set_format_does_not_share_subfmt", "(", ")", ":", "t", "=", "Time", "(", "'+02000-02-03'", ",", "format", "=", "'fits'", ",", "out_subfmt", "=", "'longdate'", ")", "t", ".", "format", "=", "'isot'", "assert", "(", "t", ".", "out_subfmt", "==", "'*'", ")", "assert", "(", "t", ".", "value", "==", "'2000-02-03T00:00:00.000'", ")", "t", ".", "format", "=", "'fits'", "assert", "(", "t", ".", "out_subfmt", "==", "'*'", ")", "assert", "(", "t", ".", "value", "==", "'2000-02-03T00:00:00.000(UTC)'", ")" ]
set format and round trip through a format that does not share out_subfmt .
train
false
42,349
def make_envs_dict(abund_mtx, sample_names, taxon_names): (num_samples, num_seqs) = abund_mtx.shape if ((num_samples, num_seqs) != (len(sample_names), len(taxon_names))): raise ValueError(("Shape of matrix %s doesn't match # samples and # taxa (%s and %s)" % (abund_mtx.shape, num_samples, num_seqs))) envs_dict = {} sample_names = asarray(sample_names) for (i, taxon) in enumerate(abund_mtx.T): nonzeros = taxon.nonzero() envs_dict[taxon_names[i]] = dict(zip(sample_names[nonzeros], taxon[nonzeros])) return envs_dict
[ "def", "make_envs_dict", "(", "abund_mtx", ",", "sample_names", ",", "taxon_names", ")", ":", "(", "num_samples", ",", "num_seqs", ")", "=", "abund_mtx", ".", "shape", "if", "(", "(", "num_samples", ",", "num_seqs", ")", "!=", "(", "len", "(", "sample_names", ")", ",", "len", "(", "taxon_names", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "\"Shape of matrix %s doesn't match # samples and # taxa (%s and %s)\"", "%", "(", "abund_mtx", ".", "shape", ",", "num_samples", ",", "num_seqs", ")", ")", ")", "envs_dict", "=", "{", "}", "sample_names", "=", "asarray", "(", "sample_names", ")", "for", "(", "i", ",", "taxon", ")", "in", "enumerate", "(", "abund_mtx", ".", "T", ")", ":", "nonzeros", "=", "taxon", ".", "nonzero", "(", ")", "envs_dict", "[", "taxon_names", "[", "i", "]", "]", "=", "dict", "(", "zip", "(", "sample_names", "[", "nonzeros", "]", ",", "taxon", "[", "nonzeros", "]", ")", ")", "return", "envs_dict" ]
makes an envs dict suitable for unifrac from an abundance matrix abund_mtx is samples by seqs numpy 2d array sample_names is a list .
train
false
42,350
@register.function def user_collection_list(collections=None, heading='', id='', link=None): if (collections is None): collections = [] c = {'collections': collections, 'heading': heading, 'link': link, 'id': id} template = get_env().get_template('bandwagon/users/collection_list.html') return jinja2.Markup(template.render(c))
[ "@", "register", ".", "function", "def", "user_collection_list", "(", "collections", "=", "None", ",", "heading", "=", "''", ",", "id", "=", "''", ",", "link", "=", "None", ")", ":", "if", "(", "collections", "is", "None", ")", ":", "collections", "=", "[", "]", "c", "=", "{", "'collections'", ":", "collections", ",", "'heading'", ":", "heading", ",", "'link'", ":", "link", ",", "'id'", ":", "id", "}", "template", "=", "get_env", "(", ")", ".", "get_template", "(", "'bandwagon/users/collection_list.html'", ")", "return", "jinja2", ".", "Markup", "(", "template", ".", "render", "(", "c", ")", ")" ]
list of collections .
train
false
42,351
def shelter_service(): output = s3_rest_controller() return output
[ "def", "shelter_service", "(", ")", ":", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful crud controller list / add shelter services .
train
false
42,354
def registered_for_course(course, user): if (user is None): return False if user.is_authenticated(): return CourseEnrollment.is_enrolled(user, course.id) else: return False
[ "def", "registered_for_course", "(", "course", ",", "user", ")", ":", "if", "(", "user", "is", "None", ")", ":", "return", "False", "if", "user", ".", "is_authenticated", "(", ")", ":", "return", "CourseEnrollment", ".", "is_enrolled", "(", "user", ",", "course", ".", "id", ")", "else", ":", "return", "False" ]
return true if user is registered for course .
train
false
42,355
def test_arithmetic_on_small_array_from_file(file): s = symbol('s', discover(file)) assert eq(compute((s.x + 1), file), (x + 1))
[ "def", "test_arithmetic_on_small_array_from_file", "(", "file", ")", ":", "s", "=", "symbol", "(", "'s'", ",", "discover", "(", "file", ")", ")", "assert", "eq", "(", "compute", "(", "(", "s", ".", "x", "+", "1", ")", ",", "file", ")", ",", "(", "x", "+", "1", ")", ")" ]
want to make sure that we call pre_compute on dataset even when its not the leaf data input .
train
false
42,356
def decode_bin(v): if (not is_string(v)): raise Exception('Value must be binary, not RLP array') return v
[ "def", "decode_bin", "(", "v", ")", ":", "if", "(", "not", "is_string", "(", "v", ")", ")", ":", "raise", "Exception", "(", "'Value must be binary, not RLP array'", ")", "return", "v" ]
decodes a bytearray from serialization .
train
false
42,357
def send_password_reset_email(email, host, reset_key): email_from = ('Daemo Team <%s>' % settings.EMAIL_SENDER) email_to = email subject = 'Daemo Password Reset' reset_url = ((('http://' + host) + '/reset-password/') + reset_key) text_content = (((('Hello, \n Please reset your password using the following link: \n' + reset_url) + '/1 \nIf you did not request a password reset please click the following link: ') + reset_url) + '/0 \nGreetings, \nDaemo Team') html_content = ((((((((((((('<h3>Hello,</h3><p>Please reset your password using the following link: <br><a href="' + reset_url) + '/1') + '">') + reset_url) + '/1') + "</a></p> <br><p>If you didn't request a password reset please click the following link: <br>") + '<a href="') + reset_url) + '/0') + '">') + reset_url) + '/0') + '</a><br><br> Greetings,<br> <strong>Daemo Team</strong>') send_mail(email_from, email_to, subject, text_content, html_content)
[ "def", "send_password_reset_email", "(", "email", ",", "host", ",", "reset_key", ")", ":", "email_from", "=", "(", "'Daemo Team <%s>'", "%", "settings", ".", "EMAIL_SENDER", ")", "email_to", "=", "email", "subject", "=", "'Daemo Password Reset'", "reset_url", "=", "(", "(", "(", "'http://'", "+", "host", ")", "+", "'/reset-password/'", ")", "+", "reset_key", ")", "text_content", "=", "(", "(", "(", "(", "'Hello, \\n Please reset your password using the following link: \\n'", "+", "reset_url", ")", "+", "'/1 \\nIf you did not request a password reset please click the following link: '", ")", "+", "reset_url", ")", "+", "'/0 \\nGreetings, \\nDaemo Team'", ")", "html_content", "=", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "'<h3>Hello,</h3><p>Please reset your password using the following link: <br><a href=\"'", "+", "reset_url", ")", "+", "'/1'", ")", "+", "'\">'", ")", "+", "reset_url", ")", "+", "'/1'", ")", "+", "\"</a></p> <br><p>If you didn't request a password reset please click the following link: <br>\"", ")", "+", "'<a href=\"'", ")", "+", "reset_url", ")", "+", "'/0'", ")", "+", "'\">'", ")", "+", "reset_url", ")", "+", "'/0'", ")", "+", "'</a><br><br> Greetings,<br> <strong>Daemo Team</strong>'", ")", "send_mail", "(", "email_from", ",", "email_to", ",", "subject", ",", "text_content", ",", "html_content", ")" ]
this sends the email to the user the email includes two links .
train
false
42,358
def check_applied(result): try: applied = result.was_applied except Exception: applied = True if (not applied): raise LWTException(result[0])
[ "def", "check_applied", "(", "result", ")", ":", "try", ":", "applied", "=", "result", ".", "was_applied", "except", "Exception", ":", "applied", "=", "True", "if", "(", "not", "applied", ")", ":", "raise", "LWTException", "(", "result", "[", "0", "]", ")" ]
raises lwtexception if it looks like a failed lwt request .
train
true
42,359
def _GetLibraryDirs(config): library_dirs = config.get('library_dirs', []) library_dirs = _FixPaths(library_dirs) return library_dirs
[ "def", "_GetLibraryDirs", "(", "config", ")", ":", "library_dirs", "=", "config", ".", "get", "(", "'library_dirs'", ",", "[", "]", ")", "library_dirs", "=", "_FixPaths", "(", "library_dirs", ")", "return", "library_dirs" ]
returns the list of directories to be used for library search paths .
train
false
42,360
def selBest(individuals, k): return sorted(individuals, key=attrgetter('fitness'), reverse=True)[:k]
[ "def", "selBest", "(", "individuals", ",", "k", ")", ":", "return", "sorted", "(", "individuals", ",", "key", "=", "attrgetter", "(", "'fitness'", ")", ",", "reverse", "=", "True", ")", "[", ":", "k", "]" ]
select the *k* best individuals among the input *individuals* .
train
false
42,361
def process_i1k_tar_subpath(args): (target_size, toptar, img_dir, setn, label_dict, subpath) = args name_slice = (slice(None, 9) if (setn == 'train') else slice(15, (-5))) label = label_dict[subpath.name[name_slice]] outpath = os.path.join(img_dir, str(label)) if (setn == 'train'): tf = tarfile.open(toptar) subtar = tarfile.open(fileobj=tf.extractfile(subpath)) file_list = subtar.getmembers() return process_files_in_tar(target_size, label, subtar, file_list, outpath) elif (setn == 'val'): tf = tarfile.open(toptar) file_list = [subpath] return process_files_in_tar(target_size, label, tf, file_list, outpath)
[ "def", "process_i1k_tar_subpath", "(", "args", ")", ":", "(", "target_size", ",", "toptar", ",", "img_dir", ",", "setn", ",", "label_dict", ",", "subpath", ")", "=", "args", "name_slice", "=", "(", "slice", "(", "None", ",", "9", ")", "if", "(", "setn", "==", "'train'", ")", "else", "slice", "(", "15", ",", "(", "-", "5", ")", ")", ")", "label", "=", "label_dict", "[", "subpath", ".", "name", "[", "name_slice", "]", "]", "outpath", "=", "os", ".", "path", ".", "join", "(", "img_dir", ",", "str", "(", "label", ")", ")", "if", "(", "setn", "==", "'train'", ")", ":", "tf", "=", "tarfile", ".", "open", "(", "toptar", ")", "subtar", "=", "tarfile", ".", "open", "(", "fileobj", "=", "tf", ".", "extractfile", "(", "subpath", ")", ")", "file_list", "=", "subtar", ".", "getmembers", "(", ")", "return", "process_files_in_tar", "(", "target_size", ",", "label", ",", "subtar", ",", "file_list", ",", "outpath", ")", "elif", "(", "setn", "==", "'val'", ")", ":", "tf", "=", "tarfile", ".", "open", "(", "toptar", ")", "file_list", "=", "[", "subpath", "]", "return", "process_files_in_tar", "(", "target_size", ",", "label", ",", "tf", ",", "file_list", ",", "outpath", ")" ]
process a single subpath in a i1k tar .
train
false
42,363
def fill_in_whitespace(text): text = text.replace('\\n', '\n') text = text.replace('\\t', ' DCTB ') text = text.replace('\\r', '\r') text = text.replace('\\a', '\x07') text = text.replace('\\b', '\x08') return text
[ "def", "fill_in_whitespace", "(", "text", ")", ":", "text", "=", "text", ".", "replace", "(", "'\\\\n'", ",", "'\\n'", ")", "text", "=", "text", ".", "replace", "(", "'\\\\t'", ",", "' DCTB '", ")", "text", "=", "text", ".", "replace", "(", "'\\\\r'", ",", "'\\r'", ")", "text", "=", "text", ".", "replace", "(", "'\\\\a'", ",", "'\\x07'", ")", "text", "=", "text", ".", "replace", "(", "'\\\\b'", ",", "'\\x08'", ")", "return", "text" ]
returns text with escaped whitespace replaced through whitespaces .
train
false
42,364
def kill_dhcp(conf, namespace): network_id = namespace.replace(dhcp.NS_PREFIX, '') dhcp_driver = importutils.import_object(conf.dhcp_driver, conf=conf, process_monitor=_get_dhcp_process_monitor(conf), network=dhcp.NetModel({'id': network_id}), plugin=FakeDhcpPlugin()) if dhcp_driver.active: dhcp_driver.disable()
[ "def", "kill_dhcp", "(", "conf", ",", "namespace", ")", ":", "network_id", "=", "namespace", ".", "replace", "(", "dhcp", ".", "NS_PREFIX", ",", "''", ")", "dhcp_driver", "=", "importutils", ".", "import_object", "(", "conf", ".", "dhcp_driver", ",", "conf", "=", "conf", ",", "process_monitor", "=", "_get_dhcp_process_monitor", "(", "conf", ")", ",", "network", "=", "dhcp", ".", "NetModel", "(", "{", "'id'", ":", "network_id", "}", ")", ",", "plugin", "=", "FakeDhcpPlugin", "(", ")", ")", "if", "dhcp_driver", ".", "active", ":", "dhcp_driver", ".", "disable", "(", ")" ]
disable dhcp for a network if dhcp is still active .
train
false
42,365
def _get_valid_name(proposed_name): slug_name = slugify(proposed_name) name = slug_name if (len(slug_name) > 40): name = slug_name[:40] existing_service = Service.objects.filter(name=name) iter = 1 while (existing_service.count() > 0): name = (slug_name + str(iter)) existing_service = Service.objects.filter(name=name) iter += 1 return name
[ "def", "_get_valid_name", "(", "proposed_name", ")", ":", "slug_name", "=", "slugify", "(", "proposed_name", ")", "name", "=", "slug_name", "if", "(", "len", "(", "slug_name", ")", ">", "40", ")", ":", "name", "=", "slug_name", "[", ":", "40", "]", "existing_service", "=", "Service", ".", "objects", ".", "filter", "(", "name", "=", "name", ")", "iter", "=", "1", "while", "(", "existing_service", ".", "count", "(", ")", ">", "0", ")", ":", "name", "=", "(", "slug_name", "+", "str", "(", "iter", ")", ")", "existing_service", "=", "Service", ".", "objects", ".", "filter", "(", "name", "=", "name", ")", "iter", "+=", "1", "return", "name" ]
return a unique slug name for a service .
train
false
42,366
def _sh_negate(sh, order): assert (order >= 0) return (sh.conj() * ((-1.0) if (order % 2) else 1.0))
[ "def", "_sh_negate", "(", "sh", ",", "order", ")", ":", "assert", "(", "order", ">=", "0", ")", "return", "(", "sh", ".", "conj", "(", ")", "*", "(", "(", "-", "1.0", ")", "if", "(", "order", "%", "2", ")", "else", "1.0", ")", ")" ]
helper to get the negative spherical harmonic from a positive one .
train
false
42,368
def libvlc_media_library_new(p_instance): f = (_Cfunctions.get('libvlc_media_library_new', None) or _Cfunction('libvlc_media_library_new', ((1,),), class_result(MediaLibrary), ctypes.c_void_p, Instance)) return f(p_instance)
[ "def", "libvlc_media_library_new", "(", "p_instance", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_library_new'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_library_new'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "class_result", "(", "MediaLibrary", ")", ",", "ctypes", ".", "c_void_p", ",", "Instance", ")", ")", "return", "f", "(", "p_instance", ")" ]
create an new media library object .
train
true
42,369
def create_win_salt_restart_task(): cmd = 'cmd' args = '/c ping -n 3 127.0.0.1 && net stop salt-minion && net start salt-minion' return __salt__['task.create_task'](name='restart-salt-minion', user_name='System', force=True, action_type='Execute', cmd=cmd, arguments=args, trigger_type='Once', start_date='1975-01-01', start_time='01:00')
[ "def", "create_win_salt_restart_task", "(", ")", ":", "cmd", "=", "'cmd'", "args", "=", "'/c ping -n 3 127.0.0.1 && net stop salt-minion && net start salt-minion'", "return", "__salt__", "[", "'task.create_task'", "]", "(", "name", "=", "'restart-salt-minion'", ",", "user_name", "=", "'System'", ",", "force", "=", "True", ",", "action_type", "=", "'Execute'", ",", "cmd", "=", "cmd", ",", "arguments", "=", "args", ",", "trigger_type", "=", "'Once'", ",", "start_date", "=", "'1975-01-01'", ",", "start_time", "=", "'01:00'", ")" ]
create a task in windows task scheduler to enable restarting the salt-minion cli example: .
train
true
42,371
def removeFilesInDirectory(directoryPath): fileNames = os.listdir(directoryPath) for fileName in fileNames: filePath = os.path.join(directoryPath, fileName) os.remove(filePath)
[ "def", "removeFilesInDirectory", "(", "directoryPath", ")", ":", "fileNames", "=", "os", ".", "listdir", "(", "directoryPath", ")", "for", "fileName", "in", "fileNames", ":", "filePath", "=", "os", ".", "path", ".", "join", "(", "directoryPath", ",", "fileName", ")", "os", ".", "remove", "(", "filePath", ")" ]
remove all the files in a directory .
train
false
42,372
def test_valid_origins(): func = (lambda x: np.mean(x)) data = np.array([1, 2, 3, 4, 5], dtype=np.float64) assert_raises(ValueError, sndi.generic_filter, data, func, size=3, origin=2) func2 = (lambda x, y: np.mean((x + y))) assert_raises(ValueError, sndi.generic_filter1d, data, func, filter_size=3, origin=2) assert_raises(ValueError, sndi.percentile_filter, data, 0.2, size=3, origin=2) for filter in [sndi.uniform_filter, sndi.minimum_filter, sndi.maximum_filter, sndi.maximum_filter1d, sndi.median_filter, sndi.minimum_filter1d]: list(filter(data, 3, origin=(-1))) list(filter(data, 3, origin=1)) assert_raises(ValueError, filter, data, 3, origin=2)
[ "def", "test_valid_origins", "(", ")", ":", "func", "=", "(", "lambda", "x", ":", "np", ".", "mean", "(", "x", ")", ")", "data", "=", "np", ".", "array", "(", "[", "1", ",", "2", ",", "3", ",", "4", ",", "5", "]", ",", "dtype", "=", "np", ".", "float64", ")", "assert_raises", "(", "ValueError", ",", "sndi", ".", "generic_filter", ",", "data", ",", "func", ",", "size", "=", "3", ",", "origin", "=", "2", ")", "func2", "=", "(", "lambda", "x", ",", "y", ":", "np", ".", "mean", "(", "(", "x", "+", "y", ")", ")", ")", "assert_raises", "(", "ValueError", ",", "sndi", ".", "generic_filter1d", ",", "data", ",", "func", ",", "filter_size", "=", "3", ",", "origin", "=", "2", ")", "assert_raises", "(", "ValueError", ",", "sndi", ".", "percentile_filter", ",", "data", ",", "0.2", ",", "size", "=", "3", ",", "origin", "=", "2", ")", "for", "filter", "in", "[", "sndi", ".", "uniform_filter", ",", "sndi", ".", "minimum_filter", ",", "sndi", ".", "maximum_filter", ",", "sndi", ".", "maximum_filter1d", ",", "sndi", ".", "median_filter", ",", "sndi", ".", "minimum_filter1d", "]", ":", "list", "(", "filter", "(", "data", ",", "3", ",", "origin", "=", "(", "-", "1", ")", ")", ")", "list", "(", "filter", "(", "data", ",", "3", ",", "origin", "=", "1", ")", ")", "assert_raises", "(", "ValueError", ",", "filter", ",", "data", ",", "3", ",", "origin", "=", "2", ")" ]
regression test for #1311 .
train
false
42,373
def require_superuser(handler): def require_superuser_wrapper_fn(request, *args, **kwargs): if getattr(request.user, 'is_superuser', False): return handler(request, *args, **kwargs) else: raise PermissionDenied(_('You must be logged in as a superuser to access this endpoint.')) return require_superuser_wrapper_fn
[ "def", "require_superuser", "(", "handler", ")", ":", "def", "require_superuser_wrapper_fn", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "getattr", "(", "request", ".", "user", ",", "'is_superuser'", ",", "False", ")", ":", "return", "handler", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "raise", "PermissionDenied", "(", "_", "(", "'You must be logged in as a superuser to access this endpoint.'", ")", ")", "return", "require_superuser_wrapper_fn" ]
level 4: require a django admin *** note: not yet used .
train
false
42,374
@register.filter def resolution(file_, resolution_string): if sorl_settings.THUMBNAIL_DUMMY: dummy_source = sorl_settings.THUMBNAIL_DUMMY_SOURCE source = dummy_source.replace(u'%(width)s', u'(?P<width>[0-9]+)') source = source.replace(u'%(height)s', u'(?P<height>[0-9]+)') source = re.compile(source) try: resolution = decimal.Decimal(resolution_string.strip(u'x')) info = source.match(file_).groupdict() info = {dimension: int((int(size) * resolution)) for (dimension, size) in info.items()} return (dummy_source % info) except (AttributeError, TypeError, KeyError): return file_ (filename, extension) = os.path.splitext(file_) return (u'%s@%s%s' % (filename, resolution_string, extension))
[ "@", "register", ".", "filter", "def", "resolution", "(", "file_", ",", "resolution_string", ")", ":", "if", "sorl_settings", ".", "THUMBNAIL_DUMMY", ":", "dummy_source", "=", "sorl_settings", ".", "THUMBNAIL_DUMMY_SOURCE", "source", "=", "dummy_source", ".", "replace", "(", "u'%(width)s'", ",", "u'(?P<width>[0-9]+)'", ")", "source", "=", "source", ".", "replace", "(", "u'%(height)s'", ",", "u'(?P<height>[0-9]+)'", ")", "source", "=", "re", ".", "compile", "(", "source", ")", "try", ":", "resolution", "=", "decimal", ".", "Decimal", "(", "resolution_string", ".", "strip", "(", "u'x'", ")", ")", "info", "=", "source", ".", "match", "(", "file_", ")", ".", "groupdict", "(", ")", "info", "=", "{", "dimension", ":", "int", "(", "(", "int", "(", "size", ")", "*", "resolution", ")", ")", "for", "(", "dimension", ",", "size", ")", "in", "info", ".", "items", "(", ")", "}", "return", "(", "dummy_source", "%", "info", ")", "except", "(", "AttributeError", ",", "TypeError", ",", "KeyError", ")", ":", "return", "file_", "(", "filename", ",", "extension", ")", "=", "os", ".", "path", ".", "splitext", "(", "file_", ")", "return", "(", "u'%s@%s%s'", "%", "(", "filename", ",", "resolution_string", ",", "extension", ")", ")" ]
a filter to return the url for the provided resolution of the thumbnail .
train
false
42,376
@command(('add\\s*(-?\\d[-,\\d\\s]{1,250})(%s)' % WORD)) def playlist_add(nums, playlist): nums = util.parse_multi(nums) if (not g.userpl.get(playlist)): playlist = playlist.replace(' ', '-') g.userpl[playlist] = Playlist(playlist) for songnum in nums: g.userpl[playlist].songs.append(g.model[(songnum - 1)]) dur = g.userpl[playlist].duration f = (len(nums), playlist, len(g.userpl[playlist]), dur) g.message = (util.F('added to saved pl') % f) if nums: playlists.save() g.content = content.generate_songlist_display()
[ "@", "command", "(", "(", "'add\\\\s*(-?\\\\d[-,\\\\d\\\\s]{1,250})(%s)'", "%", "WORD", ")", ")", "def", "playlist_add", "(", "nums", ",", "playlist", ")", ":", "nums", "=", "util", ".", "parse_multi", "(", "nums", ")", "if", "(", "not", "g", ".", "userpl", ".", "get", "(", "playlist", ")", ")", ":", "playlist", "=", "playlist", ".", "replace", "(", "' '", ",", "'-'", ")", "g", ".", "userpl", "[", "playlist", "]", "=", "Playlist", "(", "playlist", ")", "for", "songnum", "in", "nums", ":", "g", ".", "userpl", "[", "playlist", "]", ".", "songs", ".", "append", "(", "g", ".", "model", "[", "(", "songnum", "-", "1", ")", "]", ")", "dur", "=", "g", ".", "userpl", "[", "playlist", "]", ".", "duration", "f", "=", "(", "len", "(", "nums", ")", ",", "playlist", ",", "len", "(", "g", ".", "userpl", "[", "playlist", "]", ")", ",", "dur", ")", "g", ".", "message", "=", "(", "util", ".", "F", "(", "'added to saved pl'", ")", "%", "f", ")", "if", "nums", ":", "playlists", ".", "save", "(", ")", "g", ".", "content", "=", "content", ".", "generate_songlist_display", "(", ")" ]
add selected song nums to saved playlist .
train
false
42,377
def _interpret_task_logs(fs, matches, partial=True, log_callback=None): result = {} syslogs_parsed = set() for match in matches: error = {} if match.get('syslog'): stderr_path = match['path'] syslog_path = match['syslog']['path'] else: stderr_path = None syslog_path = match['path'] if stderr_path: if log_callback: log_callback(stderr_path) task_error = _parse_task_stderr(_cat_log(fs, stderr_path)) if task_error: task_error['path'] = stderr_path error['task_error'] = task_error else: continue if (syslog_path in syslogs_parsed): continue if log_callback: log_callback(syslog_path) syslog_error = _parse_task_syslog(_cat_log(fs, syslog_path)) syslogs_parsed.add(syslog_path) if (not syslog_error.get('hadoop_error')): continue error.update(syslog_error) error['hadoop_error']['path'] = syslog_path for id_key in ('attempt_id', 'container_id'): if (id_key in match): error[id_key] = match[id_key] _add_implied_task_id(error) result.setdefault('errors', []) result['errors'].append(error) if partial: result['partial'] = True break return result
[ "def", "_interpret_task_logs", "(", "fs", ",", "matches", ",", "partial", "=", "True", ",", "log_callback", "=", "None", ")", ":", "result", "=", "{", "}", "syslogs_parsed", "=", "set", "(", ")", "for", "match", "in", "matches", ":", "error", "=", "{", "}", "if", "match", ".", "get", "(", "'syslog'", ")", ":", "stderr_path", "=", "match", "[", "'path'", "]", "syslog_path", "=", "match", "[", "'syslog'", "]", "[", "'path'", "]", "else", ":", "stderr_path", "=", "None", "syslog_path", "=", "match", "[", "'path'", "]", "if", "stderr_path", ":", "if", "log_callback", ":", "log_callback", "(", "stderr_path", ")", "task_error", "=", "_parse_task_stderr", "(", "_cat_log", "(", "fs", ",", "stderr_path", ")", ")", "if", "task_error", ":", "task_error", "[", "'path'", "]", "=", "stderr_path", "error", "[", "'task_error'", "]", "=", "task_error", "else", ":", "continue", "if", "(", "syslog_path", "in", "syslogs_parsed", ")", ":", "continue", "if", "log_callback", ":", "log_callback", "(", "syslog_path", ")", "syslog_error", "=", "_parse_task_syslog", "(", "_cat_log", "(", "fs", ",", "syslog_path", ")", ")", "syslogs_parsed", ".", "add", "(", "syslog_path", ")", "if", "(", "not", "syslog_error", ".", "get", "(", "'hadoop_error'", ")", ")", ":", "continue", "error", ".", "update", "(", "syslog_error", ")", "error", "[", "'hadoop_error'", "]", "[", "'path'", "]", "=", "syslog_path", "for", "id_key", "in", "(", "'attempt_id'", ",", "'container_id'", ")", ":", "if", "(", "id_key", "in", "match", ")", ":", "error", "[", "id_key", "]", "=", "match", "[", "id_key", "]", "_add_implied_task_id", "(", "error", ")", "result", ".", "setdefault", "(", "'errors'", ",", "[", "]", ")", "result", "[", "'errors'", "]", ".", "append", "(", "error", ")", "if", "partial", ":", "result", "[", "'partial'", "]", "=", "True", "break", "return", "result" ]
look for errors in task syslog/stderr .
train
false
42,378
def _rec_diff_in(g, m, v, i, j, K): if (i == j): return dmp_diff(g, m, v, K) (w, i) = ((v - 1), (i + 1)) return dmp_strip([_rec_diff_in(c, m, w, i, j, K) for c in g], v)
[ "def", "_rec_diff_in", "(", "g", ",", "m", ",", "v", ",", "i", ",", "j", ",", "K", ")", ":", "if", "(", "i", "==", "j", ")", ":", "return", "dmp_diff", "(", "g", ",", "m", ",", "v", ",", "K", ")", "(", "w", ",", "i", ")", "=", "(", "(", "v", "-", "1", ")", ",", "(", "i", "+", "1", ")", ")", "return", "dmp_strip", "(", "[", "_rec_diff_in", "(", "c", ",", "m", ",", "w", ",", "i", ",", "j", ",", "K", ")", "for", "c", "in", "g", "]", ",", "v", ")" ]
recursive helper for :func:dmp_diff_in .
train
false
42,380
def dna_transformation(prev_image, dna_input): prev_image_pad = tf.pad(prev_image, [[0, 0], [2, 2], [2, 2], [0, 0]]) image_height = int(prev_image.get_shape()[1]) image_width = int(prev_image.get_shape()[2]) inputs = [] for xkern in range(DNA_KERN_SIZE): for ykern in range(DNA_KERN_SIZE): inputs.append(tf.expand_dims(tf.slice(prev_image_pad, [0, xkern, ykern, 0], [(-1), image_height, image_width, (-1)]), [3])) inputs = tf.concat(3, inputs) kernel = (tf.nn.relu((dna_input - RELU_SHIFT)) + RELU_SHIFT) kernel = tf.expand_dims((kernel / tf.reduce_sum(kernel, [3], keep_dims=True)), [4]) return tf.reduce_sum((kernel * inputs), [3], keep_dims=False)
[ "def", "dna_transformation", "(", "prev_image", ",", "dna_input", ")", ":", "prev_image_pad", "=", "tf", ".", "pad", "(", "prev_image", ",", "[", "[", "0", ",", "0", "]", ",", "[", "2", ",", "2", "]", ",", "[", "2", ",", "2", "]", ",", "[", "0", ",", "0", "]", "]", ")", "image_height", "=", "int", "(", "prev_image", ".", "get_shape", "(", ")", "[", "1", "]", ")", "image_width", "=", "int", "(", "prev_image", ".", "get_shape", "(", ")", "[", "2", "]", ")", "inputs", "=", "[", "]", "for", "xkern", "in", "range", "(", "DNA_KERN_SIZE", ")", ":", "for", "ykern", "in", "range", "(", "DNA_KERN_SIZE", ")", ":", "inputs", ".", "append", "(", "tf", ".", "expand_dims", "(", "tf", ".", "slice", "(", "prev_image_pad", ",", "[", "0", ",", "xkern", ",", "ykern", ",", "0", "]", ",", "[", "(", "-", "1", ")", ",", "image_height", ",", "image_width", ",", "(", "-", "1", ")", "]", ")", ",", "[", "3", "]", ")", ")", "inputs", "=", "tf", ".", "concat", "(", "3", ",", "inputs", ")", "kernel", "=", "(", "tf", ".", "nn", ".", "relu", "(", "(", "dna_input", "-", "RELU_SHIFT", ")", ")", "+", "RELU_SHIFT", ")", "kernel", "=", "tf", ".", "expand_dims", "(", "(", "kernel", "/", "tf", ".", "reduce_sum", "(", "kernel", ",", "[", "3", "]", ",", "keep_dims", "=", "True", ")", ")", ",", "[", "4", "]", ")", "return", "tf", ".", "reduce_sum", "(", "(", "kernel", "*", "inputs", ")", ",", "[", "3", "]", ",", "keep_dims", "=", "False", ")" ]
apply dynamic neural advection to previous image .
train
true
42,381
def setup_function(): global HASS HASS = get_test_home_assistant() HASS.config.components = ['pilight']
[ "def", "setup_function", "(", ")", ":", "global", "HASS", "HASS", "=", "get_test_home_assistant", "(", ")", "HASS", ".", "config", ".", "components", "=", "[", "'pilight'", "]" ]
initialize a home assistant server .
train
false
42,382
def _fake_run_horcmstart3(*args): global run_horcmstart3_cnt run_horcmstart3_cnt = (run_horcmstart3_cnt + 1) return (0 if (run_horcmstart3_cnt <= 1) else 3)
[ "def", "_fake_run_horcmstart3", "(", "*", "args", ")", ":", "global", "run_horcmstart3_cnt", "run_horcmstart3_cnt", "=", "(", "run_horcmstart3_cnt", "+", "1", ")", "return", "(", "0", "if", "(", "run_horcmstart3_cnt", "<=", "1", ")", "else", "3", ")" ]
update a counter and return a value based on it .
train
false
42,384
def pid_indent(pid): hash = md5(str(pid)) number = int(hash.hexdigest(), 16) indent = (number % 32) return indent
[ "def", "pid_indent", "(", "pid", ")", ":", "hash", "=", "md5", "(", "str", "(", "pid", ")", ")", "number", "=", "int", "(", "hash", ".", "hexdigest", "(", ")", ",", "16", ")", "indent", "=", "(", "number", "%", "32", ")", "return", "indent" ]
get an md5-based indentation for a process id .
train
false
42,387
def multi_future(children, quiet_exceptions=()): if isinstance(children, dict): keys = list(children.keys()) children = children.values() else: keys = None children = list(map(convert_yielded, children)) assert all((is_future(i) for i in children)) unfinished_children = set(children) future = Future() if (not children): future.set_result(({} if (keys is not None) else [])) def callback(f): unfinished_children.remove(f) if (not unfinished_children): result_list = [] for f in children: try: result_list.append(f.result()) except Exception as e: if future.done(): if (not isinstance(e, quiet_exceptions)): app_log.error('Multiple exceptions in yield list', exc_info=True) else: future.set_exc_info(sys.exc_info()) if (not future.done()): if (keys is not None): future.set_result(dict(zip(keys, result_list))) else: future.set_result(result_list) listening = set() for f in children: if (f not in listening): listening.add(f) f.add_done_callback(callback) return future
[ "def", "multi_future", "(", "children", ",", "quiet_exceptions", "=", "(", ")", ")", ":", "if", "isinstance", "(", "children", ",", "dict", ")", ":", "keys", "=", "list", "(", "children", ".", "keys", "(", ")", ")", "children", "=", "children", ".", "values", "(", ")", "else", ":", "keys", "=", "None", "children", "=", "list", "(", "map", "(", "convert_yielded", ",", "children", ")", ")", "assert", "all", "(", "(", "is_future", "(", "i", ")", "for", "i", "in", "children", ")", ")", "unfinished_children", "=", "set", "(", "children", ")", "future", "=", "Future", "(", ")", "if", "(", "not", "children", ")", ":", "future", ".", "set_result", "(", "(", "{", "}", "if", "(", "keys", "is", "not", "None", ")", "else", "[", "]", ")", ")", "def", "callback", "(", "f", ")", ":", "unfinished_children", ".", "remove", "(", "f", ")", "if", "(", "not", "unfinished_children", ")", ":", "result_list", "=", "[", "]", "for", "f", "in", "children", ":", "try", ":", "result_list", ".", "append", "(", "f", ".", "result", "(", ")", ")", "except", "Exception", "as", "e", ":", "if", "future", ".", "done", "(", ")", ":", "if", "(", "not", "isinstance", "(", "e", ",", "quiet_exceptions", ")", ")", ":", "app_log", ".", "error", "(", "'Multiple exceptions in yield list'", ",", "exc_info", "=", "True", ")", "else", ":", "future", ".", "set_exc_info", "(", "sys", ".", "exc_info", "(", ")", ")", "if", "(", "not", "future", ".", "done", "(", ")", ")", ":", "if", "(", "keys", "is", "not", "None", ")", ":", "future", ".", "set_result", "(", "dict", "(", "zip", "(", "keys", ",", "result_list", ")", ")", ")", "else", ":", "future", ".", "set_result", "(", "result_list", ")", "listening", "=", "set", "(", ")", "for", "f", "in", "children", ":", "if", "(", "f", "not", "in", "listening", ")", ":", "listening", ".", "add", "(", "f", ")", "f", ".", "add_done_callback", "(", "callback", ")", "return", "future" ]
wait for multiple asynchronous futures in parallel .
train
true
42,388
def fitness_and_quality_parsed(mime_type, parsed_ranges): (best_fitness, best_fit_q) = ((-1), 0) (target_type, target_subtype, target_params) = parse_media_range(mime_type) for (type, subtype, params) in parsed_ranges: if (((type == target_type) or (type == '*') or (target_type == '*')) and ((subtype == target_subtype) or (subtype == '*') or (target_subtype == '*'))): fitness = 0 if (type == target_type): fitness += 100 if (subtype == target_subtype): fitness += 10 for key in target_params: if ((key != 'q') and (key in params)): if (params[key] == target_params[key]): fitness += 1 if (fitness > best_fitness): best_fitness = fitness best_fit_q = params['q'] return (best_fitness, float(best_fit_q))
[ "def", "fitness_and_quality_parsed", "(", "mime_type", ",", "parsed_ranges", ")", ":", "(", "best_fitness", ",", "best_fit_q", ")", "=", "(", "(", "-", "1", ")", ",", "0", ")", "(", "target_type", ",", "target_subtype", ",", "target_params", ")", "=", "parse_media_range", "(", "mime_type", ")", "for", "(", "type", ",", "subtype", ",", "params", ")", "in", "parsed_ranges", ":", "if", "(", "(", "(", "type", "==", "target_type", ")", "or", "(", "type", "==", "'*'", ")", "or", "(", "target_type", "==", "'*'", ")", ")", "and", "(", "(", "subtype", "==", "target_subtype", ")", "or", "(", "subtype", "==", "'*'", ")", "or", "(", "target_subtype", "==", "'*'", ")", ")", ")", ":", "fitness", "=", "0", "if", "(", "type", "==", "target_type", ")", ":", "fitness", "+=", "100", "if", "(", "subtype", "==", "target_subtype", ")", ":", "fitness", "+=", "10", "for", "key", "in", "target_params", ":", "if", "(", "(", "key", "!=", "'q'", ")", "and", "(", "key", "in", "params", ")", ")", ":", "if", "(", "params", "[", "key", "]", "==", "target_params", "[", "key", "]", ")", ":", "fitness", "+=", "1", "if", "(", "fitness", ">", "best_fitness", ")", ":", "best_fitness", "=", "fitness", "best_fit_q", "=", "params", "[", "'q'", "]", "return", "(", "best_fitness", ",", "float", "(", "best_fit_q", ")", ")" ]
find the best match for a given mime-type against a list of media_ranges that have already been parsed by parse_media_range() .
train
false
42,389
def mergeFolder(src, dst, pattern=None): srcnames = os.listdir(src) for name in srcnames: srcfname = os.path.join(src, name) dstfname = os.path.join(dst, name) if os.path.isdir(srcfname): if (not os.path.isdir(dstfname)): os.makedirs(dstfname) mergeFolder(srcfname, dstfname) else: try: shutil.copyfile(srcfname, dstfname) except IOError as why: print why
[ "def", "mergeFolder", "(", "src", ",", "dst", ",", "pattern", "=", "None", ")", ":", "srcnames", "=", "os", ".", "listdir", "(", "src", ")", "for", "name", "in", "srcnames", ":", "srcfname", "=", "os", ".", "path", ".", "join", "(", "src", ",", "name", ")", "dstfname", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "name", ")", "if", "os", ".", "path", ".", "isdir", "(", "srcfname", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dstfname", ")", ")", ":", "os", ".", "makedirs", "(", "dstfname", ")", "mergeFolder", "(", "srcfname", ",", "dstfname", ")", "else", ":", "try", ":", "shutil", ".", "copyfile", "(", "srcfname", ",", "dstfname", ")", "except", "IOError", "as", "why", ":", "print", "why" ]
merge a folder into another .
train
false
42,390
def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'): if isinstance(d, binary_type): return to_text(d, encoding=encoding, errors=errors) elif isinstance(d, dict): return dict(map(json_dict_bytes_to_unicode, iteritems(d), repeat(encoding), repeat(errors))) elif isinstance(d, list): return list(map(json_dict_bytes_to_unicode, d, repeat(encoding), repeat(errors))) elif isinstance(d, tuple): return tuple(map(json_dict_bytes_to_unicode, d, repeat(encoding), repeat(errors))) else: return d
[ "def", "json_dict_bytes_to_unicode", "(", "d", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'surrogate_or_strict'", ")", ":", "if", "isinstance", "(", "d", ",", "binary_type", ")", ":", "return", "to_text", "(", "d", ",", "encoding", "=", "encoding", ",", "errors", "=", "errors", ")", "elif", "isinstance", "(", "d", ",", "dict", ")", ":", "return", "dict", "(", "map", "(", "json_dict_bytes_to_unicode", ",", "iteritems", "(", "d", ")", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "elif", "isinstance", "(", "d", ",", "list", ")", ":", "return", "list", "(", "map", "(", "json_dict_bytes_to_unicode", ",", "d", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "elif", "isinstance", "(", "d", ",", "tuple", ")", ":", "return", "tuple", "(", "map", "(", "json_dict_bytes_to_unicode", ",", "d", ",", "repeat", "(", "encoding", ")", ",", "repeat", "(", "errors", ")", ")", ")", "else", ":", "return", "d" ]
recursively convert dict keys and values from byte strings to unicode text . specialized for json data , since json decoding only produces dict , list , tuple and string containers .
train
false
42,391
def sineFreqScaling(sfreq, freqScaling): if ((freqScaling.size % 2) != 0): raise ValueError('Frequency scaling array does not have an even size') L = sfreq.shape[0] freqScalingEnv = np.interp(np.arange(L), ((L * freqScaling[::2]) / freqScaling[(-2)]), freqScaling[1::2]) ysfreq = np.zeros_like(sfreq) for l in range(L): ind_valid = np.where((sfreq[l, :] != 0))[0] if (ind_valid.size == 0): continue ysfreq[(l, ind_valid)] = (sfreq[(l, ind_valid)] * freqScalingEnv[l]) return ysfreq
[ "def", "sineFreqScaling", "(", "sfreq", ",", "freqScaling", ")", ":", "if", "(", "(", "freqScaling", ".", "size", "%", "2", ")", "!=", "0", ")", ":", "raise", "ValueError", "(", "'Frequency scaling array does not have an even size'", ")", "L", "=", "sfreq", ".", "shape", "[", "0", "]", "freqScalingEnv", "=", "np", ".", "interp", "(", "np", ".", "arange", "(", "L", ")", ",", "(", "(", "L", "*", "freqScaling", "[", ":", ":", "2", "]", ")", "/", "freqScaling", "[", "(", "-", "2", ")", "]", ")", ",", "freqScaling", "[", "1", ":", ":", "2", "]", ")", "ysfreq", "=", "np", ".", "zeros_like", "(", "sfreq", ")", "for", "l", "in", "range", "(", "L", ")", ":", "ind_valid", "=", "np", ".", "where", "(", "(", "sfreq", "[", "l", ",", ":", "]", "!=", "0", ")", ")", "[", "0", "]", "if", "(", "ind_valid", ".", "size", "==", "0", ")", ":", "continue", "ysfreq", "[", "(", "l", ",", "ind_valid", ")", "]", "=", "(", "sfreq", "[", "(", "l", ",", "ind_valid", ")", "]", "*", "freqScalingEnv", "[", "l", "]", ")", "return", "ysfreq" ]
frequency scaling of sinusoidal tracks . sfreq : frequencies of input sinusoidal tracks ; freqscaling : scaling factors as flattened ( position , factor ) pairs .
train
false