Dataset columns:

    id_within_dataset       int64           1 to 55.5k
    snippet                 stringlengths   19 to 14.2k
    tokens                  listlengths     6 to 1.63k
    nl                      stringlengths   6 to 352
    split_within_dataset    stringclasses   1 value
    is_duplicated           bool            2 classes
id_within_dataset: 48,962
snippet:
    def integrate(*args, **kwargs):
        meijerg = kwargs.pop('meijerg', None)
        conds = kwargs.pop('conds', 'piecewise')
        risch = kwargs.pop('risch', None)
        manual = kwargs.pop('manual', None)
        integral = Integral(*args, **kwargs)
        if isinstance(integral, Integral):
            return integral.doit(deep=False, meijerg=meijerg, conds=conds,
                                 risch=risch, manual=manual)
        else:
            return integral
[ "def", "integrate", "(", "*", "args", ",", "**", "kwargs", ")", ":", "meijerg", "=", "kwargs", ".", "pop", "(", "'meijerg'", ",", "None", ")", "conds", "=", "kwargs", ".", "pop", "(", "'conds'", ",", "'piecewise'", ")", "risch", "=", "kwargs", ".", "pop", "(", "'risch'", ",", "None", ")", "manual", "=", "kwargs", ".", "pop", "(", "'manual'", ",", "None", ")", "integral", "=", "Integral", "(", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "integral", ",", "Integral", ")", ":", "return", "integral", ".", "doit", "(", "deep", "=", "False", ",", "meijerg", "=", "meijerg", ",", "conds", "=", "conds", ",", "risch", "=", "risch", ",", "manual", "=", "manual", ")", "else", ":", "return", "integral" ]
nl: Compute a definite or indefinite integral of one or more variables using the Risch-Norman algorithm and table lookup.
split_within_dataset: train
is_duplicated: false
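This row has the shape of SymPy's top-level integrate; a quick usage sketch against the library (assuming a standard SymPy install):

    from sympy import integrate, sin, symbols

    x = symbols('x')
    print(integrate(sin(x), x))          # indefinite: -cos(x)
    print(integrate(sin(x), (x, 0, 1)))  # definite: 1 - cos(1)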
id_within_dataset: 48,964
snippet:
    def create_xml_for_unpaired_read(data, fname):
        to_print = [create_basic_xml_info(data['name'], fname)]
        if not config['clip']:
            to_print.append(create_clip_xml_info(
                data['number_of_bases'], data['clip_adapter_left'],
                data['clip_adapter_right'], data['clip_qual_left'],
                data['clip_qual_right']))
        to_print.append(' </trace>\n')
        return ''.join(to_print)
[ "def", "create_xml_for_unpaired_read", "(", "data", ",", "fname", ")", ":", "to_print", "=", "[", "create_basic_xml_info", "(", "data", "[", "'name'", "]", ",", "fname", ")", "]", "if", "(", "not", "config", "[", "'clip'", "]", ")", ":", "to_print", ".", "append", "(", "create_clip_xml_info", "(", "data", "[", "'number_of_bases'", "]", ",", "data", "[", "'clip_adapter_left'", "]", ",", "data", "[", "'clip_adapter_right'", "]", ",", "data", "[", "'clip_qual_left'", "]", ",", "data", "[", "'clip_qual_right'", "]", ")", ")", "to_print", ".", "append", "(", "' </trace>\\n'", ")", "return", "''", ".", "join", "(", "to_print", ")" ]
nl: Given the data for one read, return a str with the XML ancillary data.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,965
snippet:
    def _get_registry(scope, window=None, tab=None):
        if window is not None and scope not in ['window', 'tab']:
            raise TypeError('window is set with scope {}'.format(scope))
        if tab is not None and scope != 'tab':
            raise TypeError('tab is set with scope {}'.format(scope))
        if scope == 'global':
            return global_registry
        elif scope == 'tab':
            return _get_tab_registry(window, tab)
        elif scope == 'window':
            return _get_window_registry(window)
        else:
            raise ValueError("Invalid scope '{}'!".format(scope))
[ "def", "_get_registry", "(", "scope", ",", "window", "=", "None", ",", "tab", "=", "None", ")", ":", "if", "(", "(", "window", "is", "not", "None", ")", "and", "(", "scope", "not", "in", "[", "'window'", ",", "'tab'", "]", ")", ")", ":", "raise", "TypeError", "(", "'window is set with scope {}'", ".", "format", "(", "scope", ")", ")", "if", "(", "(", "tab", "is", "not", "None", ")", "and", "(", "scope", "!=", "'tab'", ")", ")", ":", "raise", "TypeError", "(", "'tab is set with scope {}'", ".", "format", "(", "scope", ")", ")", "if", "(", "scope", "==", "'global'", ")", ":", "return", "global_registry", "elif", "(", "scope", "==", "'tab'", ")", ":", "return", "_get_tab_registry", "(", "window", ",", "tab", ")", "elif", "(", "scope", "==", "'window'", ")", ":", "return", "_get_window_registry", "(", "window", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid scope '{}'!\"", ".", "format", "(", "scope", ")", ")" ]
nl: Get the correct registry for a given scope.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,966
snippet:
    def addToHeadings(headingLineTable, headings, line):
        for depth in xrange(4, -1, -1):
            equalSymbolLength = depth + 2
            if line[:equalSymbolLength] == '=' * equalSymbolLength:
                headings.append(Heading(depth).getFromLine(headingLineTable, line))
                return
[ "def", "addToHeadings", "(", "headingLineTable", ",", "headings", ",", "line", ")", ":", "for", "depth", "in", "xrange", "(", "4", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "equalSymbolLength", "=", "(", "depth", "+", "2", ")", "if", "(", "line", "[", ":", "equalSymbolLength", "]", "==", "(", "'='", "*", "equalSymbolLength", ")", ")", ":", "headings", ".", "append", "(", "Heading", "(", "depth", ")", ".", "getFromLine", "(", "headingLineTable", ",", "line", ")", ")", "return" ]
nl: Add the line to the headings.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,967
snippet:
    def normcase(s):
        return s
[ "def", "normcase", "(", "s", ")", ":", "return", "s" ]
nl: Normalize the case of a pathname.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,968
snippet:
    def extract_xpi(xpi, path, expand=False, verify=True):
        expand_allow_list = ['.crx', '.jar', '.xpi', '.zip']
        tempdir = extract_zip(xpi)
        all_files = get_all_files(tempdir)
        if expand:
            for x in xrange(0, 10):
                flag = False
                for root, dirs, files in os.walk(tempdir):
                    for name in files:
                        if os.path.splitext(name)[1] in expand_allow_list:
                            src = os.path.join(root, name)
                            if not os.path.isdir(src):
                                dest = extract_zip(src, remove=True, fatal=False)
                                all_files.extend(get_all_files(
                                    dest, strip_prefix=tempdir, prefix=src))
                                if dest:
                                    copy_over(dest, src)
                                    flag = True
                if not flag:
                    break
        copy_over(tempdir, path)
        return all_files
[ "def", "extract_xpi", "(", "xpi", ",", "path", ",", "expand", "=", "False", ",", "verify", "=", "True", ")", ":", "expand_allow_list", "=", "[", "'.crx'", ",", "'.jar'", ",", "'.xpi'", ",", "'.zip'", "]", "tempdir", "=", "extract_zip", "(", "xpi", ")", "all_files", "=", "get_all_files", "(", "tempdir", ")", "if", "expand", ":", "for", "x", "in", "xrange", "(", "0", ",", "10", ")", ":", "flag", "=", "False", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "tempdir", ")", ":", "for", "name", "in", "files", ":", "if", "(", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "1", "]", "in", "expand_allow_list", ")", ":", "src", "=", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "src", ")", ")", ":", "dest", "=", "extract_zip", "(", "src", ",", "remove", "=", "True", ",", "fatal", "=", "False", ")", "all_files", ".", "extend", "(", "get_all_files", "(", "dest", ",", "strip_prefix", "=", "tempdir", ",", "prefix", "=", "src", ")", ")", "if", "dest", ":", "copy_over", "(", "dest", ",", "src", ")", "flag", "=", "True", "if", "(", "not", "flag", ")", ":", "break", "copy_over", "(", "tempdir", ",", "path", ")", "return", "all_files" ]
nl: If expand is given, nested archives (.crx, .jar, .xpi, .zip) found inside the xpi are extracted as well.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,969
snippet:
    def encodeStream(stream, filter, parameters={}):
        if filter == '/ASCIIHexDecode':
            ret = asciiHexEncode(stream)
        elif filter == '/ASCII85Decode':
            ret = ascii85Encode(stream)
        elif filter == '/LZWDecode':
            ret = lzwEncode(stream, parameters)
        elif filter == '/FlateDecode':
            ret = flateEncode(stream, parameters)
        elif filter == '/RunLengthDecode':
            ret = runLengthEncode(stream)
        elif filter == '/CCITTFaxDecode':
            ret = ccittFaxEncode(stream, parameters)
        elif filter == '/JBIG2Decode':
            ret = jbig2Encode(stream, parameters)
        elif filter == '/DCTDecode':
            ret = dctEncode(stream, parameters)
        elif filter == '/JPXDecode':
            ret = jpxEncode(stream)
        elif filter == '/Crypt':
            ret = crypt(stream, parameters)
        else:
            ret = (-1, 'Unknown filter "%s"' % filter)
        return ret
[ "def", "encodeStream", "(", "stream", ",", "filter", ",", "parameters", "=", "{", "}", ")", ":", "if", "(", "filter", "==", "'/ASCIIHexDecode'", ")", ":", "ret", "=", "asciiHexEncode", "(", "stream", ")", "elif", "(", "filter", "==", "'/ASCII85Decode'", ")", ":", "ret", "=", "ascii85Encode", "(", "stream", ")", "elif", "(", "filter", "==", "'/LZWDecode'", ")", ":", "ret", "=", "lzwEncode", "(", "stream", ",", "parameters", ")", "elif", "(", "filter", "==", "'/FlateDecode'", ")", ":", "ret", "=", "flateEncode", "(", "stream", ",", "parameters", ")", "elif", "(", "filter", "==", "'/RunLengthDecode'", ")", ":", "ret", "=", "runLengthEncode", "(", "stream", ")", "elif", "(", "filter", "==", "'/CCITTFaxDecode'", ")", ":", "ret", "=", "ccittFaxEncode", "(", "stream", ",", "parameters", ")", "elif", "(", "filter", "==", "'/JBIG2Decode'", ")", ":", "ret", "=", "jbig2Encode", "(", "stream", ",", "parameters", ")", "elif", "(", "filter", "==", "'/DCTDecode'", ")", ":", "ret", "=", "dctEncode", "(", "stream", ",", "parameters", ")", "elif", "(", "filter", "==", "'/JPXDecode'", ")", ":", "ret", "=", "jpxEncode", "(", "stream", ")", "elif", "(", "filter", "==", "'/Crypt'", ")", ":", "ret", "=", "crypt", "(", "stream", ",", "parameters", ")", "else", ":", "ret", "=", "(", "(", "-", "1", ")", ",", "(", "'Unknown filter \"%s\"'", "%", "filter", ")", ")", "return", "ret" ]
nl: Encode the given stream.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,970
snippet:
    @memoize
    def from_name(name):
        return QtGui.QIcon(name)
[ "@", "memoize", "def", "from_name", "(", "name", ")", ":", "return", "QtGui", ".", "QIcon", "(", "name", ")" ]
nl: Return a QIcon from an absolute filename or an "icons:basename" name.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,972
snippet:
    @pytest.mark.network
    def test_install_global_option(script):
        result = script.pip('install', '--global-option=--version',
                            'INITools==0.1', expect_stderr=True)
        assert '0.1\n' in result.stdout
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_install_global_option", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--global-option=--version'", ",", "'INITools==0.1'", ",", "expect_stderr", "=", "True", ")", "assert", "(", "'0.1\\n'", "in", "result", ".", "stdout", ")" ]
nl: Test using global distutils options.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,973
snippet:
    def get_context_first_matching_object(context, context_lookups):
        for key in context_lookups:
            context_object = context.get(key)
            if context_object:
                return (key, context_object)
        return (None, None)
[ "def", "get_context_first_matching_object", "(", "context", ",", "context_lookups", ")", ":", "for", "key", "in", "context_lookups", ":", "context_object", "=", "context", ".", "get", "(", "key", ")", "if", "context_object", ":", "return", "(", "key", ",", "context_object", ")", "return", "(", "None", ",", "None", ")" ]
nl: Return the first object found in the context.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 48,975
snippet:
    def profile_add(user, profile):
        ret = {}
        profiles = profile.split(',')
        known_profiles = profile_list().keys()
        valid_profiles = [p for p in profiles if p in known_profiles]
        log.debug('rbac.profile_add - profiles={0}, known_profiles={1}, valid_profiles={2}'.format(
            profiles, known_profiles, valid_profiles))
        if len(valid_profiles) > 0:
            res = __salt__['cmd.run_all']('usermod -P "{profiles}" {login}'.format(
                login=user,
                profiles=','.join(set(profile_get(user) + valid_profiles))))
            if res['retcode'] > 0:
                ret['Error'] = {'retcode': res['retcode'],
                                'message': res['stderr'] if 'stderr' in res else res['stdout']}
                return ret
        active_profiles = profile_get(user, False)
        for p in profiles:
            if p not in valid_profiles:
                ret[p] = 'Unknown'
            elif p in active_profiles:
                ret[p] = 'Added'
            else:
                ret[p] = 'Failed'
        return ret
[ "def", "profile_add", "(", "user", ",", "profile", ")", ":", "ret", "=", "{", "}", "profiles", "=", "profile", ".", "split", "(", "','", ")", "known_profiles", "=", "profile_list", "(", ")", ".", "keys", "(", ")", "valid_profiles", "=", "[", "p", "for", "p", "in", "profiles", "if", "(", "p", "in", "known_profiles", ")", "]", "log", ".", "debug", "(", "'rbac.profile_add - profiles={0}, known_profiles={1}, valid_profiles={2}'", ".", "format", "(", "profiles", ",", "known_profiles", ",", "valid_profiles", ")", ")", "if", "(", "len", "(", "valid_profiles", ")", ">", "0", ")", ":", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'usermod -P \"{profiles}\" {login}'", ".", "format", "(", "login", "=", "user", ",", "profiles", "=", "','", ".", "join", "(", "set", "(", "(", "profile_get", "(", "user", ")", "+", "valid_profiles", ")", ")", ")", ")", ")", "if", "(", "res", "[", "'retcode'", "]", ">", "0", ")", ":", "ret", "[", "'Error'", "]", "=", "{", "'retcode'", ":", "res", "[", "'retcode'", "]", ",", "'message'", ":", "(", "res", "[", "'stderr'", "]", "if", "(", "'stderr'", "in", "res", ")", "else", "res", "[", "'stdout'", "]", ")", "}", "return", "ret", "active_profiles", "=", "profile_get", "(", "user", ",", "False", ")", "for", "p", "in", "profiles", ":", "if", "(", "p", "not", "in", "valid_profiles", ")", ":", "ret", "[", "p", "]", "=", "'Unknown'", "elif", "(", "p", "in", "active_profiles", ")", ":", "ret", "[", "p", "]", "=", "'Added'", "else", ":", "ret", "[", "p", "]", "=", "'Failed'", "return", "ret" ]
nl: Add a profile to a user. user: string, username. profile: string, profile name. CLI example: .
split_within_dataset: train
is_duplicated: true
id_within_dataset: 48,976
snippet:
    def KSA(key):
        keylength = len(key)
        S = list(range(256))
        j = 0
        for i in range(256):
            j = (j + S[i] + key[i % keylength]) % 256
            S[i], S[j] = S[j], S[i]
        return S
[ "def", "KSA", "(", "key", ")", ":", "keylength", "=", "len", "(", "key", ")", "S", "=", "list", "(", "range", "(", "256", ")", ")", "j", "=", "0", "for", "i", "in", "range", "(", "256", ")", ":", "j", "=", "(", "(", "(", "j", "+", "S", "[", "i", "]", ")", "+", "key", "[", "(", "i", "%", "keylength", ")", "]", ")", "%", "256", ")", "(", "S", "[", "i", "]", ",", "S", "[", "j", "]", ")", "=", "(", "S", "[", "j", "]", ",", "S", "[", "i", "]", ")", "return", "S" ]
nl: Run the key-scheduling algorithm (KSA).
split_within_dataset: train
is_duplicated: true
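A minimal usage sketch: the key goes in as a sequence of integers, and the returned S is the permutation that would seed the RC4 keystream generator. The byte conversion below is an illustrative assumption about the caller:

    key = [ord(c) for c in 'Secret']      # key bytes as integers
    S = KSA(key)
    assert sorted(S) == list(range(256))  # S is a permutation of 0..255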
id_within_dataset: 48,978
snippet:
    def getLinkLine(line):
        linkStartIndex = line.find(globalWikiLinkStart)
        squareEndBracketIndex = line.find(']', linkStartIndex)
        greaterThanIndex = line.find('>', linkStartIndex, squareEndBracketIndex)
        greaterThanIndexPlusOne = greaterThanIndex + 1
        closeATagIndex = line.find('</a>', greaterThanIndexPlusOne, squareEndBracketIndex)
        linkText = line[closeATagIndex + len('</a>') + 1 : squareEndBracketIndex]
        linkLine = (line[:linkStartIndex]
                    + line[linkStartIndex + 1 : greaterThanIndexPlusOne]
                    + linkText + '</a>'
                    + line[squareEndBracketIndex + 1:])
        return linkLine
[ "def", "getLinkLine", "(", "line", ")", ":", "linkStartIndex", "=", "line", ".", "find", "(", "globalWikiLinkStart", ")", "squareEndBracketIndex", "=", "line", ".", "find", "(", "']'", ",", "linkStartIndex", ")", "greaterThanIndex", "=", "line", ".", "find", "(", "'>'", ",", "linkStartIndex", ",", "squareEndBracketIndex", ")", "greaterThanIndexPlusOne", "=", "(", "greaterThanIndex", "+", "1", ")", "closeATagIndex", "=", "line", ".", "find", "(", "'</a>'", ",", "greaterThanIndexPlusOne", ",", "squareEndBracketIndex", ")", "linkText", "=", "line", "[", "(", "(", "closeATagIndex", "+", "len", "(", "'</a>'", ")", ")", "+", "1", ")", ":", "squareEndBracketIndex", "]", "linkLine", "=", "(", "(", "(", "(", "line", "[", ":", "linkStartIndex", "]", "+", "line", "[", "(", "linkStartIndex", "+", "1", ")", ":", "greaterThanIndexPlusOne", "]", ")", "+", "linkText", ")", "+", "'</a>'", ")", "+", "line", "[", "(", "squareEndBracketIndex", "+", "1", ")", ":", "]", ")", "return", "linkLine" ]
nl: Get the line with its wiki-style link converted into a hypertext link.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,979
snippet:
    def is_ip(ip):
        return is_ipv4(ip) or is_ipv6(ip)
[ "def", "is_ip", "(", "ip", ")", ":", "return", "(", "is_ipv4", "(", "ip", ")", "or", "is_ipv6", "(", "ip", ")", ")" ]
nl: Returns a bool telling whether the passed IP is a valid IPv4 or IPv6 address.
split_within_dataset: train
is_duplicated: false
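The is_ipv4/is_ipv6 helpers are not part of this row; a minimal stand-in using the standard library's ipaddress module (an assumption, not the row's own implementation) could look like:

    import ipaddress

    def is_ipv4(ip):
        try:
            ipaddress.IPv4Address(ip)  # raises ValueError on bad input
            return True
        except ValueError:
            return False

    def is_ipv6(ip):
        try:
            ipaddress.IPv6Address(ip)
            return True
        except ValueError:
            return False

    print(is_ip('192.168.0.1'), is_ip('::1'), is_ip('not-an-ip'))  # True True False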
id_within_dataset: 48,980
snippet:
    @task
    def unflag_database(stdout=sys.stdout):
        log('Unflagging the database', stdout=stdout)
        unflag_reindexing_amo()
[ "@", "task", "def", "unflag_database", "(", "stdout", "=", "sys", ".", "stdout", ")", ":", "log", "(", "'Unflagging the database'", ",", "stdout", "=", "stdout", ")", "unflag_reindexing_amo", "(", ")" ]
nl: Unflag the database to indicate that the reindexing is over.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,981
snippet:
    def GetClassLookupDict(classes, labels):
        paths = {}
        for cls in classes:
            category = getattr(cls, 'category', None)
            if category:
                for path, label in InterpolatePaths(category, labels).items():
                    paths[path] = (cls, label)
        return paths
[ "def", "GetClassLookupDict", "(", "classes", ",", "labels", ")", ":", "paths", "=", "{", "}", "for", "cls", "in", "classes", ":", "category", "=", "getattr", "(", "cls", ",", "'category'", ",", "None", ")", "if", "category", ":", "for", "(", "path", ",", "label", ")", "in", "InterpolatePaths", "(", "category", ",", "labels", ")", ".", "items", "(", ")", ":", "paths", "[", "path", "]", "=", "(", "cls", ",", "label", ")", "return", "paths" ]
nl: Build a path -> class lookup dict.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,982
snippet:
    def _class_for_nxm_header(raw):
        t, has_mask, length = nxm_entry.unpack_header(raw, 0)
        c = _nxm_type_to_class.get(t)
        if c:
            return c
        vendor = (t >> 7) & 65535
        field = t & 127
        typename = 'NXM_UNKNOWN_'
        typename += '%04x_%02x' % (vendor, field)
        if has_mask:
            typename += '_MASKABLE'
        types = [_nxm_raw]
        if has_mask:
            types.append(_nxm_maskable)
        return _make_nxm(typename, vendor, field, length, types)
[ "def", "_class_for_nxm_header", "(", "raw", ")", ":", "(", "t", ",", "has_mask", ",", "length", ")", "=", "nxm_entry", ".", "unpack_header", "(", "raw", ",", "0", ")", "c", "=", "_nxm_type_to_class", ".", "get", "(", "t", ")", "if", "c", ":", "return", "c", "vendor", "=", "(", "(", "t", ">>", "7", ")", "&", "65535", ")", "field", "=", "(", "t", "&", "127", ")", "typename", "=", "'NXM_UNKNOWN_'", "typename", "+=", "(", "'%04x_%02x'", "%", "(", "vendor", ",", "field", ")", ")", "if", "has_mask", ":", "typename", "+=", "'_MASKABLE'", "types", "=", "[", "_nxm_raw", "]", "if", "has_mask", ":", "types", ".", "append", "(", "_nxm_maskable", ")", "return", "_make_nxm", "(", "typename", ",", "vendor", ",", "field", ",", "length", ",", "types", ")" ]
nl: Given a raw nxm_entry header, return the matching class, synthesizing one for unknown types.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,983
snippet:
    def hexify(sum):
        if sum is None:
            return 'None'
        return hexify_format % tuple(map(ord, sum))
[ "def", "hexify", "(", "sum", ")", ":", "if", "(", "sum", "is", "None", ")", ":", "return", "'None'", "return", "(", "hexify_format", "%", "tuple", "(", "map", "(", "ord", ",", "sum", ")", ")", ")" ]
nl: Return a hex representation of a 16-byte string.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,984
snippet:
    def s_dword(value, endian='<', format='binary', signed=False,
                full_range=False, fuzzable=True, name=None):
        dword = primitives.dword(value, endian, format, signed, full_range, fuzzable, name)
        blocks.CURRENT.push(dword)
[ "def", "s_dword", "(", "value", ",", "endian", "=", "'<'", ",", "format", "=", "'binary'", ",", "signed", "=", "False", ",", "full_range", "=", "False", ",", "fuzzable", "=", "True", ",", "name", "=", "None", ")", ":", "dword", "=", "primitives", ".", "dword", "(", "value", ",", "endian", ",", "format", ",", "signed", ",", "full_range", ",", "fuzzable", ",", "name", ")", "blocks", ".", "CURRENT", ".", "push", "(", "dword", ")" ]
nl: Push a double word onto the current block stack.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,985
snippet:
    def status_before_must_be(*valid_start_statuses):
        def decorator_func(func):
            """Decorator function that gets returned"""
            @functools.wraps(func)
            def with_status_check(obj, *args, **kwargs):
                if obj.status not in valid_start_statuses:
                    exception_msg = u"Error calling {} {}: status is '{}', must be one of: {}".format(
                        func, obj, obj.status, valid_start_statuses)
                    raise VerificationException(exception_msg)
                return func(obj, *args, **kwargs)
            return with_status_check
        return decorator_func
[ "def", "status_before_must_be", "(", "*", "valid_start_statuses", ")", ":", "def", "decorator_func", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "with_status_check", "(", "obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "obj", ".", "status", "not", "in", "valid_start_statuses", ")", ":", "exception_msg", "=", "u\"Error calling {} {}: status is '{}', must be one of: {}\"", ".", "format", "(", "func", ",", "obj", ",", "obj", ".", "status", ",", "valid_start_statuses", ")", "raise", "VerificationException", "(", "exception_msg", ")", "return", "func", "(", "obj", ",", "*", "args", ",", "**", "kwargs", ")", "return", "with_status_check", "return", "decorator_func" ]
nl: Helper decorator with arguments to make sure that an object with a status attribute is in one of a list of acceptable status states before a method is called.
split_within_dataset: train
is_duplicated: false
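A minimal usage sketch of the decorator; the Attempt class and the VerificationException stub are illustrative assumptions (the row itself also needs functools in scope):

    import functools

    class VerificationException(Exception):
        pass

    class Attempt(object):
        def __init__(self):
            self.status = 'created'

        @status_before_must_be('created', 'ready')
        def submit(self):
            self.status = 'submitted'

    attempt = Attempt()
    attempt.submit()  # ok: status was 'created'
    attempt.submit()  # raises VerificationException: status is now 'submitted'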
id_within_dataset: 48,986
snippet:
    def serialize_all(nodes, stream=None, Dumper=Dumper, canonical=None,
                      indent=None, width=None, allow_unicode=None, line_break=None,
                      encoding='utf-8', explicit_start=None, explicit_end=None,
                      version=None, tags=None):
        getvalue = None
        if stream is None:
            try:
                from cStringIO import StringIO
            except ImportError:
                from StringIO import StringIO
            stream = StringIO()
            getvalue = stream.getvalue
        dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
                        allow_unicode=allow_unicode, line_break=line_break,
                        encoding=encoding, version=version, tags=tags,
                        explicit_start=explicit_start, explicit_end=explicit_end)
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
        if getvalue:
            return getvalue()
[ "def", "serialize_all", "(", "nodes", ",", "stream", "=", "None", ",", "Dumper", "=", "Dumper", ",", "canonical", "=", "None", ",", "indent", "=", "None", ",", "width", "=", "None", ",", "allow_unicode", "=", "None", ",", "line_break", "=", "None", ",", "encoding", "=", "'utf-8'", ",", "explicit_start", "=", "None", ",", "explicit_end", "=", "None", ",", "version", "=", "None", ",", "tags", "=", "None", ")", ":", "getvalue", "=", "None", "if", "(", "stream", "is", "None", ")", ":", "try", ":", "from", "cStringIO", "import", "StringIO", "except", "ImportError", ":", "from", "StringIO", "import", "StringIO", "stream", "=", "StringIO", "(", ")", "getvalue", "=", "stream", ".", "getvalue", "dumper", "=", "Dumper", "(", "stream", ",", "canonical", "=", "canonical", ",", "indent", "=", "indent", ",", "width", "=", "width", ",", "allow_unicode", "=", "allow_unicode", ",", "line_break", "=", "line_break", ",", "encoding", "=", "encoding", ",", "version", "=", "version", ",", "tags", "=", "tags", ",", "explicit_start", "=", "explicit_start", ",", "explicit_end", "=", "explicit_end", ")", "dumper", ".", "open", "(", ")", "for", "node", "in", "nodes", ":", "dumper", ".", "serialize", "(", "node", ")", "dumper", ".", "close", "(", ")", "if", "getvalue", ":", "return", "getvalue", "(", ")" ]
nl: Serialize a sequence of representation trees into a YAML stream.
split_within_dataset: train
is_duplicated: true
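This looks like PyYAML's serialize_all, which takes representation trees rather than plain objects; a round trip through the public API of a current PyYAML shows what that means (yaml.compose parses a document into a node):

    import yaml

    node = yaml.compose('a: 1\nb: [2, 3]')  # representation tree (a MappingNode)
    print(yaml.serialize_all([node]))        # back to YAML text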
id_within_dataset: 48,987
snippet:
    def _country_code_from_ip(ip_addr):
        if ip_addr.find(':') >= 0:
            return pygeoip.GeoIP(settings.GEOIPV6_PATH).country_code_by_addr(ip_addr)
        else:
            return pygeoip.GeoIP(settings.GEOIP_PATH).country_code_by_addr(ip_addr)
[ "def", "_country_code_from_ip", "(", "ip_addr", ")", ":", "if", "(", "ip_addr", ".", "find", "(", "':'", ")", ">=", "0", ")", ":", "return", "pygeoip", ".", "GeoIP", "(", "settings", ".", "GEOIPV6_PATH", ")", ".", "country_code_by_addr", "(", "ip_addr", ")", "else", ":", "return", "pygeoip", ".", "GeoIP", "(", "settings", ".", "GEOIP_PATH", ")", ".", "country_code_by_addr", "(", "ip_addr", ")" ]
nl: Return the country code associated with an IP address.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,989
snippet:
    def make_step_decorator(context, instance, update_instance_progress, total_offset=0):
        step_info = dict(total=total_offset, current=0)

        def bump_progress():
            step_info['current'] += 1
            update_instance_progress(context, instance,
                                     step_info['current'], step_info['total'])

        def step_decorator(f):
            step_info['total'] += 1

            @functools.wraps(f)
            def inner(*args, **kwargs):
                rv = f(*args, **kwargs)
                bump_progress()
                return rv
            return inner

        return step_decorator
[ "def", "make_step_decorator", "(", "context", ",", "instance", ",", "update_instance_progress", ",", "total_offset", "=", "0", ")", ":", "step_info", "=", "dict", "(", "total", "=", "total_offset", ",", "current", "=", "0", ")", "def", "bump_progress", "(", ")", ":", "step_info", "[", "'current'", "]", "+=", "1", "update_instance_progress", "(", "context", ",", "instance", ",", "step_info", "[", "'current'", "]", ",", "step_info", "[", "'total'", "]", ")", "def", "step_decorator", "(", "f", ")", ":", "step_info", "[", "'total'", "]", "+=", "1", "@", "functools", ".", "wraps", "(", "f", ")", "def", "inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "rv", "=", "f", "(", "*", "args", ",", "**", "kwargs", ")", "bump_progress", "(", ")", "return", "rv", "return", "inner", "return", "step_decorator" ]
nl: Factory to create a decorator that records instance progress as a series of discrete steps.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,990
snippet:
    def appendInputWithSimilarValues(inputs):
        numInputs = len(inputs)
        for i in xrange(numInputs):
            input = inputs[i]
            for j in xrange(len(input) - 1):
                if input[j] == 1 and input[j + 1] == 0:
                    newInput = copy.deepcopy(input)
                    newInput[j] = 0
                    newInput[j + 1] = 1
                    inputs.append(newInput)
                    break
[ "def", "appendInputWithSimilarValues", "(", "inputs", ")", ":", "numInputs", "=", "len", "(", "inputs", ")", "for", "i", "in", "xrange", "(", "numInputs", ")", ":", "input", "=", "inputs", "[", "i", "]", "for", "j", "in", "xrange", "(", "(", "len", "(", "input", ")", "-", "1", ")", ")", ":", "if", "(", "(", "input", "[", "j", "]", "==", "1", ")", "and", "(", "input", "[", "(", "j", "+", "1", ")", "]", "==", "0", ")", ")", ":", "newInput", "=", "copy", ".", "deepcopy", "(", "input", ")", "newInput", "[", "j", "]", "=", "0", "newInput", "[", "(", "j", "+", "1", ")", "]", "=", "1", "inputs", ".", "append", "(", "newInput", ")", "break" ]
nl: Creates a one-off record for each record in the inputs.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 48,992
snippet:
    def get_ancestors_of(doctype, name):
        lft, rgt = frappe.db.get_value(doctype, name, [u'lft', u'rgt'])
        result = frappe.db.sql_list(
            u'select name from `tab%s`\n\t\twhere lft<%s and rgt>%s order by lft desc'
            % (doctype, u'%s', u'%s'), (lft, rgt))
        return result or []
[ "def", "get_ancestors_of", "(", "doctype", ",", "name", ")", ":", "(", "lft", ",", "rgt", ")", "=", "frappe", ".", "db", ".", "get_value", "(", "doctype", ",", "name", ",", "[", "u'lft'", ",", "u'rgt'", "]", ")", "result", "=", "frappe", ".", "db", ".", "sql_list", "(", "(", "u'select name from `tab%s`\\n DCTB DCTB where lft<%s and rgt>%s order by lft desc'", "%", "(", "doctype", ",", "u'%s'", ",", "u'%s'", ")", ")", ",", "(", "lft", ",", "rgt", ")", ")", "return", "(", "result", "or", "[", "]", ")" ]
nl: Get ancestor elements of a doctype with a tree structure.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,993
snippet:
    def get_tag_mode(view, tag_mode_config):
        default_mode = None
        syntax = view.settings().get('syntax')
        language = splitext(basename(syntax))[0].lower() if syntax is not None else 'plain text'
        if isinstance(tag_mode_config, list):
            for item in tag_mode_config:
                if isinstance(item, dict) and compare_languge(language, item.get('syntax', [])):
                    first_line = item.get('first_line', '')
                    if first_line:
                        size = view.size() - 1
                        if size > 256:
                            size = 256
                        if (isinstance(first_line, str) and
                                bre.compile_search(first_line, bre.I).match(
                                    view.substr(sublime.Region(0, size)))):
                            return item.get('mode', default_mode)
                    else:
                        return item.get('mode', default_mode)
        return default_mode
[ "def", "get_tag_mode", "(", "view", ",", "tag_mode_config", ")", ":", "default_mode", "=", "None", "syntax", "=", "view", ".", "settings", "(", ")", ".", "get", "(", "'syntax'", ")", "language", "=", "(", "splitext", "(", "basename", "(", "syntax", ")", ")", "[", "0", "]", ".", "lower", "(", ")", "if", "(", "syntax", "is", "not", "None", ")", "else", "'plain text'", ")", "if", "isinstance", "(", "tag_mode_config", ",", "list", ")", ":", "for", "item", "in", "tag_mode_config", ":", "if", "(", "isinstance", "(", "item", ",", "dict", ")", "and", "compare_languge", "(", "language", ",", "item", ".", "get", "(", "'syntax'", ",", "[", "]", ")", ")", ")", ":", "first_line", "=", "item", ".", "get", "(", "'first_line'", ",", "''", ")", "if", "first_line", ":", "size", "=", "(", "view", ".", "size", "(", ")", "-", "1", ")", "if", "(", "size", ">", "256", ")", ":", "size", "=", "256", "if", "(", "isinstance", "(", "first_line", ",", "str", ")", "and", "bre", ".", "compile_search", "(", "first_line", ",", "bre", ".", "I", ")", ".", "match", "(", "view", ".", "substr", "(", "sublime", ".", "Region", "(", "0", ",", "size", ")", ")", ")", ")", ":", "return", "item", ".", "get", "(", "'mode'", ",", "default_mode", ")", "else", ":", "return", "item", ".", "get", "(", "'mode'", ",", "default_mode", ")", "return", "default_mode" ]
nl: Get the tag mode.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,995
snippet:
    @frappe.whitelist()
    def set_indicator(board_name, column_name, indicator):
        board = frappe.get_doc(u'Kanban Board', board_name)
        for column in board.columns:
            if column.column_name == column_name:
                column.indicator = indicator
        board.save()
        return board
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "set_indicator", "(", "board_name", ",", "column_name", ",", "indicator", ")", ":", "board", "=", "frappe", ".", "get_doc", "(", "u'Kanban Board'", ",", "board_name", ")", "for", "column", "in", "board", ".", "columns", ":", "if", "(", "column", ".", "column_name", "==", "column_name", ")", ":", "column", ".", "indicator", "=", "indicator", "board", ".", "save", "(", ")", "return", "board" ]
nl: Set the indicator color of a column.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,996
snippet:
    def filer_actions(context):
        context[u'action_index'] = context.get(u'action_index', -1) + 1
        return context
[ "def", "filer_actions", "(", "context", ")", ":", "context", "[", "u'action_index'", "]", "=", "(", "context", ".", "get", "(", "u'action_index'", ",", "(", "-", "1", ")", ")", "+", "1", ")", "return", "context" ]
nl: Track the number of times the action field has been rendered on the page.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 48,998
snippet:
    def powered_off(name):
        return _virt_call(name, 'stop', 'unpowered', 'Machine has been powered off')
[ "def", "powered_off", "(", "name", ")", ":", "return", "_virt_call", "(", "name", ",", "'stop'", ",", "'unpowered'", ",", "'Machine has been powered off'", ")" ]
nl: Stop a VM by powering it off.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,000
snippet:
    def encode_cookie(payload):
        return u'{0}|{1}'.format(payload, _cookie_digest(payload))
[ "def", "encode_cookie", "(", "payload", ")", ":", "return", "u'{0}|{1}'", ".", "format", "(", "payload", ",", "_cookie_digest", "(", "payload", ")", ")" ]
nl: Encode a unicode value into a cookie.
split_within_dataset: train
is_duplicated: true
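The _cookie_digest helper is not shown in this row; a digest in this style is typically an HMAC over the payload keyed with an application secret. A self-contained sketch under that assumption (SECRET_KEY is illustrative, not the row's own code):

    import hashlib
    import hmac

    SECRET_KEY = b'change-me'  # assumption: normally the app's configured secret

    def _cookie_digest(payload):
        return hmac.new(SECRET_KEY, payload.encode('utf-8'), hashlib.sha512).hexdigest()

    print(encode_cookie(u'user-42'))  # 'user-42|<hex digest>'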
id_within_dataset: 49,001
snippet:
    def win_service_iter():
        for name, display_name in cext.winservice_enumerate():
            yield WindowsService(name, display_name)
[ "def", "win_service_iter", "(", ")", ":", "for", "(", "name", ",", "display_name", ")", "in", "cext", ".", "winservice_enumerate", "(", ")", ":", "(", "yield", "WindowsService", "(", "name", ",", "display_name", ")", ")" ]
nl: Return a list of WindowsService instances.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,004
snippet:
    def disable_job(name=None):
        if not name:
            raise SaltInvocationError('Required parameter `name` is missing.')
        server = _connect()
        if not job_exists(name):
            raise SaltInvocationError('Job `{0}` does not exists.'.format(name))
        try:
            server.disable_job(name)
        except jenkins.JenkinsException as err:
            raise SaltInvocationError('Something went wrong {0}.'.format(err))
        return True
[ "def", "disable_job", "(", "name", "=", "None", ")", ":", "if", "(", "not", "name", ")", ":", "raise", "SaltInvocationError", "(", "'Required parameter `name` is missing.'", ")", "server", "=", "_connect", "(", ")", "if", "(", "not", "job_exists", "(", "name", ")", ")", ":", "raise", "SaltInvocationError", "(", "'Job `{0}` does not exists.'", ".", "format", "(", "name", ")", ")", "try", ":", "server", ".", "disable_job", "(", "name", ")", "except", "jenkins", ".", "JenkinsException", "as", "err", ":", "raise", "SaltInvocationError", "(", "'Something went wrong {0}.'", ".", "format", "(", "err", ")", ")", "return", "True" ]
nl: Return True if the job is disabled successfully.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 49,005
snippet:
    def wang_ryzin_reg(h, Xi, x):
        return h ** abs(Xi - x)
[ "def", "wang_ryzin_reg", "(", "h", ",", "Xi", ",", "x", ")", ":", "return", "(", "h", "**", "abs", "(", "(", "Xi", "-", "x", ")", ")", ")" ]
nl: A version of the Wang-Ryzin kernel for nonparametric regression.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,007
snippet:
    def delete_resources_on_service_clients(logical_line, physical_line, filename,
                                            line_number, lines):
        if not _common_service_clients_check(logical_line, physical_line, filename,
                                             'ignored_list_T111.txt'):
            return
        for line in lines[line_number:]:
            if METHOD.match(line) or CLASS.match(line):
                return
            if 'self.delete(' not in line and 'self.delete_resource(' not in line:
                continue
            if METHOD_DELETE_RESOURCE.match(logical_line):
                return
            msg = 'T111: [DELETE /resources/<id>] methods should be delete_<resource name>'
            yield (0, msg)
[ "def", "delete_resources_on_service_clients", "(", "logical_line", ",", "physical_line", ",", "filename", ",", "line_number", ",", "lines", ")", ":", "if", "(", "not", "_common_service_clients_check", "(", "logical_line", ",", "physical_line", ",", "filename", ",", "'ignored_list_T111.txt'", ")", ")", ":", "return", "for", "line", "in", "lines", "[", "line_number", ":", "]", ":", "if", "(", "METHOD", ".", "match", "(", "line", ")", "or", "CLASS", ".", "match", "(", "line", ")", ")", ":", "return", "if", "(", "(", "'self.delete('", "not", "in", "line", ")", "and", "(", "'self.delete_resource('", "not", "in", "line", ")", ")", ":", "continue", "if", "METHOD_DELETE_RESOURCE", ".", "match", "(", "logical_line", ")", ":", "return", "msg", "=", "'T111: [DELETE /resources/<id>] methods should be delete_<resource name>'", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
nl: Check that service client names for DELETE methods are consistent (T111).
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,009
snippet:
    def _SecretName(user):
        return '{0}_otp'.format(user)
[ "def", "_SecretName", "(", "user", ")", ":", "return", "'{0}_otp'", ".", "format", "(", "user", ")" ]
nl: Returns the name of the secret file for the specified user.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,010
snippet:
    def triangulate_points(x, y):
        centers, edges, tri, neighbors = md.delaunay(x, y)
        return tri
[ "def", "triangulate_points", "(", "x", ",", "y", ")", ":", "(", "centers", ",", "edges", ",", "tri", ",", "neighbors", ")", "=", "md", ".", "delaunay", "(", "x", ",", "y", ")", "return", "tri" ]
nl: Delaunay triangulation of 2D points.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,012
snippet:
    def _pd_read_hdf(path, key, lock, kwargs):
        if lock:
            lock.acquire()
        try:
            result = pd.read_hdf(path, key, **kwargs)
        finally:
            if lock:
                lock.release()
        return result
[ "def", "_pd_read_hdf", "(", "path", ",", "key", ",", "lock", ",", "kwargs", ")", ":", "if", "lock", ":", "lock", ".", "acquire", "(", ")", "try", ":", "result", "=", "pd", ".", "read_hdf", "(", "path", ",", "key", ",", "**", "kwargs", ")", "finally", ":", "if", "lock", ":", "lock", ".", "release", "(", ")", "return", "result" ]
nl: Read from an HDF5 file with a lock.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,013
snippet:
    def _uri_split(uri):
        scheme, netloc, path, query, fragment = _safe_urlsplit(uri)
        port = None
        if '@' in netloc:
            auth, hostname = netloc.split('@', 1)
        else:
            auth = None
            hostname = netloc
        if hostname:
            if ':' in hostname:
                hostname, port = hostname.split(':', 1)
        return (scheme, auth, hostname, port, path, query, fragment)
[ "def", "_uri_split", "(", "uri", ")", ":", "(", "scheme", ",", "netloc", ",", "path", ",", "query", ",", "fragment", ")", "=", "_safe_urlsplit", "(", "uri", ")", "port", "=", "None", "if", "(", "'@'", "in", "netloc", ")", ":", "(", "auth", ",", "hostname", ")", "=", "netloc", ".", "split", "(", "'@'", ",", "1", ")", "else", ":", "auth", "=", "None", "hostname", "=", "netloc", "if", "hostname", ":", "if", "(", "':'", "in", "hostname", ")", ":", "(", "hostname", ",", "port", ")", "=", "hostname", ".", "split", "(", "':'", ",", "1", ")", "return", "(", "scheme", ",", "auth", ",", "hostname", ",", "port", ",", "path", ",", "query", ",", "fragment", ")" ]
nl: Splits up a URI or IRI.
split_within_dataset: train
is_duplicated: false
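To see the output shape, the standard library's urlsplit can stand in for the _safe_urlsplit helper (an assumption; the row's own helper is not shown):

    from urllib.parse import urlsplit as _safe_urlsplit

    print(_uri_split('https://user:pw@example.com:8080/path?q=1#frag'))
    # ('https', 'user:pw', 'example.com', '8080', '/path', 'q=1', 'frag')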
id_within_dataset: 49,014
snippet:
    def validate_port(confvar):
        port_val = confvar.get()
        error_res = [(confvar, 'Port should be an integer between 0 and 65535 (inclusive).')]
        try:
            port = int(port_val)
            if port < 0 or port > 65535:
                return error_res
        except ValueError:
            return error_res
        return []
[ "def", "validate_port", "(", "confvar", ")", ":", "port_val", "=", "confvar", ".", "get", "(", ")", "error_res", "=", "[", "(", "confvar", ",", "'Port should be an integer between 0 and 65535 (inclusive).'", ")", "]", "try", ":", "port", "=", "int", "(", "port_val", ")", "if", "(", "(", "port", "<", "0", ")", "or", "(", "port", ">", "65535", ")", ")", ":", "return", "error_res", "except", "ValueError", ":", "return", "error_res", "return", "[", "]" ]
nl: Validate that the value of confvar is an integer between 0 and 65535 (inclusive).
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,015
snippet:
    def get_available_translations():
        locale_path = get_module_data_path('spyder', relpath='locale',
                                           attr_name='LOCALEPATH')
        listdir = os.listdir(locale_path)
        langs = [d for d in listdir if osp.isdir(osp.join(locale_path, d))]
        langs = [DEFAULT_LANGUAGE] + langs
        langs = list(set(langs) - set(DISABLED_LANGUAGES))
        for lang in langs:
            if lang not in LANGUAGE_CODES:
                error = _('Update LANGUAGE_CODES (inside config/base.py) if a new '
                          'translation has been added to Spyder')
                raise Exception(error)
        return langs
[ "def", "get_available_translations", "(", ")", ":", "locale_path", "=", "get_module_data_path", "(", "'spyder'", ",", "relpath", "=", "'locale'", ",", "attr_name", "=", "'LOCALEPATH'", ")", "listdir", "=", "os", ".", "listdir", "(", "locale_path", ")", "langs", "=", "[", "d", "for", "d", "in", "listdir", "if", "osp", ".", "isdir", "(", "osp", ".", "join", "(", "locale_path", ",", "d", ")", ")", "]", "langs", "=", "(", "[", "DEFAULT_LANGUAGE", "]", "+", "langs", ")", "langs", "=", "list", "(", "(", "set", "(", "langs", ")", "-", "set", "(", "DISABLED_LANGUAGES", ")", ")", ")", "for", "lang", "in", "langs", ":", "if", "(", "lang", "not", "in", "LANGUAGE_CODES", ")", ":", "error", "=", "_", "(", "'Update LANGUAGE_CODES (inside config/base.py) if a new translation has been added to Spyder'", ")", "raise", "Exception", "(", "error", ")", "return", "langs" ]
nl: List available translations for Spyder based on the folders found in the locale folder.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 49,016
snippet:
    def set_memcached(key, obj, lock=True):
        if lock:
            if create_lock(key):
                cache.set(key, cPickle.dumps(obj), 2592000)
                delete_lock(key)
        else:
            cache.set(key, cPickle.dumps(obj), 2592000)
[ "def", "set_memcached", "(", "key", ",", "obj", ",", "lock", "=", "True", ")", ":", "if", "lock", ":", "if", "create_lock", "(", "key", ")", ":", "cache", ".", "set", "(", "key", ",", "cPickle", ".", "dumps", "(", "obj", ")", ",", "2592000", ")", "delete_lock", "(", "key", ")", "else", ":", "cache", ".", "set", "(", "key", ",", "cPickle", ".", "dumps", "(", "obj", ")", ",", "2592000", ")" ]
nl: Serialize an object and add it to memcached.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,018
snippet:
    def fix_all_html(container):
        for name, mt in container.mime_map.iteritems():
            if mt in OEB_DOCS:
                container.parsed(name)
                container.dirty(name)
[ "def", "fix_all_html", "(", "container", ")", ":", "for", "(", "name", ",", "mt", ")", "in", "container", ".", "mime_map", ".", "iteritems", "(", ")", ":", "if", "(", "mt", "in", "OEB_DOCS", ")", ":", "container", ".", "parsed", "(", "name", ")", "container", ".", "dirty", "(", "name", ")" ]
nl: Fix any parsing errors in all HTML files in the container.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,019
snippet:
    @patch('twilio.rest.resources.base.Resource.request')
    def test_delete(req):
        resp = Mock()
        resp.content = ''
        resp.status_code = 204
        req.return_value = (resp, {})
        app = Call(list_resource, 'CA123')
        app.delete()
        uri = 'https://api.twilio.com/2010-04-01/Accounts/AC123/Calls/CA123'
        req.assert_called_with('DELETE', uri)
[ "@", "patch", "(", "'twilio.rest.resources.base.Resource.request'", ")", "def", "test_delete", "(", "req", ")", ":", "resp", "=", "Mock", "(", ")", "resp", ".", "content", "=", "''", "resp", ".", "status_code", "=", "204", "req", ".", "return_value", "=", "(", "resp", ",", "{", "}", ")", "app", "=", "Call", "(", "list_resource", ",", "'CA123'", ")", "app", ".", "delete", "(", ")", "uri", "=", "'https://api.twilio.com/2010-04-01/Accounts/AC123/Calls/CA123'", "req", ".", "assert_called_with", "(", "'DELETE'", ",", "uri", ")" ]
nl: Deleting a call should work.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,020
snippet:
    def update_linode(linode_id, update_args=None):
        update_args.update({'LinodeID': linode_id})
        result = _query('linode', 'update', args=update_args)
        return _clean_data(result)
[ "def", "update_linode", "(", "linode_id", ",", "update_args", "=", "None", ")", ":", "update_args", ".", "update", "(", "{", "'LinodeID'", ":", "linode_id", "}", ")", "result", "=", "_query", "(", "'linode'", ",", "'update'", ",", "args", "=", "update_args", ")", "return", "_clean_data", "(", "result", ")" ]
nl: Updates a Linode's properties.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 49,021
snippet:
    def p_statement_interactive(p):
        p[0] = (0, (p[1], 0))
[ "def", "p_statement_interactive", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "0", ",", "(", "p", "[", "1", "]", ",", "0", ")", ")" ]
nl: statement : run newline | list newline | new newline.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,022
snippet:
    @gen.coroutine
    def HidePhotos(client, obj_store, user_id, device_id, request):
        request['user_id'] = user_id
        yield gen.Task(Operation.CreateAndExecute, client, user_id, device_id,
                       'HidePhotosOperation.Execute', request)
        num_photos = sum(len(ep_dict['photo_ids']) for ep_dict in request['episodes'])
        logging.info('HIDE PHOTOS: user: %d, device: %d, %d photos'
                     % (user_id, device_id, num_photos))
        raise gen.Return({})
[ "@", "gen", ".", "coroutine", "def", "HidePhotos", "(", "client", ",", "obj_store", ",", "user_id", ",", "device_id", ",", "request", ")", ":", "request", "[", "'user_id'", "]", "=", "user_id", "(", "yield", "gen", ".", "Task", "(", "Operation", ".", "CreateAndExecute", ",", "client", ",", "user_id", ",", "device_id", ",", "'HidePhotosOperation.Execute'", ",", "request", ")", ")", "num_photos", "=", "sum", "(", "(", "len", "(", "ep_dict", "[", "'photo_ids'", "]", ")", "for", "ep_dict", "in", "request", "[", "'episodes'", "]", ")", ")", "logging", ".", "info", "(", "(", "'HIDE PHOTOS: user: %d, device: %d, %d photos'", "%", "(", "user_id", ",", "device_id", ",", "num_photos", ")", ")", ")", "raise", "gen", ".", "Return", "(", "{", "}", ")" ]
nl: Hides photos from a user's personal library and inbox view.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,025
snippet:
    def import_attribute(name):
        path, attr = name.rsplit('.', 1)
        module = __import__(path, globals(), locals(), [attr])
        return getattr(module, attr)
[ "def", "import_attribute", "(", "name", ")", ":", "(", "path", ",", "attr", ")", "=", "name", ".", "rsplit", "(", "'.'", ",", "1", ")", "module", "=", "__import__", "(", "path", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "[", "attr", "]", ")", "return", "getattr", "(", "module", ",", "attr", ")" ]
nl: Import an attribute using a string reference.
split_within_dataset: train
is_duplicated: false
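Usage is straightforward: any dotted path whose last segment is a module-level attribute works.

    join = import_attribute('os.path.join')
    print(join('tmp', 'file.txt'))  # 'tmp/file.txt' on POSIX

    OrderedDict = import_attribute('collections.OrderedDict')
    print(OrderedDict([('a', 1)]))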
id_within_dataset: 49,026
snippet:
    def decode_json(json_string):
        return json.loads(unicodehelper.decode(json_string))
[ "def", "decode_json", "(", "json_string", ")", ":", "return", "json", ".", "loads", "(", "unicodehelper", ".", "decode", "(", "json_string", ")", ")" ]
nl: Helper that transparently handles BOM encoding.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,028
snippet:
    def random_lobster(n, p1, p2, seed=None):
        if seed is not None:
            random.seed(seed)
        llen = int(2 * random.random() * n + 0.5)
        L = path_graph(llen)
        L.name = 'random_lobster(%d,%s,%s)' % (n, p1, p2)
        current_node = llen - 1
        for n in range(llen):
            if random.random() < p1:
                current_node += 1
                L.add_edge(n, current_node)
                if random.random() < p2:
                    current_node += 1
                    L.add_edge(current_node - 1, current_node)
        return L
[ "def", "random_lobster", "(", "n", ",", "p1", ",", "p2", ",", "seed", "=", "None", ")", ":", "if", "(", "seed", "is", "not", "None", ")", ":", "random", ".", "seed", "(", "seed", ")", "llen", "=", "int", "(", "(", "(", "(", "2", "*", "random", ".", "random", "(", ")", ")", "*", "n", ")", "+", "0.5", ")", ")", "L", "=", "path_graph", "(", "llen", ")", "L", ".", "name", "=", "(", "'random_lobster(%d,%s,%s)'", "%", "(", "n", ",", "p1", ",", "p2", ")", ")", "current_node", "=", "(", "llen", "-", "1", ")", "for", "n", "in", "range", "(", "llen", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "p1", ")", ":", "current_node", "+=", "1", "L", ".", "add_edge", "(", "n", ",", "current_node", ")", "if", "(", "random", ".", "random", "(", ")", "<", "p2", ")", ":", "current_node", "+=", "1", "L", ".", "add_edge", "(", "(", "current_node", "-", "1", ")", ",", "current_node", ")", "return", "L" ]
nl: Returns a random lobster graph.
split_within_dataset: train
is_duplicated: false
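The same generator ships with NetworkX, so a usage sketch against the library (rather than this standalone copy) is the easiest way to try it:

    import networkx as nx

    G = nx.random_lobster(10, 0.6, 0.3, seed=42)
    # a lobster is a tree, so edges == nodes - 1
    print(G.number_of_nodes(), G.number_of_edges())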
id_within_dataset: 49,029
snippet:
    def stubout_fetch_disk_image(stubs, raise_failure=False):
        def _fake_fetch_disk_image(context, session, instance, name_label, image, image_type):
            if raise_failure:
                raise XenAPI.Failure('Test Exception raised by fake fetch_image_glance_disk')
            elif image_type == vm_utils.ImageType.KERNEL:
                filename = 'kernel'
            elif image_type == vm_utils.ImageType.RAMDISK:
                filename = 'ramdisk'
            else:
                filename = 'unknown'
            vdi_type = vm_utils.ImageType.to_string(image_type)
            return {vdi_type: dict(uuid=None, file=filename)}
        stubs.Set(vm_utils, '_fetch_disk_image', _fake_fetch_disk_image)
[ "def", "stubout_fetch_disk_image", "(", "stubs", ",", "raise_failure", "=", "False", ")", ":", "def", "_fake_fetch_disk_image", "(", "context", ",", "session", ",", "instance", ",", "name_label", ",", "image", ",", "image_type", ")", ":", "if", "raise_failure", ":", "raise", "XenAPI", ".", "Failure", "(", "'Test Exception raised by fake fetch_image_glance_disk'", ")", "elif", "(", "image_type", "==", "vm_utils", ".", "ImageType", ".", "KERNEL", ")", ":", "filename", "=", "'kernel'", "elif", "(", "image_type", "==", "vm_utils", ".", "ImageType", ".", "RAMDISK", ")", ":", "filename", "=", "'ramdisk'", "else", ":", "filename", "=", "'unknown'", "vdi_type", "=", "vm_utils", ".", "ImageType", ".", "to_string", "(", "image_type", ")", "return", "{", "vdi_type", ":", "dict", "(", "uuid", "=", "None", ",", "file", "=", "filename", ")", "}", "stubs", ".", "Set", "(", "vm_utils", ",", "'_fetch_disk_image'", ",", "_fake_fetch_disk_image", ")" ]
nl: Simulates a failure in fetch_image_glance_disk.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,031
snippet:
    def infer_reuse_pattern(fgraph, outputs_to_disown):
        rval = set()
        for o in outputs_to_disown:
            view_tree_set(alias_root(o), rval)
        rval = set(r for r in rval if r.owner is not None)
        return rval
[ "def", "infer_reuse_pattern", "(", "fgraph", ",", "outputs_to_disown", ")", ":", "rval", "=", "set", "(", ")", "for", "o", "in", "outputs_to_disown", ":", "view_tree_set", "(", "alias_root", "(", "o", ")", ",", "rval", ")", "rval", "=", "set", "(", "(", "r", "for", "r", "in", "rval", "if", "(", "r", ".", "owner", "is", "not", "None", ")", ")", ")", "return", "rval" ]
nl: Given an fgraph and a list of variables, return the set of variables that may share underlying storage with them.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,032
snippet:
    def maximum_position(input, labels=None, index=None):
        dims = numpy.array(numpy.asarray(input).shape)
        dim_prod = numpy.cumprod([1] + list(dims[:0:-1]))[::-1]
        result = _select(input, labels, index, find_max_positions=True)[0]
        if numpy.isscalar(result):
            return tuple((result // dim_prod) % dims)
        return [tuple(v) for v in (result.reshape(-1, 1) // dim_prod) % dims]
[ "def", "maximum_position", "(", "input", ",", "labels", "=", "None", ",", "index", "=", "None", ")", ":", "dims", "=", "numpy", ".", "array", "(", "numpy", ".", "asarray", "(", "input", ")", ".", "shape", ")", "dim_prod", "=", "numpy", ".", "cumprod", "(", "(", "[", "1", "]", "+", "list", "(", "dims", "[", ":", "0", ":", "(", "-", "1", ")", "]", ")", ")", ")", "[", ":", ":", "(", "-", "1", ")", "]", "result", "=", "_select", "(", "input", ",", "labels", ",", "index", ",", "find_max_positions", "=", "True", ")", "[", "0", "]", "if", "numpy", ".", "isscalar", "(", "result", ")", ":", "return", "tuple", "(", "(", "(", "result", "//", "dim_prod", ")", "%", "dims", ")", ")", "return", "[", "tuple", "(", "v", ")", "for", "v", "in", "(", "(", "result", ".", "reshape", "(", "(", "-", "1", ")", ",", "1", ")", "//", "dim_prod", ")", "%", "dims", ")", "]" ]
nl: Find the positions of the maximums of the values of an array at labels.
split_within_dataset: train
is_duplicated: false
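The flat-index arithmetic in this row is the same job numpy.unravel_index does; a self-contained illustration of the unlabeled case:

    import numpy as np

    a = np.array([[1, 9, 2],
                  [4, 3, 7]])
    flat = a.argmax()                       # 1: flat index of the maximum
    print(np.unravel_index(flat, a.shape))  # (0, 1): its row/column position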
id_within_dataset: 49,033
snippet:
    def idxmerge(idxlist, final_progress=True):
        def pfunc(count, total):
            qprogress('Reading indexes: %.2f%% (%d/%d)\r'
                      % (count * 100.0 / total, count, total))

        def pfinal(count, total):
            if final_progress:
                progress('Reading indexes: %.2f%% (%d/%d), done.\n' % (100, total, total))

        return merge_iter(idxlist, 10024, pfunc, pfinal)
[ "def", "idxmerge", "(", "idxlist", ",", "final_progress", "=", "True", ")", ":", "def", "pfunc", "(", "count", ",", "total", ")", ":", "qprogress", "(", "(", "'Reading indexes: %.2f%% (%d/%d)\\r'", "%", "(", "(", "(", "count", "*", "100.0", ")", "/", "total", ")", ",", "count", ",", "total", ")", ")", ")", "def", "pfinal", "(", "count", ",", "total", ")", ":", "if", "final_progress", ":", "progress", "(", "(", "'Reading indexes: %.2f%% (%d/%d), done.\\n'", "%", "(", "100", ",", "total", ",", "total", ")", ")", ")", "return", "merge_iter", "(", "idxlist", ",", "10024", ",", "pfunc", ",", "pfinal", ")" ]
nl: Generate a list of all the objects reachable in a PackIdxList.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,034
snippet:
    def caffe_preprocess_and_compute(pimg, caffe_transformer=None, caffe_net=None,
                                     output_layers=None):
        if caffe_net is not None:
            if output_layers is None:
                output_layers = caffe_net.outputs
            img_data_rs = resize_image(pimg, sz=(256, 256))
            image = caffe.io.load_image(StringIO(img_data_rs))
            H, W, _ = image.shape
            _, _, h, w = caffe_net.blobs['data'].data.shape
            h_off = max((H - h) / 2, 0)
            w_off = max((W - w) / 2, 0)
            crop = image[h_off:h_off + h, w_off:w_off + w, :]
            transformed_image = caffe_transformer.preprocess('data', crop)
            transformed_image.shape = (1,) + transformed_image.shape
            input_name = caffe_net.inputs[0]
            all_outputs = caffe_net.forward_all(blobs=output_layers,
                                                **{input_name: transformed_image})
            outputs = all_outputs[output_layers[0]][0].astype(float)
            return outputs
        else:
            return []
[ "def", "caffe_preprocess_and_compute", "(", "pimg", ",", "caffe_transformer", "=", "None", ",", "caffe_net", "=", "None", ",", "output_layers", "=", "None", ")", ":", "if", "(", "caffe_net", "is", "not", "None", ")", ":", "if", "(", "output_layers", "is", "None", ")", ":", "output_layers", "=", "caffe_net", ".", "outputs", "img_data_rs", "=", "resize_image", "(", "pimg", ",", "sz", "=", "(", "256", ",", "256", ")", ")", "image", "=", "caffe", ".", "io", ".", "load_image", "(", "StringIO", "(", "img_data_rs", ")", ")", "(", "H", ",", "W", ",", "_", ")", "=", "image", ".", "shape", "(", "_", ",", "_", ",", "h", ",", "w", ")", "=", "caffe_net", ".", "blobs", "[", "'data'", "]", ".", "data", ".", "shape", "h_off", "=", "max", "(", "(", "(", "H", "-", "h", ")", "/", "2", ")", ",", "0", ")", "w_off", "=", "max", "(", "(", "(", "W", "-", "w", ")", "/", "2", ")", ",", "0", ")", "crop", "=", "image", "[", "h_off", ":", "(", "h_off", "+", "h", ")", ",", "w_off", ":", "(", "w_off", "+", "w", ")", ",", ":", "]", "transformed_image", "=", "caffe_transformer", ".", "preprocess", "(", "'data'", ",", "crop", ")", "transformed_image", ".", "shape", "=", "(", "(", "1", ",", ")", "+", "transformed_image", ".", "shape", ")", "input_name", "=", "caffe_net", ".", "inputs", "[", "0", "]", "all_outputs", "=", "caffe_net", ".", "forward_all", "(", "blobs", "=", "output_layers", ",", "**", "{", "input_name", ":", "transformed_image", "}", ")", "outputs", "=", "all_outputs", "[", "output_layers", "[", "0", "]", "]", "[", "0", "]", ".", "astype", "(", "float", ")", "return", "outputs", "else", ":", "return", "[", "]" ]
nl: Run a Caffe network on an input image after preprocessing it for Caffe.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,036
snippet:
    def binary_search(sorted_collection, item):
        left = 0
        right = len(sorted_collection) - 1
        while left <= right:
            midpoint = (left + right) // 2
            current_item = sorted_collection[midpoint]
            if current_item == item:
                return midpoint
            elif item < current_item:
                right = midpoint - 1
            else:
                left = midpoint + 1
        return None
[ "def", "binary_search", "(", "sorted_collection", ",", "item", ")", ":", "left", "=", "0", "right", "=", "(", "len", "(", "sorted_collection", ")", "-", "1", ")", "while", "(", "left", "<=", "right", ")", ":", "midpoint", "=", "(", "(", "left", "+", "right", ")", "//", "2", ")", "current_item", "=", "sorted_collection", "[", "midpoint", "]", "if", "(", "current_item", "==", "item", ")", ":", "return", "midpoint", "elif", "(", "item", "<", "current_item", ")", ":", "right", "=", "(", "midpoint", "-", "1", ")", "else", ":", "left", "=", "(", "midpoint", "+", "1", ")", "return", "None" ]
nl: Pure-Python implementation of the binary search algorithm. Be careful: the collection must be sorted.
split_within_dataset: train
is_duplicated: false
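A quick usage sketch; the input must already be sorted, and the return value is an index (or None on a miss):

    data = [0, 5, 7, 10, 15]
    print(binary_search(data, 7))   # 2
    print(binary_search(data, 6))   # None
    print(binary_search(data, 15))  # 4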
id_within_dataset: 49,038
snippet:
    @preloaderStop
    def successMessage(message):
        printLine(message, '\n')
[ "@", "preloaderStop", "def", "successMessage", "(", "message", ")", ":", "printLine", "(", "message", ",", "'\\n'", ")" ]
nl: Display a message.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,040
snippet:
    def desired_matches(desired, header):
        parsed_ranges = list(map(parse_media_range, header.split(',')))
        return [mimetype for mimetype in desired
                if quality_parsed(mimetype, parsed_ranges)]
[ "def", "desired_matches", "(", "desired", ",", "header", ")", ":", "parsed_ranges", "=", "list", "(", "map", "(", "parse_media_range", ",", "header", ".", "split", "(", "','", ")", ")", ")", "return", "[", "mimetype", "for", "mimetype", "in", "desired", "if", "quality_parsed", "(", "mimetype", ",", "parsed_ranges", ")", "]" ]
nl: Takes a list of desired MIME types in the order the server prefers to send them, regardless of the browser's preference.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,041
snippet:
    def topic_check_iam_permissions(client, to_delete):
        TOPIC_NAME = 'topic_check_iam_permissions-%d' % (_millis(),)
        topic = client.topic(TOPIC_NAME)
        topic.create()
        to_delete.append(topic)
        from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
        TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE]
        ALLOWED = topic.check_iam_permissions(TO_CHECK)
        assert set(ALLOWED) == set(TO_CHECK)
[ "def", "topic_check_iam_permissions", "(", "client", ",", "to_delete", ")", ":", "TOPIC_NAME", "=", "(", "'topic_check_iam_permissions-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "topic", "=", "client", ".", "topic", "(", "TOPIC_NAME", ")", "topic", ".", "create", "(", ")", "to_delete", ".", "append", "(", "topic", ")", "from", "google", ".", "cloud", ".", "pubsub", ".", "iam", "import", "OWNER_ROLE", ",", "EDITOR_ROLE", ",", "VIEWER_ROLE", "TO_CHECK", "=", "[", "OWNER_ROLE", ",", "EDITOR_ROLE", ",", "VIEWER_ROLE", "]", "ALLOWED", "=", "topic", ".", "check_iam_permissions", "(", "TO_CHECK", ")", "assert", "(", "set", "(", "ALLOWED", ")", "==", "set", "(", "TO_CHECK", ")", ")" ]
nl: Check topic IAM permissions.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,042
snippet:
    @pytest.fixture
    def linear_structure(request, graph, random_sequence):
        sequence = random_sequence()
        graph.consume(sequence)
        if hdn_counts(sequence, graph):
            request.applymarker(pytest.mark.xfail)
        return (graph, sequence)
[ "@", "pytest", ".", "fixture", "def", "linear_structure", "(", "request", ",", "graph", ",", "random_sequence", ")", ":", "sequence", "=", "random_sequence", "(", ")", "graph", ".", "consume", "(", "sequence", ")", "if", "hdn_counts", "(", "sequence", ",", "graph", ")", ":", "request", ".", "applymarker", "(", "pytest", ".", "mark", ".", "xfail", ")", "return", "(", "graph", ",", "sequence", ")" ]
nl: Sets up a simple linear path graph structure.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,044
snippet:
    def _iter_content_lines(content):
        return _iter_lines(content.iter_bytes(), '\n')
[ "def", "_iter_content_lines", "(", "content", ")", ":", "return", "_iter_lines", "(", "content", ".", "iter_bytes", "(", ")", ",", "'\\n'", ")" ]
nl: Iterate over the lines that make up content.
split_within_dataset: train
is_duplicated: false
id_within_dataset: 49,045
snippet:
    def downgrades(src):
        def _(f):
            destination = src - 1

            @do(op.setitem(_downgrade_methods, destination))
            @wraps(f)
            def wrapper(op, conn, version_info_table):
                conn.execute(version_info_table.delete())
                f(op)
                write_version_info(conn, version_info_table, destination)
            return wrapper
        return _
[ "def", "downgrades", "(", "src", ")", ":", "def", "_", "(", "f", ")", ":", "destination", "=", "(", "src", "-", "1", ")", "@", "do", "(", "op", ".", "setitem", "(", "_downgrade_methods", ",", "destination", ")", ")", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "op", ",", "conn", ",", "version_info_table", ")", ":", "conn", ".", "execute", "(", "version_info_table", ".", "delete", "(", ")", ")", "f", "(", "op", ")", "write_version_info", "(", "conn", ",", "version_info_table", ",", "destination", ")", "return", "wrapper", "return", "_" ]
nl: Decorator for marking that a method is a downgrade from a version to the previous version.
split_within_dataset: train
is_duplicated: true
id_within_dataset: 49,046
snippet:
    def _trim_env_off_path(paths, saltenv, trim_slash=False):
        env_len = None if _is_env_per_bucket() else len(saltenv) + 1
        slash_len = -1 if trim_slash else None
        return [d[env_len:slash_len] for d in paths]
[ "def", "_trim_env_off_path", "(", "paths", ",", "saltenv", ",", "trim_slash", "=", "False", ")", ":", "env_len", "=", "(", "None", "if", "_is_env_per_bucket", "(", ")", "else", "(", "len", "(", "saltenv", ")", "+", "1", ")", ")", "slash_len", "=", "(", "(", "-", "1", ")", "if", "trim_slash", "else", "None", ")", "return", "[", "d", "[", "env_len", ":", "slash_len", "]", "for", "d", "in", "paths", "]" ]
return a list of file paths with the saltenv directory removed .
train
true
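A small worked example of the slicing logic above, assuming _is_env_per_bucket() returns False so the leading '<saltenv>/' segment gets stripped:

paths = ['base/top.sls', 'base/dir/init.sls']
saltenv = 'base'
env_len = len(saltenv) + 1           # drop 'base/' from the front
print([d[env_len:] for d in paths])  # ['top.sls', 'dir/init.sls']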
49,047
def db_clean_broken(autotest_dir): for test in models.Test.objects.all(): full_path = os.path.join(autotest_dir, test.path) if (not os.path.isfile(full_path)): logging.info('Removing %s', test.path) _log_or_execute(repr(test), test.delete) for profiler in models.Profiler.objects.all(): full_path = os.path.join(autotest_dir, 'client', 'profilers', profiler.name) if (not os.path.exists(full_path)): logging.info('Removing %s', profiler.name) _log_or_execute(repr(profiler), profiler.delete)
[ "def", "db_clean_broken", "(", "autotest_dir", ")", ":", "for", "test", "in", "models", ".", "Test", ".", "objects", ".", "all", "(", ")", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "autotest_dir", ",", "test", ".", "path", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "full_path", ")", ")", ":", "logging", ".", "info", "(", "'Removing %s'", ",", "test", ".", "path", ")", "_log_or_execute", "(", "repr", "(", "test", ")", ",", "test", ".", "delete", ")", "for", "profiler", "in", "models", ".", "Profiler", ".", "objects", ".", "all", "(", ")", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "autotest_dir", ",", "'client'", ",", "'profilers'", ",", "profiler", ".", "name", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "full_path", ")", ")", ":", "logging", ".", "info", "(", "'Removing %s'", ",", "profiler", ".", "name", ")", "_log_or_execute", "(", "repr", "(", "profiler", ")", ",", "profiler", ".", "delete", ")" ]
remove tests from autotest_web that do not have valid control files . this function is invoked when -c is supplied on the command line and when running update_all() .
train
false
49,048
def _set_common_headers(doc, section_id, response): response['ETag'] = doc.calculate_etag(section_id) if doc.current_revision_id: response['X-kuma-revision'] = doc.current_revision_id return response
[ "def", "_set_common_headers", "(", "doc", ",", "section_id", ",", "response", ")", ":", "response", "[", "'ETag'", "]", "=", "doc", ".", "calculate_etag", "(", "section_id", ")", "if", "doc", ".", "current_revision_id", ":", "response", "[", "'X-kuma-revision'", "]", "=", "doc", ".", "current_revision_id", "return", "response" ]
perform some response-header manipulation that gets used in several places .
train
false
49,049
@utils.arg('monitor', metavar='<monitor>', help='ID of the monitor.') @utils.service_type('monitor') def do_show(cs, args): monitor = _find_monitor(cs, args.monitor) _print_monitor(monitor)
[ "@", "utils", ".", "arg", "(", "'monitor'", ",", "metavar", "=", "'<monitor>'", ",", "help", "=", "'ID of the monitor.'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_show", "(", "cs", ",", "args", ")", ":", "monitor", "=", "_find_monitor", "(", "cs", ",", "args", ".", "monitor", ")", "_print_monitor", "(", "monitor", ")" ]
show details about the given monitor .
train
false
49,050
def _losetup_list(): output = check_output(['losetup', '--all']).decode('utf8') return _losetup_list_parse(output)
[ "def", "_losetup_list", "(", ")", ":", "output", "=", "check_output", "(", "[", "'losetup'", ",", "'--all'", "]", ")", ".", "decode", "(", "'utf8'", ")", "return", "_losetup_list_parse", "(", "output", ")" ]
list all the loopback devices on the system .
train
false
49,052
def release_by_id(release_id, session=None): return session.query(Release).filter((Release.id == release_id)).one()
[ "def", "release_by_id", "(", "release_id", ",", "session", "=", "None", ")", ":", "return", "session", ".", "query", "(", "Release", ")", ".", "filter", "(", "(", "Release", ".", "id", "==", "release_id", ")", ")", ".", "one", "(", ")" ]
return an instance of a release by querying its id .
train
false
49,053
def calculate_checksum(source, context): md5 = hashlib.md5() if (source is not None): md5.update(source.encode(u'utf-8')) md5.update(context.encode(u'utf-8')) return md5.hexdigest()
[ "def", "calculate_checksum", "(", "source", ",", "context", ")", ":", "md5", "=", "hashlib", ".", "md5", "(", ")", "if", "(", "source", "is", "not", "None", ")", ":", "md5", ".", "update", "(", "source", ".", "encode", "(", "u'utf-8'", ")", ")", "md5", ".", "update", "(", "context", ".", "encode", "(", "u'utf-8'", ")", ")", "return", "md5", ".", "hexdigest", "(", ")" ]
calculates an md5 checksum identifying the translation from its source and context .
train
false
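The digest is deterministic — the same (source, context) pair always hashes to the same value. A quick sketch, assuming the function above is in scope:

digest = calculate_checksum(u'Hello', u'greeting-context')
assert digest == calculate_checksum(u'Hello', u'greeting-context')
assert digest != calculate_checksum(u'Hello', u'other-context')  # context feeds the hash too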
49,055
def _PopulateUniquePropertiesSet(prop, unique_properties): if prop.multiple(): prop = _CopyAndSetMultipleToFalse(prop) prop_as_str = prop.SerializePartialToString() unique_properties[prop.name()].add(prop_as_str) return prop_as_str
[ "def", "_PopulateUniquePropertiesSet", "(", "prop", ",", "unique_properties", ")", ":", "if", "prop", ".", "multiple", "(", ")", ":", "prop", "=", "_CopyAndSetMultipleToFalse", "(", "prop", ")", "prop_as_str", "=", "prop", ".", "SerializePartialToString", "(", ")", "unique_properties", "[", "prop", ".", "name", "(", ")", "]", ".", "add", "(", "prop_as_str", ")", "return", "prop_as_str" ]
populates a set containing unique properties .
train
false
49,056
def logged_in_client(user='test', passwd='test', client=None): if (client is None): client = WindmillTestClient(__name__) client.open(url=(windmill.settings['TEST_URL'] + '?clearSession=true')) client.waits.forPageLoad() client.waits.forElement(classname='hue-loaded') if client.execJS(js="!!$('hue-login')")['output']: client.waits.forElement(classname='hue-username') client.click(jquery='(".hue-username")[0]') client.type(classname='hue-username', text=user) client.click(classname='hue-password') client.type(classname='hue-password', text=passwd) client.click(classname='hue-continue') time.sleep(2.0) client.waits.forElement(classname='loggedIn', timeout='20000') return client
[ "def", "logged_in_client", "(", "user", "=", "'test'", ",", "passwd", "=", "'test'", ",", "client", "=", "None", ")", ":", "if", "(", "client", "is", "None", ")", ":", "client", "=", "WindmillTestClient", "(", "__name__", ")", "client", ".", "open", "(", "url", "=", "(", "windmill", ".", "settings", "[", "'TEST_URL'", "]", "+", "'?clearSession=true'", ")", ")", "client", ".", "waits", ".", "forPageLoad", "(", ")", "client", ".", "waits", ".", "forElement", "(", "classname", "=", "'hue-loaded'", ")", "if", "client", ".", "execJS", "(", "js", "=", "\"!!$('hue-login')\"", ")", "[", "'output'", "]", ":", "client", ".", "waits", ".", "forElement", "(", "classname", "=", "'hue-username'", ")", "client", ".", "click", "(", "jquery", "=", "'(\".hue-username\")[0]'", ")", "client", ".", "type", "(", "classname", "=", "'hue-username'", ",", "text", "=", "user", ")", "client", ".", "click", "(", "classname", "=", "'hue-password'", ")", "client", ".", "type", "(", "classname", "=", "'hue-password'", ",", "text", "=", "passwd", ")", "client", ".", "click", "(", "classname", "=", "'hue-continue'", ")", "time", ".", "sleep", "(", "2.0", ")", "client", ".", "waits", ".", "forElement", "(", "classname", "=", "'loggedIn'", ",", "timeout", "=", "'20000'", ")", "return", "client" ]
opens the root url , logs in if the login form is present , and returns a logged-in windmill test client .
train
false
49,057
def test_size(): for sparse_type in ('csc_matrix', 'csr_matrix'): x = getattr(theano.sparse, sparse_type)() y = getattr(scipy.sparse, sparse_type)((5, 7)).astype(config.floatX) get_size = theano.function([x], x.size) def check(): assert (y.size == get_size(y)) check() y[(0, 0)] = 1 check() y[(0, 1)] = 0 check()
[ "def", "test_size", "(", ")", ":", "for", "sparse_type", "in", "(", "'csc_matrix'", ",", "'csr_matrix'", ")", ":", "x", "=", "getattr", "(", "theano", ".", "sparse", ",", "sparse_type", ")", "(", ")", "y", "=", "getattr", "(", "scipy", ".", "sparse", ",", "sparse_type", ")", "(", "(", "5", ",", "7", ")", ")", ".", "astype", "(", "config", ".", "floatX", ")", "get_size", "=", "theano", ".", "function", "(", "[", "x", "]", ",", "x", ".", "size", ")", "def", "check", "(", ")", ":", "assert", "(", "y", ".", "size", "==", "get_size", "(", "y", ")", ")", "check", "(", ")", "y", "[", "(", "0", ",", "0", ")", "]", "=", "1", "check", "(", ")", "y", "[", "(", "0", ",", "1", ")", "]", "=", "0", "check", "(", ")" ]
ensure the size attribute of sparse matrices behaves as in numpy .
train
false
49,058
@expect_json @login_required def cohorting_settings(request, course_key_string): course_key = CourseKey.from_string(course_key_string) get_course_with_access(request.user, 'staff', course_key) settings = {} verified_track_cohort_enabled = VerifiedTrackCohortedCourse.is_verified_track_cohort_enabled(course_key) settings['enabled'] = verified_track_cohort_enabled if verified_track_cohort_enabled: settings['verified_cohort_name'] = VerifiedTrackCohortedCourse.verified_cohort_name_for_course(course_key) return JsonResponse(settings)
[ "@", "expect_json", "@", "login_required", "def", "cohorting_settings", "(", "request", ",", "course_key_string", ")", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_key_string", ")", "get_course_with_access", "(", "request", ".", "user", ",", "'staff'", ",", "course_key", ")", "settings", "=", "{", "}", "verified_track_cohort_enabled", "=", "VerifiedTrackCohortedCourse", ".", "is_verified_track_cohort_enabled", "(", "course_key", ")", "settings", "[", "'enabled'", "]", "=", "verified_track_cohort_enabled", "if", "verified_track_cohort_enabled", ":", "settings", "[", "'verified_cohort_name'", "]", "=", "VerifiedTrackCohortedCourse", ".", "verified_cohort_name_for_course", "(", "course_key", ")", "return", "JsonResponse", "(", "settings", ")" ]
the handler for verified track cohorting requests .
train
false
49,059
def branch_exists(branch): cmd = ['git', 'rev-parse', '--symbolic', '--verify', branch] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, _) = p.communicate() if p.returncode: return False return (out.rstrip() == branch)
[ "def", "branch_exists", "(", "branch", ")", ":", "cmd", "=", "[", "'git'", ",", "'rev-parse'", ",", "'--symbolic'", ",", "'--verify'", ",", "branch", "]", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "out", ",", "_", ")", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", ":", "return", "False", "return", "(", "out", ".", "rstrip", "(", ")", "==", "branch", ")" ]
make sure that the given ref name really exists .
train
false
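A usage sketch — this shells out to git, so it only works inside a checkout, and 'master' is just an example ref name:

import subprocess  # already required by the snippet above

if branch_exists('master'):
    print('local master branch found')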
49,062
def open_text(fname, **kwargs): if PY3: kwargs.setdefault('encoding', FS_ENCODING) kwargs.setdefault('errors', ENCODING_ERRORS_HANDLER) return open(fname, 'rt', **kwargs)
[ "def", "open_text", "(", "fname", ",", "**", "kwargs", ")", ":", "if", "PY3", ":", "kwargs", ".", "setdefault", "(", "'encoding'", ",", "FS_ENCODING", ")", "kwargs", ".", "setdefault", "(", "'errors'", ",", "ENCODING_ERRORS_HANDLER", ")", "return", "open", "(", "fname", ",", "'rt'", ",", "**", "kwargs", ")" ]
on python 3 , opens a file in text mode using the filesystem encoding and a proper encoding/decoding errors handler .
train
false
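A usage sketch, assuming the function above is in scope; 'notes.txt' is a hypothetical file — the point is that encoding handling only needs to be spelled out once:

with open_text('notes.txt') as f:  # text mode, fs encoding on python 3
    first_line = f.readline()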
49,063
def addSymmetricYPath(outputs, path, y): vertexes = [] loops = [getSymmetricYLoop(path, vertexes, (- y)), getSymmetricYLoop(path, vertexes, y)] outputs.append(getPillarOutput(loops))
[ "def", "addSymmetricYPath", "(", "outputs", ",", "path", ",", "y", ")", ":", "vertexes", "=", "[", "]", "loops", "=", "[", "getSymmetricYLoop", "(", "path", ",", "vertexes", ",", "(", "-", "y", ")", ")", ",", "getSymmetricYLoop", "(", "path", ",", "vertexes", ",", "y", ")", "]", "outputs", ".", "append", "(", "getPillarOutput", "(", "loops", ")", ")" ]
add symmetric y path output to outputs .
train
false
49,066
def _getSupportedCiphers(): supportedCiphers = [] cs = ['aes256-ctr', 'aes256-cbc', 'aes192-ctr', 'aes192-cbc', 'aes128-ctr', 'aes128-cbc', 'cast128-ctr', 'cast128-cbc', 'blowfish-ctr', 'blowfish-cbc', '3des-ctr', '3des-cbc'] for cipher in cs: (algorithmClass, keySize, modeClass) = SSHCiphers.cipherMap[cipher] try: Cipher(algorithmClass((' ' * keySize)), modeClass((' ' * (algorithmClass.block_size // 8))), backend=default_backend()).encryptor() except UnsupportedAlgorithm: pass else: supportedCiphers.append(cipher) return supportedCiphers
[ "def", "_getSupportedCiphers", "(", ")", ":", "supportedCiphers", "=", "[", "]", "cs", "=", "[", "'aes256-ctr'", ",", "'aes256-cbc'", ",", "'aes192-ctr'", ",", "'aes192-cbc'", ",", "'aes128-ctr'", ",", "'aes128-cbc'", ",", "'cast128-ctr'", ",", "'cast128-cbc'", ",", "'blowfish-ctr'", ",", "'blowfish-cbc'", ",", "'3des-ctr'", ",", "'3des-cbc'", "]", "for", "cipher", "in", "cs", ":", "(", "algorithmClass", ",", "keySize", ",", "modeClass", ")", "=", "SSHCiphers", ".", "cipherMap", "[", "cipher", "]", "try", ":", "Cipher", "(", "algorithmClass", "(", "(", "' '", "*", "keySize", ")", ")", ",", "modeClass", "(", "(", "' '", "*", "(", "algorithmClass", ".", "block_size", "//", "8", ")", ")", ")", ",", "backend", "=", "default_backend", "(", ")", ")", ".", "encryptor", "(", ")", "except", "UnsupportedAlgorithm", ":", "pass", "else", ":", "supportedCiphers", ".", "append", "(", "cipher", ")", "return", "supportedCiphers" ]
build a list of ciphers that are supported by the backend in use .
train
false
49,068
def verify_month_inputs(month_date): (year, month) = month_date.split('-') (year, month) = (int(year), int(month)) missing = [] for day in xrange(1, (calendar.monthrange(year, month)[1] + 1)): for hour in xrange(24): hour_date = ('%04d-%02d-%02d-%02d' % (year, month, day, hour)) log_path = os.path.join(RAW_LOG_DIR, ('%s.log.gz' % hour_date)) if (not s3_key_exists(s3_connection, log_path)): log_path = os.path.join(RAW_LOG_DIR, ('%s.log.bz2' % hour_date)) if (not s3_key_exists(s3_connection, log_path)): missing.append(hour_date) for d in missing: print d
[ "def", "verify_month_inputs", "(", "month_date", ")", ":", "(", "year", ",", "month", ")", "=", "month_date", ".", "split", "(", "'-'", ")", "(", "year", ",", "month", ")", "=", "(", "int", "(", "year", ")", ",", "int", "(", "month", ")", ")", "missing", "=", "[", "]", "for", "day", "in", "xrange", "(", "1", ",", "(", "calendar", ".", "monthrange", "(", "year", ",", "month", ")", "[", "1", "]", "+", "1", ")", ")", ":", "for", "hour", "in", "xrange", "(", "24", ")", ":", "hour_date", "=", "(", "'%04d-%02d-%02d-%02d'", "%", "(", "year", ",", "month", ",", "day", ",", "hour", ")", ")", "log_path", "=", "os", ".", "path", ".", "join", "(", "RAW_LOG_DIR", ",", "(", "'%s.log.gz'", "%", "hour_date", ")", ")", "if", "(", "not", "s3_key_exists", "(", "s3_connection", ",", "log_path", ")", ")", ":", "log_path", "=", "os", ".", "path", ".", "join", "(", "RAW_LOG_DIR", ",", "(", "'%s.log.bz2'", "%", "hour_date", ")", ")", "if", "(", "not", "s3_key_exists", "(", "s3_connection", ",", "log_path", ")", ")", ":", "missing", ".", "append", "(", "hour_date", ")", "for", "d", "in", "missing", ":", "print", "d" ]
check existence of all hourly traffic logs for month_date and print any that are missing .
train
false
49,069
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): options = dict(((key[len(prefix):], configuration[key]) for key in configuration if key.startswith(prefix))) options['_coerce_config'] = True options.update(kwargs) url = options.pop('url') return create_engine(url, **options)
[ "def", "engine_from_config", "(", "configuration", ",", "prefix", "=", "'sqlalchemy.'", ",", "**", "kwargs", ")", ":", "options", "=", "dict", "(", "(", "(", "key", "[", "len", "(", "prefix", ")", ":", "]", ",", "configuration", "[", "key", "]", ")", "for", "key", "in", "configuration", "if", "key", ".", "startswith", "(", "prefix", ")", ")", ")", "options", "[", "'_coerce_config'", "]", "=", "True", "options", ".", "update", "(", "kwargs", ")", "url", "=", "options", ".", "pop", "(", "'url'", ")", "return", "create_engine", "(", "url", ",", "**", "options", ")" ]
create a new engine instance using a configuration dictionary .
train
false
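A usage sketch against an in-memory sqlite database, assuming the function above is in scope; only keys carrying the 'sqlalchemy.' prefix are picked up, and the url key becomes the engine url:

config = {
    'sqlalchemy.url': 'sqlite:///:memory:',
    'sqlalchemy.echo': 'true',
    'unrelated.key': 'ignored',  # no prefix, so it never reaches create_engine
}
engine = engine_from_config(config)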
49,070
def c(colour, text): colours = {'r': r, 'g': g, 'y': y, 'b': b, 'p': p} return ((colours[colour] + text) + w)
[ "def", "c", "(", "colour", ",", "text", ")", ":", "colours", "=", "{", "'r'", ":", "r", ",", "'g'", ":", "g", ",", "'y'", ":", "y", ",", "'b'", ":", "b", ",", "'p'", ":", "p", "}", "return", "(", "(", "colours", "[", "colour", "]", "+", "text", ")", "+", "w", ")" ]
return coloured text .
train
false
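The colour globals r, g, y, b, p and the reset w are defined elsewhere in the module; a sketch with hypothetical ANSI escape values filled in, assuming c from the snippet is in scope:

r, g, y, b, p = '\033[91m', '\033[92m', '\033[93m', '\033[94m', '\033[95m'
w = '\033[0m'  # reset
print(c('g', 'all good'))  # renders green on ansi-capable terminals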
49,072
def event_location(): return s3_rest_controller()
[ "def", "event_location", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
49,073
def get_interface_name(name, prefix='', max_len=n_const.DEVICE_NAME_MAX_LEN): requested_name = (prefix + name) if (len(requested_name) <= max_len): return requested_name if ((len(prefix) + INTERFACE_HASH_LEN) > max_len): raise ValueError(_('Too long prefix provided. New name would exceed given length for an interface name.')) namelen = ((max_len - len(prefix)) - INTERFACE_HASH_LEN) hashed_name = hashlib.sha1(encodeutils.to_utf8(name)) new_name = ('%(prefix)s%(truncated)s%(hash)s' % {'prefix': prefix, 'truncated': name[0:namelen], 'hash': hashed_name.hexdigest()[0:INTERFACE_HASH_LEN]}) LOG.info(_LI('The requested interface name %(requested_name)s exceeds the %(limit)d character limitation. It was shortened to %(new_name)s to fit.'), {'requested_name': requested_name, 'limit': max_len, 'new_name': new_name}) return new_name
[ "def", "get_interface_name", "(", "name", ",", "prefix", "=", "''", ",", "max_len", "=", "n_const", ".", "DEVICE_NAME_MAX_LEN", ")", ":", "requested_name", "=", "(", "prefix", "+", "name", ")", "if", "(", "len", "(", "requested_name", ")", "<=", "max_len", ")", ":", "return", "requested_name", "if", "(", "(", "len", "(", "prefix", ")", "+", "INTERFACE_HASH_LEN", ")", ">", "max_len", ")", ":", "raise", "ValueError", "(", "_", "(", "'Too long prefix provided. New name would exceed given length for an interface name.'", ")", ")", "namelen", "=", "(", "(", "max_len", "-", "len", "(", "prefix", ")", ")", "-", "INTERFACE_HASH_LEN", ")", "hashed_name", "=", "hashlib", ".", "sha1", "(", "encodeutils", ".", "to_utf8", "(", "name", ")", ")", "new_name", "=", "(", "'%(prefix)s%(truncated)s%(hash)s'", "%", "{", "'prefix'", ":", "prefix", ",", "'truncated'", ":", "name", "[", "0", ":", "namelen", "]", ",", "'hash'", ":", "hashed_name", ".", "hexdigest", "(", ")", "[", "0", ":", "INTERFACE_HASH_LEN", "]", "}", ")", "LOG", ".", "info", "(", "_LI", "(", "'The requested interface name %(requested_name)s exceeds the %(limit)d character limitation. It was shortened to %(new_name)s to fit.'", ")", ",", "{", "'requested_name'", ":", "requested_name", ",", "'limit'", ":", "max_len", ",", "'new_name'", ":", "new_name", "}", ")", "return", "new_name" ]
construct an interface name based on the prefix and name .
train
false
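The truncation keeps the prefix, a head of the name, and a short sha1 tail, so the result is exactly max_len characters. A standalone sketch of that arithmetic (hash_len stands in for INTERFACE_HASH_LEN, and all values are illustrative):

import hashlib

name, prefix, max_len, hash_len = 'very-long-network-name', 'tap', 15, 6
keep = max_len - len(prefix) - hash_len
tail = hashlib.sha1(name.encode('utf-8')).hexdigest()[:hash_len]
short = prefix + name[:keep] + tail
assert len(short) == max_len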
49,075
@addon_valid_disabled_pending_view @non_atomic_requests def addon_detail(request, addon): if (addon.is_deleted or (addon.is_pending() and (not addon.is_persona()))): raise http.Http404 if addon.is_disabled: return render(request, 'addons/impala/disabled.html', {'addon': addon}, status=404) if (addon.type in request.APP.types): if (addon.type == amo.ADDON_PERSONA): return persona_detail(request, addon) else: if (not addon.current_version): raise http.Http404 return extension_detail(request, addon) else: try: new_app = [a for a in amo.APP_USAGE if (addon.type in a.types)][0] except IndexError: raise http.Http404 else: prefixer = urlresolvers.get_url_prefix() prefixer.app = new_app.short return http.HttpResponsePermanentRedirect(reverse('addons.detail', args=[addon.slug]))
[ "@", "addon_valid_disabled_pending_view", "@", "non_atomic_requests", "def", "addon_detail", "(", "request", ",", "addon", ")", ":", "if", "(", "addon", ".", "is_deleted", "or", "(", "addon", ".", "is_pending", "(", ")", "and", "(", "not", "addon", ".", "is_persona", "(", ")", ")", ")", ")", ":", "raise", "http", ".", "Http404", "if", "addon", ".", "is_disabled", ":", "return", "render", "(", "request", ",", "'addons/impala/disabled.html'", ",", "{", "'addon'", ":", "addon", "}", ",", "status", "=", "404", ")", "if", "(", "addon", ".", "type", "in", "request", ".", "APP", ".", "types", ")", ":", "if", "(", "addon", ".", "type", "==", "amo", ".", "ADDON_PERSONA", ")", ":", "return", "persona_detail", "(", "request", ",", "addon", ")", "else", ":", "if", "(", "not", "addon", ".", "current_version", ")", ":", "raise", "http", ".", "Http404", "return", "extension_detail", "(", "request", ",", "addon", ")", "else", ":", "try", ":", "new_app", "=", "[", "a", "for", "a", "in", "amo", ".", "APP_USAGE", "if", "(", "addon", ".", "type", "in", "a", ".", "types", ")", "]", "[", "0", "]", "except", "IndexError", ":", "raise", "http", ".", "Http404", "else", ":", "prefixer", "=", "urlresolvers", ".", "get_url_prefix", "(", ")", "prefixer", ".", "app", "=", "new_app", ".", "short", "return", "http", ".", "HttpResponsePermanentRedirect", "(", "reverse", "(", "'addons.detail'", ",", "args", "=", "[", "addon", ".", "slug", "]", ")", ")" ]
add-ons details page dispatcher .
train
false
49,076
def get_asset_url_by_slug(asset_slug): asset_url = '' try: template_asset = CertificateTemplateAsset.objects.get(asset_slug=asset_slug) asset_url = template_asset.asset.url except CertificateTemplateAsset.DoesNotExist: pass return asset_url
[ "def", "get_asset_url_by_slug", "(", "asset_slug", ")", ":", "asset_url", "=", "''", "try", ":", "template_asset", "=", "CertificateTemplateAsset", ".", "objects", ".", "get", "(", "asset_slug", "=", "asset_slug", ")", "asset_url", "=", "template_asset", ".", "asset", ".", "url", "except", "CertificateTemplateAsset", ".", "DoesNotExist", ":", "pass", "return", "asset_url" ]
returns certificate template asset url for given asset_slug .
train
false
49,077
def flagsimap2maildir(flagstring): retval = set() imapflaglist = flagstring[1:(-1)].split() for (imapflag, maildirflag) in flagmap: if (imapflag in imapflaglist): retval.add(maildirflag) return retval
[ "def", "flagsimap2maildir", "(", "flagstring", ")", ":", "retval", "=", "set", "(", ")", "imapflaglist", "=", "flagstring", "[", "1", ":", "(", "-", "1", ")", "]", ".", "split", "(", ")", "for", "(", "imapflag", ",", "maildirflag", ")", "in", "flagmap", ":", "if", "(", "imapflag", "in", "imapflaglist", ")", ":", "retval", ".", "add", "(", "maildirflag", ")", "return", "retval" ]
convert an imap flag string into a maildir flags set .
train
false
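A worked example, assuming the function above is in scope together with a module-level flagmap along these (hypothetical) lines:

flagmap = [('\\Seen', 'S'), ('\\Answered', 'R'), ('\\Flagged', 'F'),
           ('\\Deleted', 'T'), ('\\Draft', 'D')]
print(flagsimap2maildir('(\\Seen \\Flagged)'))  # {'S', 'F'} (a set, order-free)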
49,078
def _set_config(c): glformat = QGLFormat() glformat.setRedBufferSize(c['red_size']) glformat.setGreenBufferSize(c['green_size']) glformat.setBlueBufferSize(c['blue_size']) glformat.setAlphaBufferSize(c['alpha_size']) glformat.setAccum(False) glformat.setRgba(True) glformat.setDoubleBuffer((True if c['double_buffer'] else False)) glformat.setDepth((True if c['depth_size'] else False)) glformat.setDepthBufferSize((c['depth_size'] if c['depth_size'] else 0)) glformat.setStencil((True if c['stencil_size'] else False)) glformat.setStencilBufferSize((c['stencil_size'] if c['stencil_size'] else 0)) glformat.setSampleBuffers((True if c['samples'] else False)) glformat.setSamples((c['samples'] if c['samples'] else 0)) glformat.setStereo(c['stereo']) return glformat
[ "def", "_set_config", "(", "c", ")", ":", "glformat", "=", "QGLFormat", "(", ")", "glformat", ".", "setRedBufferSize", "(", "c", "[", "'red_size'", "]", ")", "glformat", ".", "setGreenBufferSize", "(", "c", "[", "'green_size'", "]", ")", "glformat", ".", "setBlueBufferSize", "(", "c", "[", "'blue_size'", "]", ")", "glformat", ".", "setAlphaBufferSize", "(", "c", "[", "'alpha_size'", "]", ")", "glformat", ".", "setAccum", "(", "False", ")", "glformat", ".", "setRgba", "(", "True", ")", "glformat", ".", "setDoubleBuffer", "(", "(", "True", "if", "c", "[", "'double_buffer'", "]", "else", "False", ")", ")", "glformat", ".", "setDepth", "(", "(", "True", "if", "c", "[", "'depth_size'", "]", "else", "False", ")", ")", "glformat", ".", "setDepthBufferSize", "(", "(", "c", "[", "'depth_size'", "]", "if", "c", "[", "'depth_size'", "]", "else", "0", ")", ")", "glformat", ".", "setStencil", "(", "(", "True", "if", "c", "[", "'stencil_size'", "]", "else", "False", ")", ")", "glformat", ".", "setStencilBufferSize", "(", "(", "c", "[", "'stencil_size'", "]", "if", "c", "[", "'stencil_size'", "]", "else", "0", ")", ")", "glformat", ".", "setSampleBuffers", "(", "(", "True", "if", "c", "[", "'samples'", "]", "else", "False", ")", ")", "glformat", ".", "setSamples", "(", "(", "c", "[", "'samples'", "]", "if", "c", "[", "'samples'", "]", "else", "0", ")", ")", "glformat", ".", "setStereo", "(", "c", "[", "'stereo'", "]", ")", "return", "glformat" ]
set the opengl configuration .
train
true
49,079
def clouds(opts): functions = LazyLoader(_module_dirs(opts, 'clouds', 'cloud', base_path=os.path.join(SALT_BASE_PATH, 'cloud'), int_type='clouds'), opts, tag='clouds', pack={'__utils__': salt.loader.utils(opts), '__active_provider_name__': None}) for funcname in LIBCLOUD_FUNCS_NOT_SUPPORTED: log.trace("'{0}' has been marked as not supported. Removing from the list of supported cloud functions".format(funcname)) functions.pop(funcname, None) return functions
[ "def", "clouds", "(", "opts", ")", ":", "functions", "=", "LazyLoader", "(", "_module_dirs", "(", "opts", ",", "'clouds'", ",", "'cloud'", ",", "base_path", "=", "os", ".", "path", ".", "join", "(", "SALT_BASE_PATH", ",", "'cloud'", ")", ",", "int_type", "=", "'clouds'", ")", ",", "opts", ",", "tag", "=", "'clouds'", ",", "pack", "=", "{", "'__utils__'", ":", "salt", ".", "loader", ".", "utils", "(", "opts", ")", ",", "'__active_provider_name__'", ":", "None", "}", ")", "for", "funcname", "in", "LIBCLOUD_FUNCS_NOT_SUPPORTED", ":", "log", ".", "trace", "(", "\"'{0}' has been marked as not supported. Removing from the list of supported cloud functions\"", ".", "format", "(", "funcname", ")", ")", "functions", ".", "pop", "(", "funcname", ",", "None", ")", "return", "functions" ]
return the cloud functions .
train
true
49,080
def has_site_permission(user): mw = u'mezzanine.core.middleware.SitePermissionMiddleware' if (mw not in get_middleware_setting()): from warnings import warn warn((mw + u' missing from settings.MIDDLEWARE - per sitepermissions not applied')) return (user.is_staff and user.is_active) return getattr(user, u'has_site_permission', False)
[ "def", "has_site_permission", "(", "user", ")", ":", "mw", "=", "u'mezzanine.core.middleware.SitePermissionMiddleware'", "if", "(", "mw", "not", "in", "get_middleware_setting", "(", ")", ")", ":", "from", "warnings", "import", "warn", "warn", "(", "(", "mw", "+", "u' missing from settings.MIDDLEWARE - per sitepermissions not applied'", ")", ")", "return", "(", "user", ".", "is_staff", "and", "user", ".", "is_active", ")", "return", "getattr", "(", "user", ",", "u'has_site_permission'", ",", "False", ")" ]
checks if a staff user has staff-level access for the current site .
train
false
49,081
def FDistribution(name, d1, d2): return rv(name, FDistributionDistribution, (d1, d2))
[ "def", "FDistribution", "(", "name", ",", "d1", ",", "d2", ")", ":", "return", "rv", "(", "name", ",", "FDistributionDistribution", ",", "(", "d1", ",", "d2", ")", ")" ]
create a continuous random variable with an F distribution .
train
false
49,082
def pl_true_int_repr(clause, model={}): result = False for lit in clause: if (lit < 0): p = model.get((- lit)) if (p is not None): p = (not p) else: p = model.get(lit) if (p is True): return True elif (p is None): result = None return result
[ "def", "pl_true_int_repr", "(", "clause", ",", "model", "=", "{", "}", ")", ":", "result", "=", "False", "for", "lit", "in", "clause", ":", "if", "(", "lit", "<", "0", ")", ":", "p", "=", "model", ".", "get", "(", "(", "-", "lit", ")", ")", "if", "(", "p", "is", "not", "None", ")", ":", "p", "=", "(", "not", "p", ")", "else", ":", "p", "=", "model", ".", "get", "(", "lit", ")", "if", "(", "p", "is", "True", ")", ":", "return", "True", "elif", "(", "p", "is", "None", ")", ":", "result", "=", "None", "return", "result" ]
lightweight version of pl_true .
train
false
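In the integer representation a positive literal n means variable n and a negative literal -n its negation, so a clause is just a set of ints. A quick check, assuming the function above is in scope:

print(pl_true_int_repr({1, -2}, {1: False, 2: True}))  # False: both literals falsified
print(pl_true_int_repr({1, -2}, {1: False}))           # None: variable 2 is unassigned
print(pl_true_int_repr({1, -2}, {2: False}))           # True: -2 is satisfied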
49,084
def _GetMainModule(): deepest_frame = sys._getframe(0) while (deepest_frame.f_back is not None): deepest_frame = deepest_frame.f_back globals_for_main_module = deepest_frame.f_globals main_module_name = _GetModuleObjectAndName(globals_for_main_module)[1] if (main_module_name is None): main_module_name = sys.argv[0] return main_module_name
[ "def", "_GetMainModule", "(", ")", ":", "deepest_frame", "=", "sys", ".", "_getframe", "(", "0", ")", "while", "(", "deepest_frame", ".", "f_back", "is", "not", "None", ")", ":", "deepest_frame", "=", "deepest_frame", ".", "f_back", "globals_for_main_module", "=", "deepest_frame", ".", "f_globals", "main_module_name", "=", "_GetModuleObjectAndName", "(", "globals_for_main_module", ")", "[", "1", "]", "if", "(", "main_module_name", "is", "None", ")", ":", "main_module_name", "=", "sys", ".", "argv", "[", "0", "]", "return", "main_module_name" ]
returns the name of the main module as a string .
train
false
49,086
def fs2web(path): return '/'.join(path.split(os.path.sep))
[ "def", "fs2web", "(", "path", ")", ":", "return", "'/'", ".", "join", "(", "path", ".", "split", "(", "os", ".", "path", ".", "sep", ")", ")" ]
convert fs path into web path .
train
false
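A one-line sanity check, assuming the function above is in scope — on windows this turns backslashes into forward slashes, on posix it is a no-op:

import os
assert fs2web(os.path.join('static', 'img', 'logo.png')) == 'static/img/logo.png'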
49,089
def refresh_info_cache_for_instance(context, instance): if (instance.info_cache is not None): instance.info_cache.refresh()
[ "def", "refresh_info_cache_for_instance", "(", "context", ",", "instance", ")", ":", "if", "(", "instance", ".", "info_cache", "is", "not", "None", ")", ":", "instance", ".", "info_cache", ".", "refresh", "(", ")" ]
refresh the info cache for an instance .
train
false
49,092
def bridge_delete(br, if_exists=True): param_if_exists = _param_if_exists(if_exists) cmd = 'ovs-vsctl {1}del-br {0}'.format(br, param_if_exists) result = __salt__['cmd.run_all'](cmd) retcode = result['retcode'] return _retcode_to_bool(retcode)
[ "def", "bridge_delete", "(", "br", ",", "if_exists", "=", "True", ")", ":", "param_if_exists", "=", "_param_if_exists", "(", "if_exists", ")", "cmd", "=", "'ovs-vsctl {1}del-br {0}'", ".", "format", "(", "br", ",", "param_if_exists", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "retcode", "=", "result", "[", "'retcode'", "]", "return", "_retcode_to_bool", "(", "retcode", ")" ]
deletes bridge and all of its ports .
train
true
49,093
def disallow_proxying(): ScopeReplacer._should_proxy = False
[ "def", "disallow_proxying", "(", ")", ":", "ScopeReplacer", ".", "_should_proxy", "=", "False" ]
disallow lazily imported modules to be used as proxies .
train
false
49,095
def _proxy(): return __proxy__
[ "def", "_proxy", "(", ")", ":", "return", "__proxy__" ]
get proxy .
train
false
49,096
def test_polar_to_cartesian(): r = 1 theta = np.pi x = (r * np.cos(theta)) y = (r * np.sin(theta)) coord = _pol_to_cart(np.array([[r, theta]]))[0] assert_allclose(coord, (x, y), atol=1e-07) assert_allclose(coord, ((-1), 0), atol=1e-07) assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-07) rng = np.random.RandomState(0) r = rng.randn(10) theta = (rng.rand(10) * (2 * np.pi)) polar = np.array((r, theta)).T assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar], _pol_to_cart(polar), atol=1e-07)
[ "def", "test_polar_to_cartesian", "(", ")", ":", "r", "=", "1", "theta", "=", "np", ".", "pi", "x", "=", "(", "r", "*", "np", ".", "cos", "(", "theta", ")", ")", "y", "=", "(", "r", "*", "np", ".", "sin", "(", "theta", ")", ")", "coord", "=", "_pol_to_cart", "(", "np", ".", "array", "(", "[", "[", "r", ",", "theta", "]", "]", ")", ")", "[", "0", "]", "assert_allclose", "(", "coord", ",", "(", "x", ",", "y", ")", ",", "atol", "=", "1e-07", ")", "assert_allclose", "(", "coord", ",", "(", "(", "-", "1", ")", ",", "0", ")", ",", "atol", "=", "1e-07", ")", "assert_allclose", "(", "coord", ",", "_polar_to_cartesian", "(", "theta", ",", "r", ")", ",", "atol", "=", "1e-07", ")", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "r", "=", "rng", ".", "randn", "(", "10", ")", "theta", "=", "(", "rng", ".", "rand", "(", "10", ")", "*", "(", "2", "*", "np", ".", "pi", ")", ")", "polar", "=", "np", ".", "array", "(", "(", "r", ",", "theta", ")", ")", ".", "T", "assert_allclose", "(", "[", "_polar_to_cartesian", "(", "p", "[", "1", "]", ",", "p", "[", "0", "]", ")", "for", "p", "in", "polar", "]", ",", "_pol_to_cart", "(", "polar", ")", ",", "atol", "=", "1e-07", ")" ]
test the helper transform function from polar to cartesian coordinates .
train
false
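The transform under test is the textbook identity x = r*cos(theta), y = r*sin(theta); a minimal standalone check with numpy:

import numpy as np

theta, r = np.pi / 4, 2.0
x, y = r * np.cos(theta), r * np.sin(theta)
assert np.allclose((x, y), (np.sqrt(2.0), np.sqrt(2.0)))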
49,097
def update_deps(post, lang, task): task.file_dep.update([p for p in post.fragment_deps(lang) if (not p.startswith('####MAGIC####'))])
[ "def", "update_deps", "(", "post", ",", "lang", ",", "task", ")", ":", "task", ".", "file_dep", ".", "update", "(", "[", "p", "for", "p", "in", "post", ".", "fragment_deps", "(", "lang", ")", "if", "(", "not", "p", ".", "startswith", "(", "'####MAGIC####'", ")", ")", "]", ")" ]
update file dependencies as they might have been updated during compilation .
train
false
49,098
def _initialize_backends_from_django_settings(): backends.clear() config = getattr(settings, 'TRACKING_BACKENDS', {}) for (name, values) in config.iteritems(): if values: engine = values['ENGINE'] options = values.get('OPTIONS', {}) backends[name] = _instantiate_backend_from_name(engine, options)
[ "def", "_initialize_backends_from_django_settings", "(", ")", ":", "backends", ".", "clear", "(", ")", "config", "=", "getattr", "(", "settings", ",", "'TRACKING_BACKENDS'", ",", "{", "}", ")", "for", "(", "name", ",", "values", ")", "in", "config", ".", "iteritems", "(", ")", ":", "if", "values", ":", "engine", "=", "values", "[", "'ENGINE'", "]", "options", "=", "values", ".", "get", "(", "'OPTIONS'", ",", "{", "}", ")", "backends", "[", "name", "]", "=", "_instantiate_backend_from_name", "(", "engine", ",", "options", ")" ]
initialize the event tracking backends according to the configuration in django settings .
train
false
49,099
@pytest.mark.network def test_editables_legacy(script, data): script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') script.pip('install', '-e', 'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package') result = script.pip('list', '--editable', '--format=legacy', expect_stderr=True) assert ('simple (1.0)' not in result.stdout), str(result) assert (os.path.join('src', 'pip-test-package') in result.stdout), str(result)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_editables_legacy", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'--no-index'", ",", "'simple==1.0'", ")", "script", ".", "pip", "(", "'install'", ",", "'-e'", ",", "'git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package'", ")", "result", "=", "script", ".", "pip", "(", "'list'", ",", "'--editable'", ",", "'--format=legacy'", ",", "expect_stderr", "=", "True", ")", "assert", "(", "'simple (1.0)'", "not", "in", "result", ".", "stdout", ")", ",", "str", "(", "result", ")", "assert", "(", "os", ".", "path", ".", "join", "(", "'src'", ",", "'pip-test-package'", ")", "in", "result", ".", "stdout", ")", ",", "str", "(", "result", ")" ]
test the behavior of the --editable flag in the list command .
train
false
49,100
def expand(template_str, dictionary, **kwargs): t = Template(template_str, **kwargs) return t.expand(dictionary)
[ "def", "expand", "(", "template_str", ",", "dictionary", ",", "**", "kwargs", ")", ":", "t", "=", "Template", "(", "template_str", ",", "**", "kwargs", ")", "return", "t", ".", "expand", "(", "dictionary", ")" ]
free function that expands a template string with a data dictionary .
train
true
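A usage sketch, assuming the function above is in scope; the placeholder syntax shown is hypothetical and depends entirely on which Template class the module provides:

rendered = expand('Hello {name}', {'name': 'World'})
print(rendered)  # 'Hello World' under a {name}-style template syntax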