id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
36,068
def construct_method_map_from_config(): method_map = dict() method_index = 1 for method in CONF.auth.methods: method_map[method_index] = method method_index = (method_index * 2) return method_map
[ "def", "construct_method_map_from_config", "(", ")", ":", "method_map", "=", "dict", "(", ")", "method_index", "=", "1", "for", "method", "in", "CONF", ".", "auth", ".", "methods", ":", "method_map", "[", "method_index", "]", "=", "method", "method_index", "=", "(", "method_index", "*", "2", ")", "return", "method_map" ]
determine authentication method types for deployment .
train
false
36,069
@step((CHECK_PREFIX + 'I have sent an email with "([^"]*)" in the ({0})'.format('|'.join(EMAIL_PARTS)))) def mail_sent_content(step, text, part): assert any(((text in getattr(email, part)) for email in mail.outbox)), 'An email contained expected text in the {0}'.format(part)
[ "@", "step", "(", "(", "CHECK_PREFIX", "+", "'I have sent an email with \"([^\"]*)\" in the ({0})'", ".", "format", "(", "'|'", ".", "join", "(", "EMAIL_PARTS", ")", ")", ")", ")", "def", "mail_sent_content", "(", "step", ",", "text", ",", "part", ")", ":", "assert", "any", "(", "(", "(", "text", "in", "getattr", "(", "email", ",", "part", ")", ")", "for", "email", "in", "mail", ".", "outbox", ")", ")", ",", "'An email contained expected text in the {0}'", ".", "format", "(", "part", ")" ]
then i have sent an email with "pandas" in the body .
train
false
36,070
def _timeit_fast(stmt='pass', setup='pass', repeat=3): timer = timeit.Timer(stmt, setup) x = 0 for p in range(0, 10): number = (10 ** p) x = timer.timeit(number) if (x >= (0.005 / 10)): break if (x > 1): best = x else: number *= 10 r = timer.repeat(repeat, number) best = min(r) sec = (best / number) return sec
[ "def", "_timeit_fast", "(", "stmt", "=", "'pass'", ",", "setup", "=", "'pass'", ",", "repeat", "=", "3", ")", ":", "timer", "=", "timeit", ".", "Timer", "(", "stmt", ",", "setup", ")", "x", "=", "0", "for", "p", "in", "range", "(", "0", ",", "10", ")", ":", "number", "=", "(", "10", "**", "p", ")", "x", "=", "timer", ".", "timeit", "(", "number", ")", "if", "(", "x", ">=", "(", "0.005", "/", "10", ")", ")", ":", "break", "if", "(", "x", ">", "1", ")", ":", "best", "=", "x", "else", ":", "number", "*=", "10", "r", "=", "timer", ".", "repeat", "(", "repeat", ",", "number", ")", "best", "=", "min", "(", "r", ")", "sec", "=", "(", "best", "/", "number", ")", "return", "sec" ]
returns the time the statement/function took .
train
false
36,074
def makedirs_safe(path, mode=None): rpath = unfrackpath(path) b_rpath = to_bytes(rpath) if (not os.path.exists(b_rpath)): try: if mode: os.makedirs(b_rpath, mode) else: os.makedirs(b_rpath) except OSError as e: if (e.errno != EEXIST): raise AnsibleError(('Unable to create local directories(%s): %s' % (to_native(rpath), to_native(e))))
[ "def", "makedirs_safe", "(", "path", ",", "mode", "=", "None", ")", ":", "rpath", "=", "unfrackpath", "(", "path", ")", "b_rpath", "=", "to_bytes", "(", "rpath", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "b_rpath", ")", ")", ":", "try", ":", "if", "mode", ":", "os", ".", "makedirs", "(", "b_rpath", ",", "mode", ")", "else", ":", "os", ".", "makedirs", "(", "b_rpath", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "!=", "EEXIST", ")", ":", "raise", "AnsibleError", "(", "(", "'Unable to create local directories(%s): %s'", "%", "(", "to_native", "(", "rpath", ")", ",", "to_native", "(", "e", ")", ")", ")", ")" ]
safe way to create dirs in muliprocess/thread environments .
train
false
36,075
def pk(ref, hyp, k=None, boundary='1'): if (k is None): k = int(round((len(ref) / (ref.count(boundary) * 2.0)))) err = 0 for i in xrange(((len(ref) - k) + 1)): r = (ref[i:(i + k)].count(boundary) > 0) h = (hyp[i:(i + k)].count(boundary) > 0) if (r != h): err += 1 return (err / ((len(ref) - k) + 1.0))
[ "def", "pk", "(", "ref", ",", "hyp", ",", "k", "=", "None", ",", "boundary", "=", "'1'", ")", ":", "if", "(", "k", "is", "None", ")", ":", "k", "=", "int", "(", "round", "(", "(", "len", "(", "ref", ")", "/", "(", "ref", ".", "count", "(", "boundary", ")", "*", "2.0", ")", ")", ")", ")", "err", "=", "0", "for", "i", "in", "xrange", "(", "(", "(", "len", "(", "ref", ")", "-", "k", ")", "+", "1", ")", ")", ":", "r", "=", "(", "ref", "[", "i", ":", "(", "i", "+", "k", ")", "]", ".", "count", "(", "boundary", ")", ">", "0", ")", "h", "=", "(", "hyp", "[", "i", ":", "(", "i", "+", "k", ")", "]", ".", "count", "(", "boundary", ")", ">", "0", ")", "if", "(", "r", "!=", "h", ")", ":", "err", "+=", "1", "return", "(", "err", "/", "(", "(", "len", "(", "ref", ")", "-", "k", ")", "+", "1.0", ")", ")" ]
compute the pk metric for a pair of segmentations a segmentation is any sequence over a vocabulary of two items .
train
false
36,076
def _symbol(s, matching_symbol=None): if isinstance(s, string_types): if (matching_symbol and (matching_symbol.name == s)): return matching_symbol return Symbol(s, real=True) elif isinstance(s, Symbol): return s else: raise ValueError('symbol must be string for symbol name or Symbol')
[ "def", "_symbol", "(", "s", ",", "matching_symbol", "=", "None", ")", ":", "if", "isinstance", "(", "s", ",", "string_types", ")", ":", "if", "(", "matching_symbol", "and", "(", "matching_symbol", ".", "name", "==", "s", ")", ")", ":", "return", "matching_symbol", "return", "Symbol", "(", "s", ",", "real", "=", "True", ")", "elif", "isinstance", "(", "s", ",", "Symbol", ")", ":", "return", "s", "else", ":", "raise", "ValueError", "(", "'symbol must be string for symbol name or Symbol'", ")" ]
return s if s is a symbol .
train
false
36,077
def send_error_email(subject, message, additional_recipients=None): recipients = _email_recipients(additional_recipients) sender = email().sender send_email(subject=subject, message=message, sender=sender, recipients=recipients)
[ "def", "send_error_email", "(", "subject", ",", "message", ",", "additional_recipients", "=", "None", ")", ":", "recipients", "=", "_email_recipients", "(", "additional_recipients", ")", "sender", "=", "email", "(", ")", ".", "sender", "send_email", "(", "subject", "=", "subject", ",", "message", "=", "message", ",", "sender", "=", "sender", ",", "recipients", "=", "recipients", ")" ]
sends an email to the configured error email .
train
true
36,078
def get_rich_rules(zone, permanent=True): cmd = '--zone={0} --list-rich-rules'.format(zone) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd).splitlines()
[ "def", "get_rich_rules", "(", "zone", ",", "permanent", "=", "True", ")", ":", "cmd", "=", "'--zone={0} --list-rich-rules'", ".", "format", "(", "zone", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", "__firewall_cmd", "(", "cmd", ")", ".", "splitlines", "(", ")" ]
list rich rules bound to a zone .
train
true
36,079
def getDate(date=None): if (date is None): return datetime.date.today() if isinstance(date, datetime.date): return date if isinstance(date, datetime.datetime): return date.date() if isinstance(date, (int, float)): return datetime.date.fromtimestamp(date) if isinstance(date, str): date = date.replace(' ', '0') if (len(date) == 6): return datetime.date(*time.strptime(date, '%y%m%d')[:3]) return datetime.date(*time.strptime(date, '%Y%m%d')[:3]) if hasattr(date, '__getitem__'): return datetime.date(*date[:3]) return datetime.date.fromtimestamp(date.ticks())
[ "def", "getDate", "(", "date", "=", "None", ")", ":", "if", "(", "date", "is", "None", ")", ":", "return", "datetime", ".", "date", ".", "today", "(", ")", "if", "isinstance", "(", "date", ",", "datetime", ".", "date", ")", ":", "return", "date", "if", "isinstance", "(", "date", ",", "datetime", ".", "datetime", ")", ":", "return", "date", ".", "date", "(", ")", "if", "isinstance", "(", "date", ",", "(", "int", ",", "float", ")", ")", ":", "return", "datetime", ".", "date", ".", "fromtimestamp", "(", "date", ")", "if", "isinstance", "(", "date", ",", "str", ")", ":", "date", "=", "date", ".", "replace", "(", "' '", ",", "'0'", ")", "if", "(", "len", "(", "date", ")", "==", "6", ")", ":", "return", "datetime", ".", "date", "(", "*", "time", ".", "strptime", "(", "date", ",", "'%y%m%d'", ")", "[", ":", "3", "]", ")", "return", "datetime", ".", "date", "(", "*", "time", ".", "strptime", "(", "date", ",", "'%Y%m%d'", ")", "[", ":", "3", "]", ")", "if", "hasattr", "(", "date", ",", "'__getitem__'", ")", ":", "return", "datetime", ".", "date", "(", "*", "date", "[", ":", "3", "]", ")", "return", "datetime", ".", "date", ".", "fromtimestamp", "(", "date", ".", "ticks", "(", ")", ")" ]
return datetime .
train
false
36,080
@pytest.mark.network def test_search(script): output = script.pip('search', 'pip') assert ('The PyPA recommended tool for installing Python packages.' in output.stdout)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_search", "(", "script", ")", ":", "output", "=", "script", ".", "pip", "(", "'search'", ",", "'pip'", ")", "assert", "(", "'The PyPA recommended tool for installing Python packages.'", "in", "output", ".", "stdout", ")" ]
end to end test of search command .
train
false
36,081
def _present_graph(dot_filename, filename=None): if (filename == dot_filename): return if ((not filename) and _program_in_path('xdot')): print 'Spawning graph viewer (xdot)' subprocess.Popen(['xdot', dot_filename], close_fds=True) elif _program_in_path('dot'): if (not filename): print 'Graph viewer (xdot) not found, generating a png instead' filename = (dot_filename[:(-4)] + '.png') (stem, ext) = os.path.splitext(filename) cmd = ['dot', ('-T' + ext[1:]), ('-o' + filename), dot_filename] dot = subprocess.Popen(cmd, close_fds=False) dot.wait() if (dot.returncode != 0): print ('dot failed (exit code %d) while executing "%s"' % (dot.returncode, ' '.join(cmd))) else: print ('Image generated as %s' % filename) elif (not filename): print 'Graph viewer (xdot) and image renderer (dot) not found, not doing anything else' else: print 'Image renderer (dot) not found, not doing anything else'
[ "def", "_present_graph", "(", "dot_filename", ",", "filename", "=", "None", ")", ":", "if", "(", "filename", "==", "dot_filename", ")", ":", "return", "if", "(", "(", "not", "filename", ")", "and", "_program_in_path", "(", "'xdot'", ")", ")", ":", "print", "'Spawning graph viewer (xdot)'", "subprocess", ".", "Popen", "(", "[", "'xdot'", ",", "dot_filename", "]", ",", "close_fds", "=", "True", ")", "elif", "_program_in_path", "(", "'dot'", ")", ":", "if", "(", "not", "filename", ")", ":", "print", "'Graph viewer (xdot) not found, generating a png instead'", "filename", "=", "(", "dot_filename", "[", ":", "(", "-", "4", ")", "]", "+", "'.png'", ")", "(", "stem", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "cmd", "=", "[", "'dot'", ",", "(", "'-T'", "+", "ext", "[", "1", ":", "]", ")", ",", "(", "'-o'", "+", "filename", ")", ",", "dot_filename", "]", "dot", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "close_fds", "=", "False", ")", "dot", ".", "wait", "(", ")", "if", "(", "dot", ".", "returncode", "!=", "0", ")", ":", "print", "(", "'dot failed (exit code %d) while executing \"%s\"'", "%", "(", "dot", ".", "returncode", ",", "' '", ".", "join", "(", "cmd", ")", ")", ")", "else", ":", "print", "(", "'Image generated as %s'", "%", "filename", ")", "elif", "(", "not", "filename", ")", ":", "print", "'Graph viewer (xdot) and image renderer (dot) not found, not doing anything else'", "else", ":", "print", "'Image renderer (dot) not found, not doing anything else'" ]
present a .
train
false
36,082
def create_headers(user_id, token=None): headers = {} authorization = 'MediaBrowser UserId="{user_id}", Client="other", Device="beets", DeviceId="beets", Version="0.0.0"'.format(user_id=user_id) headers['x-emby-authorization'] = authorization if token: headers['x-mediabrowser-token'] = token return headers
[ "def", "create_headers", "(", "user_id", ",", "token", "=", "None", ")", ":", "headers", "=", "{", "}", "authorization", "=", "'MediaBrowser UserId=\"{user_id}\", Client=\"other\", Device=\"beets\", DeviceId=\"beets\", Version=\"0.0.0\"'", ".", "format", "(", "user_id", "=", "user_id", ")", "headers", "[", "'x-emby-authorization'", "]", "=", "authorization", "if", "token", ":", "headers", "[", "'x-mediabrowser-token'", "]", "=", "token", "return", "headers" ]
return header dict that is needed to talk to the emby api .
train
false
36,083
def t_paired(a, b, tails='two-sided', exp_diff=0): if (len(a) != len(b)): raise ValueError('Unequal length lists in ttest_paired.') return t_one_sample((array(a) - array(b)), popmean=exp_diff, tails=tails)
[ "def", "t_paired", "(", "a", ",", "b", ",", "tails", "=", "'two-sided'", ",", "exp_diff", "=", "0", ")", ":", "if", "(", "len", "(", "a", ")", "!=", "len", "(", "b", ")", ")", ":", "raise", "ValueError", "(", "'Unequal length lists in ttest_paired.'", ")", "return", "t_one_sample", "(", "(", "array", "(", "a", ")", "-", "array", "(", "b", ")", ")", ",", "popmean", "=", "exp_diff", ",", "tails", "=", "tails", ")" ]
returns t and prob for two related samples of scores a and b .
train
false
36,084
def get_special_chars(language): for char in SPECIAL_CHARS: (yield (get_char_description(char), char)) code = language.code.replace(u'_', u'-').split(u'-')[0] if (code in EXTRA_CHARS): for char in EXTRA_CHARS[code]: (yield (get_char_description(char), char)) (yield get_quote(code, DOUBLE_OPEN, _(u'Opening double quote'))) (yield get_quote(code, DOUBLE_CLOSE, _(u'Closing double quote'))) (yield get_quote(code, SINGLE_OPEN, _(u'Opening single quote'))) (yield get_quote(code, SINGLE_CLOSE, _(u'Closing single quote'))) if (code in HYPHEN_LANGS): (yield (_(u'Hyphen'), u'-')) if (code in EN_DASH_LANGS): (yield (_(u'En dash'), u'\u2013')) if (code in EM_DASH_LANGS): (yield (_(u'Em dash'), u'\u2014'))
[ "def", "get_special_chars", "(", "language", ")", ":", "for", "char", "in", "SPECIAL_CHARS", ":", "(", "yield", "(", "get_char_description", "(", "char", ")", ",", "char", ")", ")", "code", "=", "language", ".", "code", ".", "replace", "(", "u'_'", ",", "u'-'", ")", ".", "split", "(", "u'-'", ")", "[", "0", "]", "if", "(", "code", "in", "EXTRA_CHARS", ")", ":", "for", "char", "in", "EXTRA_CHARS", "[", "code", "]", ":", "(", "yield", "(", "get_char_description", "(", "char", ")", ",", "char", ")", ")", "(", "yield", "get_quote", "(", "code", ",", "DOUBLE_OPEN", ",", "_", "(", "u'Opening double quote'", ")", ")", ")", "(", "yield", "get_quote", "(", "code", ",", "DOUBLE_CLOSE", ",", "_", "(", "u'Closing double quote'", ")", ")", ")", "(", "yield", "get_quote", "(", "code", ",", "SINGLE_OPEN", ",", "_", "(", "u'Opening single quote'", ")", ")", ")", "(", "yield", "get_quote", "(", "code", ",", "SINGLE_CLOSE", ",", "_", "(", "u'Closing single quote'", ")", ")", ")", "if", "(", "code", "in", "HYPHEN_LANGS", ")", ":", "(", "yield", "(", "_", "(", "u'Hyphen'", ")", ",", "u'-'", ")", ")", "if", "(", "code", "in", "EN_DASH_LANGS", ")", ":", "(", "yield", "(", "_", "(", "u'En dash'", ")", ",", "u'\\u2013'", ")", ")", "if", "(", "code", "in", "EM_DASH_LANGS", ")", ":", "(", "yield", "(", "_", "(", "u'Em dash'", ")", ",", "u'\\u2014'", ")", ")" ]
returns list of special characters .
train
false
36,085
def getInvalidAuthType(): invalidAuthType = 'ThisPluginDoesNotExist' while (invalidAuthType in [factory.authType for factory in strcred.findCheckerFactories()]): invalidAuthType += '_' return invalidAuthType
[ "def", "getInvalidAuthType", "(", ")", ":", "invalidAuthType", "=", "'ThisPluginDoesNotExist'", "while", "(", "invalidAuthType", "in", "[", "factory", ".", "authType", "for", "factory", "in", "strcred", ".", "findCheckerFactories", "(", ")", "]", ")", ":", "invalidAuthType", "+=", "'_'", "return", "invalidAuthType" ]
helper method to produce an auth type that doesnt exist .
train
false
36,086
def get_redis_ConnectionError(): from redis import exceptions return exceptions.ConnectionError
[ "def", "get_redis_ConnectionError", "(", ")", ":", "from", "redis", "import", "exceptions", "return", "exceptions", ".", "ConnectionError" ]
return the redis connectionerror exception class .
train
false
36,088
def admin_username(): return DETAILS.get('admin_username', 'root')
[ "def", "admin_username", "(", ")", ":", "return", "DETAILS", ".", "get", "(", "'admin_username'", ",", "'root'", ")" ]
return the admin_username in the details dictionary .
train
false
36,089
def test_Gaussian2DCovariance(): cov_matrix = [[49.0, (-16.0)], [(-16.0), 9.0]] model = models.Gaussian2D(17.0, 2.0, 2.5, cov_matrix=cov_matrix) (y, x) = np.mgrid[0:5, 0:5] g = model(x, y) g_ref = [[4.3744505, 5.8413977, 7.42988694, 9.00160175, 10.38794269], [8.83290201, 10.81772851, 12.61946384, 14.02225593, 14.84113227], [13.68528889, 15.37184621, 16.44637743, 16.76048705, 16.26953638], [16.26953638, 16.76048705, 16.44637743, 15.37184621, 13.68528889], [14.84113227, 14.02225593, 12.61946384, 10.81772851, 8.83290201]] assert_allclose(g, g_ref, rtol=0, atol=1e-06)
[ "def", "test_Gaussian2DCovariance", "(", ")", ":", "cov_matrix", "=", "[", "[", "49.0", ",", "(", "-", "16.0", ")", "]", ",", "[", "(", "-", "16.0", ")", ",", "9.0", "]", "]", "model", "=", "models", ".", "Gaussian2D", "(", "17.0", ",", "2.0", ",", "2.5", ",", "cov_matrix", "=", "cov_matrix", ")", "(", "y", ",", "x", ")", "=", "np", ".", "mgrid", "[", "0", ":", "5", ",", "0", ":", "5", "]", "g", "=", "model", "(", "x", ",", "y", ")", "g_ref", "=", "[", "[", "4.3744505", ",", "5.8413977", ",", "7.42988694", ",", "9.00160175", ",", "10.38794269", "]", ",", "[", "8.83290201", ",", "10.81772851", ",", "12.61946384", ",", "14.02225593", ",", "14.84113227", "]", ",", "[", "13.68528889", ",", "15.37184621", ",", "16.44637743", ",", "16.76048705", ",", "16.26953638", "]", ",", "[", "16.26953638", ",", "16.76048705", ",", "16.44637743", ",", "15.37184621", ",", "13.68528889", "]", ",", "[", "14.84113227", ",", "14.02225593", ",", "12.61946384", ",", "10.81772851", ",", "8.83290201", "]", "]", "assert_allclose", "(", "g", ",", "g_ref", ",", "rtol", "=", "0", ",", "atol", "=", "1e-06", ")" ]
test rotated elliptical gaussian2d model when cov_matrix is input .
train
false
36,090
@register.simple_tag(takes_context=True) def routablepageurl(context, page, url_name, *args, **kwargs): request = context[u'request'] base_url = page.relative_url(request.site) routed_url = page.reverse_subpage(url_name, args=args, kwargs=kwargs) return (base_url + routed_url)
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "routablepageurl", "(", "context", ",", "page", ",", "url_name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "request", "=", "context", "[", "u'request'", "]", "base_url", "=", "page", ".", "relative_url", "(", "request", ".", "site", ")", "routed_url", "=", "page", ".", "reverse_subpage", "(", "url_name", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", "return", "(", "base_url", "+", "routed_url", ")" ]
routablepageurl is similar to pageurl .
train
false
36,091
def is_sequence(i, include=None): return ((hasattr(i, '__getitem__') and iterable(i)) or (bool(include) and isinstance(i, include)))
[ "def", "is_sequence", "(", "i", ",", "include", "=", "None", ")", ":", "return", "(", "(", "hasattr", "(", "i", ",", "'__getitem__'", ")", "and", "iterable", "(", "i", ")", ")", "or", "(", "bool", "(", "include", ")", "and", "isinstance", "(", "i", ",", "include", ")", ")", ")" ]
return a boolean indicating whether i is a sequence in the sympy sense .
train
true
36,093
def cgconfig_is_running(): return service_cgconfig_control('status')
[ "def", "cgconfig_is_running", "(", ")", ":", "return", "service_cgconfig_control", "(", "'status'", ")" ]
check cgconfig service status .
train
false
36,096
def cleanup_page_name(title): if (not title): return title name = title.lower() name = re.sub(u'[~!@#$%^&*+()<>,."\'\\?]', u'', name) name = re.sub(u'[:/]', u'-', name) name = u'-'.join(name.split()) name = re.sub(u'(-)\\1+', u'\\1', name) return name[:140]
[ "def", "cleanup_page_name", "(", "title", ")", ":", "if", "(", "not", "title", ")", ":", "return", "title", "name", "=", "title", ".", "lower", "(", ")", "name", "=", "re", ".", "sub", "(", "u'[~!@#$%^&*+()<>,.\"\\'\\\\?]'", ",", "u''", ",", "name", ")", "name", "=", "re", ".", "sub", "(", "u'[:/]'", ",", "u'-'", ",", "name", ")", "name", "=", "u'-'", ".", "join", "(", "name", ".", "split", "(", ")", ")", "name", "=", "re", ".", "sub", "(", "u'(-)\\\\1+'", ",", "u'\\\\1'", ",", "name", ")", "return", "name", "[", ":", "140", "]" ]
make page name from title .
train
false
36,097
def params(css, encoding, use_bom=False, expect_error=False, **kwargs): return (css, encoding, use_bom, expect_error, kwargs)
[ "def", "params", "(", "css", ",", "encoding", ",", "use_bom", "=", "False", ",", "expect_error", "=", "False", ",", "**", "kwargs", ")", ":", "return", "(", "css", ",", "encoding", ",", "use_bom", ",", "expect_error", ",", "kwargs", ")" ]
nicer syntax to make a tuple .
train
false
36,099
def resolve_type(type_, package_context): bt = base_msg_type(type_) if (bt in BUILTIN_TYPES): return type_ elif (bt == 'Header'): return 'std_msgs/Header' elif (SEP in type_): return type_ else: return ('%s%s%s' % (package_context, SEP, type_))
[ "def", "resolve_type", "(", "type_", ",", "package_context", ")", ":", "bt", "=", "base_msg_type", "(", "type_", ")", "if", "(", "bt", "in", "BUILTIN_TYPES", ")", ":", "return", "type_", "elif", "(", "bt", "==", "'Header'", ")", ":", "return", "'std_msgs/Header'", "elif", "(", "SEP", "in", "type_", ")", ":", "return", "type_", "else", ":", "return", "(", "'%s%s%s'", "%", "(", "package_context", ",", "SEP", ",", "type_", ")", ")" ]
resolve type name based on current package context .
train
false
36,101
def test_visual(displayer, choices): displayer.notification('Random notification!') displayer.menu('Question?', choices, ok_label='O', cancel_label='Can', help_label='??') displayer.menu('Question?', [choice[1] for choice in choices], ok_label='O', cancel_label='Can', help_label='??') displayer.input('Input Message') displayer.yesno('YesNo Message', yes_label='Yessir', no_label='Nosir') displayer.checklist('Checklist Message', [choice[0] for choice in choices])
[ "def", "test_visual", "(", "displayer", ",", "choices", ")", ":", "displayer", ".", "notification", "(", "'Random notification!'", ")", "displayer", ".", "menu", "(", "'Question?'", ",", "choices", ",", "ok_label", "=", "'O'", ",", "cancel_label", "=", "'Can'", ",", "help_label", "=", "'??'", ")", "displayer", ".", "menu", "(", "'Question?'", ",", "[", "choice", "[", "1", "]", "for", "choice", "in", "choices", "]", ",", "ok_label", "=", "'O'", ",", "cancel_label", "=", "'Can'", ",", "help_label", "=", "'??'", ")", "displayer", ".", "input", "(", "'Input Message'", ")", "displayer", ".", "yesno", "(", "'YesNo Message'", ",", "yes_label", "=", "'Yessir'", ",", "no_label", "=", "'Nosir'", ")", "displayer", ".", "checklist", "(", "'Checklist Message'", ",", "[", "choice", "[", "0", "]", "for", "choice", "in", "choices", "]", ")" ]
visually test all of the display functions .
train
false
36,102
def _jaccard(a_rows, a_cols, b_rows, b_cols): intersection = ((a_rows * b_rows).sum() * (a_cols * b_cols).sum()) a_size = (a_rows.sum() * a_cols.sum()) b_size = (b_rows.sum() * b_cols.sum()) return (intersection / ((a_size + b_size) - intersection))
[ "def", "_jaccard", "(", "a_rows", ",", "a_cols", ",", "b_rows", ",", "b_cols", ")", ":", "intersection", "=", "(", "(", "a_rows", "*", "b_rows", ")", ".", "sum", "(", ")", "*", "(", "a_cols", "*", "b_cols", ")", ".", "sum", "(", ")", ")", "a_size", "=", "(", "a_rows", ".", "sum", "(", ")", "*", "a_cols", ".", "sum", "(", ")", ")", "b_size", "=", "(", "b_rows", ".", "sum", "(", ")", "*", "b_cols", ".", "sum", "(", ")", ")", "return", "(", "intersection", "/", "(", "(", "a_size", "+", "b_size", ")", "-", "intersection", ")", ")" ]
jaccard coefficient on the elements of the two biclusters .
train
false
36,103
def tclobjs_to_py(adict): for (opt, val) in adict.iteritems(): if (val and hasattr(val, '__len__') and (not isinstance(val, basestring))): if (getattr(val[0], 'typename', None) == 'StateSpec'): val = _list_from_statespec(val) else: val = map(_convert_stringval, val) elif hasattr(val, 'typename'): val = _convert_stringval(val) adict[opt] = val return adict
[ "def", "tclobjs_to_py", "(", "adict", ")", ":", "for", "(", "opt", ",", "val", ")", "in", "adict", ".", "iteritems", "(", ")", ":", "if", "(", "val", "and", "hasattr", "(", "val", ",", "'__len__'", ")", "and", "(", "not", "isinstance", "(", "val", ",", "basestring", ")", ")", ")", ":", "if", "(", "getattr", "(", "val", "[", "0", "]", ",", "'typename'", ",", "None", ")", "==", "'StateSpec'", ")", ":", "val", "=", "_list_from_statespec", "(", "val", ")", "else", ":", "val", "=", "map", "(", "_convert_stringval", ",", "val", ")", "elif", "hasattr", "(", "val", ",", "'typename'", ")", ":", "val", "=", "_convert_stringval", "(", "val", ")", "adict", "[", "opt", "]", "=", "val", "return", "adict" ]
returns adict with its values converted from tcl objects to python objects .
train
false
36,104
def list_devices(): return devices.gpus
[ "def", "list_devices", "(", ")", ":", "return", "devices", ".", "gpus" ]
return a list of all detected devices .
train
false
36,105
def erf_zeros(nt): if ((floor(nt) != nt) or (nt <= 0) or (not isscalar(nt))): raise ValueError('Argument must be positive scalar integer.') return specfun.cerzo(nt)
[ "def", "erf_zeros", "(", "nt", ")", ":", "if", "(", "(", "floor", "(", "nt", ")", "!=", "nt", ")", "or", "(", "nt", "<=", "0", ")", "or", "(", "not", "isscalar", "(", "nt", ")", ")", ")", ":", "raise", "ValueError", "(", "'Argument must be positive scalar integer.'", ")", "return", "specfun", ".", "cerzo", "(", "nt", ")" ]
compute nt complex zeros of error function erf(z) .
train
false
36,106
def make_ssl_devcert(base_path, host=None, cn=None): from OpenSSL import crypto if (host is not None): cn = ('*.%s/CN=%s' % (host, host)) (cert, pkey) = generate_adhoc_ssl_pair(cn=cn) cert_file = (base_path + '.crt') pkey_file = (base_path + '.key') with open(cert_file, 'wb') as f: f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) with open(pkey_file, 'wb') as f: f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) return (cert_file, pkey_file)
[ "def", "make_ssl_devcert", "(", "base_path", ",", "host", "=", "None", ",", "cn", "=", "None", ")", ":", "from", "OpenSSL", "import", "crypto", "if", "(", "host", "is", "not", "None", ")", ":", "cn", "=", "(", "'*.%s/CN=%s'", "%", "(", "host", ",", "host", ")", ")", "(", "cert", ",", "pkey", ")", "=", "generate_adhoc_ssl_pair", "(", "cn", "=", "cn", ")", "cert_file", "=", "(", "base_path", "+", "'.crt'", ")", "pkey_file", "=", "(", "base_path", "+", "'.key'", ")", "with", "open", "(", "cert_file", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "crypto", ".", "dump_certificate", "(", "crypto", ".", "FILETYPE_PEM", ",", "cert", ")", ")", "with", "open", "(", "pkey_file", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "crypto", ".", "dump_privatekey", "(", "crypto", ".", "FILETYPE_PEM", ",", "pkey", ")", ")", "return", "(", "cert_file", ",", "pkey_file", ")" ]
creates an ssl key for development .
train
true
36,108
def _nsenter(pid): return 'nsenter --target {0} --mount --uts --ipc --net --pid'.format(pid)
[ "def", "_nsenter", "(", "pid", ")", ":", "return", "'nsenter --target {0} --mount --uts --ipc --net --pid'", ".", "format", "(", "pid", ")" ]
return the nsenter command to attach to the named container .
train
false
36,109
@contextmanager def patch_client(target, mock_client=None): with mock.patch(target) as client_getter: client = (mock_client or MockBox()) client_getter.return_value = client (yield client)
[ "@", "contextmanager", "def", "patch_client", "(", "target", ",", "mock_client", "=", "None", ")", ":", "with", "mock", ".", "patch", "(", "target", ")", "as", "client_getter", ":", "client", "=", "(", "mock_client", "or", "MockBox", "(", ")", ")", "client_getter", ".", "return_value", "=", "client", "(", "yield", "client", ")" ]
patches a function that returns a dropboxclient .
train
false
36,110
def hls_palette(n_colors=6, h=0.01, l=0.6, s=0.65): hues = np.linspace(0, 1, (n_colors + 1))[:(-1)] hues += h hues %= 1 hues -= hues.astype(int) palette = [colorsys.hls_to_rgb(h_i, l, s) for h_i in hues] return _ColorPalette(palette)
[ "def", "hls_palette", "(", "n_colors", "=", "6", ",", "h", "=", "0.01", ",", "l", "=", "0.6", ",", "s", "=", "0.65", ")", ":", "hues", "=", "np", ".", "linspace", "(", "0", ",", "1", ",", "(", "n_colors", "+", "1", ")", ")", "[", ":", "(", "-", "1", ")", "]", "hues", "+=", "h", "hues", "%=", "1", "hues", "-=", "hues", ".", "astype", "(", "int", ")", "palette", "=", "[", "colorsys", ".", "hls_to_rgb", "(", "h_i", ",", "l", ",", "s", ")", "for", "h_i", "in", "hues", "]", "return", "_ColorPalette", "(", "palette", ")" ]
get a set of evenly spaced colors in hls hue space .
train
true
36,112
@pytest.mark.cmd @pytest.mark.django_db def test_export_tmx_tp(capfd, tp0, media_test_dir): lang_code = tp0.language.code prj_code = tp0.project.code call_command('export', '--tmx', ('--project=%s' % prj_code), ('--language=%s' % lang_code)) (out, err) = capfd.readouterr() rev = revision.get(tp0.__class__)(tp0.directory).get(key='stats') filename = ('%s.%s.%s.tmx.zip' % (tp0.project.code, tp0.language.code, rev[:10])) assert (os.path.join(lang_code, filename) in out) call_command('export', '--tmx', ('--project=%s' % prj_code), ('--language=%s' % lang_code)) (out, err) = capfd.readouterr() assert (('Translation project (%s) has not been changed' % tp0) in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_export_tmx_tp", "(", "capfd", ",", "tp0", ",", "media_test_dir", ")", ":", "lang_code", "=", "tp0", ".", "language", ".", "code", "prj_code", "=", "tp0", ".", "project", ".", "code", "call_command", "(", "'export'", ",", "'--tmx'", ",", "(", "'--project=%s'", "%", "prj_code", ")", ",", "(", "'--language=%s'", "%", "lang_code", ")", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "rev", "=", "revision", ".", "get", "(", "tp0", ".", "__class__", ")", "(", "tp0", ".", "directory", ")", ".", "get", "(", "key", "=", "'stats'", ")", "filename", "=", "(", "'%s.%s.%s.tmx.zip'", "%", "(", "tp0", ".", "project", ".", "code", ",", "tp0", ".", "language", ".", "code", ",", "rev", "[", ":", "10", "]", ")", ")", "assert", "(", "os", ".", "path", ".", "join", "(", "lang_code", ",", "filename", ")", "in", "out", ")", "call_command", "(", "'export'", ",", "'--tmx'", ",", "(", "'--project=%s'", "%", "prj_code", ")", ",", "(", "'--language=%s'", "%", "lang_code", ")", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "(", "'Translation project (%s) has not been changed'", "%", "tp0", ")", "in", "out", ")" ]
export a tp .
train
false
36,113
def OutIndent(format='', *args): if format: VaOutput(format, args) IndentLevel()
[ "def", "OutIndent", "(", "format", "=", "''", ",", "*", "args", ")", ":", "if", "format", ":", "VaOutput", "(", "format", ",", "args", ")", "IndentLevel", "(", ")" ]
combine output() followed by indentlevel() .
train
false
36,114
def _hyper_to_meijerg(func): ap = func.ap bq = func.bq p = len(ap) q = len(bq) ispoly = any((((i <= 0) and (int(i) == i)) for i in ap)) if ispoly: return hyperexpand(func) z = func.args[2] an = ((1 - i) for i in ap) anp = () bm = (S(0),) bmq = ((1 - i) for i in bq) k = S(1) for i in bq: k = (k * gamma(i)) for i in ap: k = (k / gamma(i)) return (k * meijerg(an, anp, bm, bmq, (- z)))
[ "def", "_hyper_to_meijerg", "(", "func", ")", ":", "ap", "=", "func", ".", "ap", "bq", "=", "func", ".", "bq", "p", "=", "len", "(", "ap", ")", "q", "=", "len", "(", "bq", ")", "ispoly", "=", "any", "(", "(", "(", "(", "i", "<=", "0", ")", "and", "(", "int", "(", "i", ")", "==", "i", ")", ")", "for", "i", "in", "ap", ")", ")", "if", "ispoly", ":", "return", "hyperexpand", "(", "func", ")", "z", "=", "func", ".", "args", "[", "2", "]", "an", "=", "(", "(", "1", "-", "i", ")", "for", "i", "in", "ap", ")", "anp", "=", "(", ")", "bm", "=", "(", "S", "(", "0", ")", ",", ")", "bmq", "=", "(", "(", "1", "-", "i", ")", "for", "i", "in", "bq", ")", "k", "=", "S", "(", "1", ")", "for", "i", "in", "bq", ":", "k", "=", "(", "k", "*", "gamma", "(", "i", ")", ")", "for", "i", "in", "ap", ":", "k", "=", "(", "k", "/", "gamma", "(", "i", ")", ")", "return", "(", "k", "*", "meijerg", "(", "an", ",", "anp", ",", "bm", ",", "bmq", ",", "(", "-", "z", ")", ")", ")" ]
converts a hyper to meijerg .
train
false
36,115
def chars_after(chars, match): if (match.end >= len(match.input_string)): return True return (match.input_string[match.end] in chars)
[ "def", "chars_after", "(", "chars", ",", "match", ")", ":", "if", "(", "match", ".", "end", ">=", "len", "(", "match", ".", "input_string", ")", ")", ":", "return", "True", "return", "(", "match", ".", "input_string", "[", "match", ".", "end", "]", "in", "chars", ")" ]
validate the match if right character is in a given sequence .
train
true
36,117
def _aresame(a, b): from .function import AppliedUndef, UndefinedFunction as UndefFunc for (i, j) in zip_longest(preorder_traversal(a), preorder_traversal(b)): if ((i != j) or (type(i) != type(j))): if ((isinstance(i, UndefFunc) and isinstance(j, UndefFunc)) or (isinstance(i, AppliedUndef) and isinstance(j, AppliedUndef))): if (i.class_key() != j.class_key()): return False else: return False else: return True
[ "def", "_aresame", "(", "a", ",", "b", ")", ":", "from", ".", "function", "import", "AppliedUndef", ",", "UndefinedFunction", "as", "UndefFunc", "for", "(", "i", ",", "j", ")", "in", "zip_longest", "(", "preorder_traversal", "(", "a", ")", ",", "preorder_traversal", "(", "b", ")", ")", ":", "if", "(", "(", "i", "!=", "j", ")", "or", "(", "type", "(", "i", ")", "!=", "type", "(", "j", ")", ")", ")", ":", "if", "(", "(", "isinstance", "(", "i", ",", "UndefFunc", ")", "and", "isinstance", "(", "j", ",", "UndefFunc", ")", ")", "or", "(", "isinstance", "(", "i", ",", "AppliedUndef", ")", "and", "isinstance", "(", "j", ",", "AppliedUndef", ")", ")", ")", ":", "if", "(", "i", ".", "class_key", "(", ")", "!=", "j", ".", "class_key", "(", ")", ")", ":", "return", "False", "else", ":", "return", "False", "else", ":", "return", "True" ]
return true if a and b are structurally the same .
train
false
36,118
def _find_cfgs(path, cfgs=None): ignored = ['var', 'parts'] dirs = [] if (not cfgs): cfgs = [] for i in os.listdir(path): fi = os.path.join(path, i) if (fi.endswith('.cfg') and os.path.isfile(fi)): cfgs.append(fi) if (os.path.isdir(fi) and (i not in ignored)): dirs.append(fi) for fpath in dirs: for (p, ids, ifs) in os.walk(fpath): for i in ifs: if i.endswith('.cfg'): cfgs.append(os.path.join(p, i)) return cfgs
[ "def", "_find_cfgs", "(", "path", ",", "cfgs", "=", "None", ")", ":", "ignored", "=", "[", "'var'", ",", "'parts'", "]", "dirs", "=", "[", "]", "if", "(", "not", "cfgs", ")", ":", "cfgs", "=", "[", "]", "for", "i", "in", "os", ".", "listdir", "(", "path", ")", ":", "fi", "=", "os", ".", "path", ".", "join", "(", "path", ",", "i", ")", "if", "(", "fi", ".", "endswith", "(", "'.cfg'", ")", "and", "os", ".", "path", ".", "isfile", "(", "fi", ")", ")", ":", "cfgs", ".", "append", "(", "fi", ")", "if", "(", "os", ".", "path", ".", "isdir", "(", "fi", ")", "and", "(", "i", "not", "in", "ignored", ")", ")", ":", "dirs", ".", "append", "(", "fi", ")", "for", "fpath", "in", "dirs", ":", "for", "(", "p", ",", "ids", ",", "ifs", ")", "in", "os", ".", "walk", "(", "fpath", ")", ":", "for", "i", "in", "ifs", ":", "if", "i", ".", "endswith", "(", "'.cfg'", ")", ":", "cfgs", ".", "append", "(", "os", ".", "path", ".", "join", "(", "p", ",", "i", ")", ")", "return", "cfgs" ]
find all buildout configs in a subdirectory .
train
true
36,119
def instance_uuids_overlap(host_state, uuids): if isinstance(uuids, six.string_types): uuids = [uuids] set_uuids = set(uuids) host_uuids = set(host_state.instances.keys()) return bool(host_uuids.intersection(set_uuids))
[ "def", "instance_uuids_overlap", "(", "host_state", ",", "uuids", ")", ":", "if", "isinstance", "(", "uuids", ",", "six", ".", "string_types", ")", ":", "uuids", "=", "[", "uuids", "]", "set_uuids", "=", "set", "(", "uuids", ")", "host_uuids", "=", "set", "(", "host_state", ".", "instances", ".", "keys", "(", ")", ")", "return", "bool", "(", "host_uuids", ".", "intersection", "(", "set_uuids", ")", ")" ]
tests for overlap between a host_state and a list of uuids .
train
false
36,120
def concatenate(tensor_list, axis=0): concat_size = sum((tt.shape[axis] for tt in tensor_list)) output_shape = () for k in range(axis): output_shape += (tensor_list[0].shape[k],) output_shape += (concat_size,) for k in range((axis + 1), tensor_list[0].ndim): output_shape += (tensor_list[0].shape[k],) out = tensor.zeros(output_shape) offset = 0 for tt in tensor_list: indices = () for k in range(axis): indices += (slice(None),) indices += (slice(offset, (offset + tt.shape[axis])),) for k in range((axis + 1), tensor_list[0].ndim): indices += (slice(None),) out = tensor.set_subtensor(out[indices], tt) offset += tt.shape[axis] return out
[ "def", "concatenate", "(", "tensor_list", ",", "axis", "=", "0", ")", ":", "concat_size", "=", "sum", "(", "(", "tt", ".", "shape", "[", "axis", "]", "for", "tt", "in", "tensor_list", ")", ")", "output_shape", "=", "(", ")", "for", "k", "in", "range", "(", "axis", ")", ":", "output_shape", "+=", "(", "tensor_list", "[", "0", "]", ".", "shape", "[", "k", "]", ",", ")", "output_shape", "+=", "(", "concat_size", ",", ")", "for", "k", "in", "range", "(", "(", "axis", "+", "1", ")", ",", "tensor_list", "[", "0", "]", ".", "ndim", ")", ":", "output_shape", "+=", "(", "tensor_list", "[", "0", "]", ".", "shape", "[", "k", "]", ",", ")", "out", "=", "tensor", ".", "zeros", "(", "output_shape", ")", "offset", "=", "0", "for", "tt", "in", "tensor_list", ":", "indices", "=", "(", ")", "for", "k", "in", "range", "(", "axis", ")", ":", "indices", "+=", "(", "slice", "(", "None", ")", ",", ")", "indices", "+=", "(", "slice", "(", "offset", ",", "(", "offset", "+", "tt", ".", "shape", "[", "axis", "]", ")", ")", ",", ")", "for", "k", "in", "range", "(", "(", "axis", "+", "1", ")", ",", "tensor_list", "[", "0", "]", ".", "ndim", ")", ":", "indices", "+=", "(", "slice", "(", "None", ")", ",", ")", "out", "=", "tensor", ".", "set_subtensor", "(", "out", "[", "indices", "]", ",", "tt", ")", "offset", "+=", "tt", ".", "shape", "[", "axis", "]", "return", "out" ]
alias for join .
train
false
36,121
def post_process(matches): groups = _group_overlapping_rectangles(matches) for group_matches in groups.values(): mins = numpy.stack((numpy.array(m[0]) for m in group_matches)) maxs = numpy.stack((numpy.array(m[1]) for m in group_matches)) present_probs = numpy.array([m[2] for m in group_matches]) letter_probs = numpy.stack((m[3] for m in group_matches)) (yield (numpy.max(mins, axis=0).flatten(), numpy.min(maxs, axis=0).flatten(), numpy.max(present_probs), letter_probs[numpy.argmax(present_probs)]))
[ "def", "post_process", "(", "matches", ")", ":", "groups", "=", "_group_overlapping_rectangles", "(", "matches", ")", "for", "group_matches", "in", "groups", ".", "values", "(", ")", ":", "mins", "=", "numpy", ".", "stack", "(", "(", "numpy", ".", "array", "(", "m", "[", "0", "]", ")", "for", "m", "in", "group_matches", ")", ")", "maxs", "=", "numpy", ".", "stack", "(", "(", "numpy", ".", "array", "(", "m", "[", "1", "]", ")", "for", "m", "in", "group_matches", ")", ")", "present_probs", "=", "numpy", ".", "array", "(", "[", "m", "[", "2", "]", "for", "m", "in", "group_matches", "]", ")", "letter_probs", "=", "numpy", ".", "stack", "(", "(", "m", "[", "3", "]", "for", "m", "in", "group_matches", ")", ")", "(", "yield", "(", "numpy", ".", "max", "(", "mins", ",", "axis", "=", "0", ")", ".", "flatten", "(", ")", ",", "numpy", ".", "min", "(", "maxs", ",", "axis", "=", "0", ")", ".", "flatten", "(", ")", ",", "numpy", ".", "max", "(", "present_probs", ")", ",", "letter_probs", "[", "numpy", ".", "argmax", "(", "present_probs", ")", "]", ")", ")" ]
take an iterable of matches as returned by detect and merge duplicates .
train
false
36,122
def fake_payment_instruction(db, participant, team): start_date = max(participant.claimed_time, team.ctime) ctime = faker.date_time_between(start_date) return insert_fake_data(db, 'payment_instructions', ctime=ctime, mtime=faker.date_time_between(ctime), participant_id=participant.id, team_id=team.id, amount=fake_tip_amount())
[ "def", "fake_payment_instruction", "(", "db", ",", "participant", ",", "team", ")", ":", "start_date", "=", "max", "(", "participant", ".", "claimed_time", ",", "team", ".", "ctime", ")", "ctime", "=", "faker", ".", "date_time_between", "(", "start_date", ")", "return", "insert_fake_data", "(", "db", ",", "'payment_instructions'", ",", "ctime", "=", "ctime", ",", "mtime", "=", "faker", ".", "date_time_between", "(", "ctime", ")", ",", "participant_id", "=", "participant", ".", "id", ",", "team_id", "=", "team", ".", "id", ",", "amount", "=", "fake_tip_amount", "(", ")", ")" ]
create a fake payment_instruction .
train
false
36,124
def _class_matcher(target_cls): def match(node): return isinstance(node, target_cls) return match
[ "def", "_class_matcher", "(", "target_cls", ")", ":", "def", "match", "(", "node", ")", ":", "return", "isinstance", "(", "node", ",", "target_cls", ")", "return", "match" ]
match a node if its an instance of the given class .
train
false
36,125
def dbg(*args, **kwds): mkQApp() from . import console c = console.ConsoleWidget(*args, **kwds) c.catchAllExceptions() c.show() global consoles try: consoles.append(c) except NameError: consoles = [c] return c
[ "def", "dbg", "(", "*", "args", ",", "**", "kwds", ")", ":", "mkQApp", "(", ")", "from", ".", "import", "console", "c", "=", "console", ".", "ConsoleWidget", "(", "*", "args", ",", "**", "kwds", ")", "c", ".", "catchAllExceptions", "(", ")", "c", ".", "show", "(", ")", "global", "consoles", "try", ":", "consoles", ".", "append", "(", "c", ")", "except", "NameError", ":", "consoles", "=", "[", "c", "]", "return", "c" ]
looks at the stack .
train
false
36,126
def raise_exception(typ, val, tbtext): if (orig_type(typ) == str): raise typ else: val._remote_traceback = tbtext raise val
[ "def", "raise_exception", "(", "typ", ",", "val", ",", "tbtext", ")", ":", "if", "(", "orig_type", "(", "typ", ")", "==", "str", ")", ":", "raise", "typ", "else", ":", "val", ".", "_remote_traceback", "=", "tbtext", "raise", "val" ]
a helper for raising remote exceptions .
train
false
36,127
def match_replace_regex(regex, src_namespace, dest_namespace): match = regex.match(src_namespace) if match: return dest_namespace.replace('*', match.group(1)) return None
[ "def", "match_replace_regex", "(", "regex", ",", "src_namespace", ",", "dest_namespace", ")", ":", "match", "=", "regex", ".", "match", "(", "src_namespace", ")", "if", "match", ":", "return", "dest_namespace", ".", "replace", "(", "'*'", ",", "match", ".", "group", "(", "1", ")", ")", "return", "None" ]
return the new mapped namespace if the src_namespace matches the regex .
train
true
36,128
def check_suggestion_permissions(request, mode, translation): if (mode in (u'accept', u'accept_edit')): if (not can_accept_suggestion(request.user, translation)): messages.error(request, _(u'You do not have privilege to accept suggestions!')) return False elif (mode == u'delete'): if (not can_delete_suggestion(request.user, translation)): messages.error(request, _(u'You do not have privilege to delete suggestions!')) return False elif (mode in (u'upvode', u'downvote')): if (not can_vote_suggestion(request.user, translation)): messages.error(request, _(u'You do not have privilege to vote for suggestions!')) return False return True
[ "def", "check_suggestion_permissions", "(", "request", ",", "mode", ",", "translation", ")", ":", "if", "(", "mode", "in", "(", "u'accept'", ",", "u'accept_edit'", ")", ")", ":", "if", "(", "not", "can_accept_suggestion", "(", "request", ".", "user", ",", "translation", ")", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "u'You do not have privilege to accept suggestions!'", ")", ")", "return", "False", "elif", "(", "mode", "==", "u'delete'", ")", ":", "if", "(", "not", "can_delete_suggestion", "(", "request", ".", "user", ",", "translation", ")", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "u'You do not have privilege to delete suggestions!'", ")", ")", "return", "False", "elif", "(", "mode", "in", "(", "u'upvode'", ",", "u'downvote'", ")", ")", ":", "if", "(", "not", "can_vote_suggestion", "(", "request", ".", "user", ",", "translation", ")", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "u'You do not have privilege to vote for suggestions!'", ")", ")", "return", "False", "return", "True" ]
checks permission for suggestion handling .
train
false
36,130
def verify_vat(vat_id, default_prefix=u''): vat_id = re.sub(u'\\s+', u'', vat_id.upper()) vat_id = vat_id.replace(u'-', u'') prefix = vat_id[:2] if (prefix not in PATTERNS): prefix = default_prefix spec = PATTERNS.get(prefix) if ((not spec) or (not prefix)): raise VatCannotIdentifyValidationError(u'VAT ID could not be identified') if (not vat_id.startswith(prefix)): vat_id = (prefix + vat_id) patterns = (spec.get(u'pattern') or []) if isinstance(patterns, six.string_types): patterns = [patterns] for pat in patterns: regexp = compile_pattern(prefix, pat) match = regexp.match(vat_id) if match: return (prefix, match.groups()) raise VatInvalidValidationError((u'VAT ID for %(country)s could not be validated' % spec))
[ "def", "verify_vat", "(", "vat_id", ",", "default_prefix", "=", "u''", ")", ":", "vat_id", "=", "re", ".", "sub", "(", "u'\\\\s+'", ",", "u''", ",", "vat_id", ".", "upper", "(", ")", ")", "vat_id", "=", "vat_id", ".", "replace", "(", "u'-'", ",", "u''", ")", "prefix", "=", "vat_id", "[", ":", "2", "]", "if", "(", "prefix", "not", "in", "PATTERNS", ")", ":", "prefix", "=", "default_prefix", "spec", "=", "PATTERNS", ".", "get", "(", "prefix", ")", "if", "(", "(", "not", "spec", ")", "or", "(", "not", "prefix", ")", ")", ":", "raise", "VatCannotIdentifyValidationError", "(", "u'VAT ID could not be identified'", ")", "if", "(", "not", "vat_id", ".", "startswith", "(", "prefix", ")", ")", ":", "vat_id", "=", "(", "prefix", "+", "vat_id", ")", "patterns", "=", "(", "spec", ".", "get", "(", "u'pattern'", ")", "or", "[", "]", ")", "if", "isinstance", "(", "patterns", ",", "six", ".", "string_types", ")", ":", "patterns", "=", "[", "patterns", "]", "for", "pat", "in", "patterns", ":", "regexp", "=", "compile_pattern", "(", "prefix", ",", "pat", ")", "match", "=", "regexp", ".", "match", "(", "vat_id", ")", "if", "match", ":", "return", "(", "prefix", ",", "match", ".", "groups", "(", ")", ")", "raise", "VatInvalidValidationError", "(", "(", "u'VAT ID for %(country)s could not be validated'", "%", "spec", ")", ")" ]
verify an eu vat id .
train
false
36,132
def create_cluster(**kwargs): cluster_name = kwargs.get('name') cluster_spec = kwargs.get('cluster_spec') datacenter = kwargs.get('datacenter') if (cluster_name is None): raise ValueError('Missing value for name.') if (datacenter is None): raise ValueError('Missing value for datacenter.') if (cluster_spec is None): cluster_spec = vim.cluster.ConfigSpecEx() host_folder = datacenter.hostFolder cluster = host_folder.CreateClusterEx(name=cluster_name, spec=cluster_spec) return cluster
[ "def", "create_cluster", "(", "**", "kwargs", ")", ":", "cluster_name", "=", "kwargs", ".", "get", "(", "'name'", ")", "cluster_spec", "=", "kwargs", ".", "get", "(", "'cluster_spec'", ")", "datacenter", "=", "kwargs", ".", "get", "(", "'datacenter'", ")", "if", "(", "cluster_name", "is", "None", ")", ":", "raise", "ValueError", "(", "'Missing value for name.'", ")", "if", "(", "datacenter", "is", "None", ")", ":", "raise", "ValueError", "(", "'Missing value for datacenter.'", ")", "if", "(", "cluster_spec", "is", "None", ")", ":", "cluster_spec", "=", "vim", ".", "cluster", ".", "ConfigSpecEx", "(", ")", "host_folder", "=", "datacenter", ".", "hostFolder", "cluster", "=", "host_folder", ".", "CreateClusterEx", "(", "name", "=", "cluster_name", ",", "spec", "=", "cluster_spec", ")", "return", "cluster" ]
method to create a cluster in vcenter .
train
false
36,133
def rs_client_noauth(h=client_context.host, p=client_context.port, **kwargs): return _mongo_client(h, p, authenticate=False, **kwargs)
[ "def", "rs_client_noauth", "(", "h", "=", "client_context", ".", "host", ",", "p", "=", "client_context", ".", "port", ",", "**", "kwargs", ")", ":", "return", "_mongo_client", "(", "h", ",", "p", ",", "authenticate", "=", "False", ",", "**", "kwargs", ")" ]
connect to the replica set .
train
false
36,136
def _pretty_frame_relation(frel): outstr = u'<{0.type.superFrameName}={0.superFrameName} -- {0.type.name} -> {0.type.subFrameName}={0.subFrameName}>'.format(frel) return outstr
[ "def", "_pretty_frame_relation", "(", "frel", ")", ":", "outstr", "=", "u'<{0.type.superFrameName}={0.superFrameName} -- {0.type.name} -> {0.type.subFrameName}={0.subFrameName}>'", ".", "format", "(", "frel", ")", "return", "outstr" ]
helper function for pretty-printing a frame relation .
train
false
36,137
def train_experiment(state, channel): yaml_template = state.yaml_template hyper_parameters = expand(flatten(state.hyper_parameters), dict_type=ydict) final_yaml_str = (yaml_template % hyper_parameters) train_obj = pylearn2.config.yaml_parse.load(final_yaml_str) try: iter(train_obj) iterable = True except TypeError: iterable = False if iterable: raise NotImplementedError('Current implementation does not support running multiple models in one yaml string. Please change the yaml template and parameters to contain only one single model.') else: train_obj.main_loop() state.results = jobman.tools.resolve(state.extract_results)(train_obj) return channel.COMPLETE
[ "def", "train_experiment", "(", "state", ",", "channel", ")", ":", "yaml_template", "=", "state", ".", "yaml_template", "hyper_parameters", "=", "expand", "(", "flatten", "(", "state", ".", "hyper_parameters", ")", ",", "dict_type", "=", "ydict", ")", "final_yaml_str", "=", "(", "yaml_template", "%", "hyper_parameters", ")", "train_obj", "=", "pylearn2", ".", "config", ".", "yaml_parse", ".", "load", "(", "final_yaml_str", ")", "try", ":", "iter", "(", "train_obj", ")", "iterable", "=", "True", "except", "TypeError", ":", "iterable", "=", "False", "if", "iterable", ":", "raise", "NotImplementedError", "(", "'Current implementation does not support running multiple models in one yaml string. Please change the yaml template and parameters to contain only one single model.'", ")", "else", ":", "train_obj", ".", "main_loop", "(", ")", "state", ".", "results", "=", "jobman", ".", "tools", ".", "resolve", "(", "state", ".", "extract_results", ")", "(", "train_obj", ")", "return", "channel", ".", "COMPLETE" ]
train a model specified in state .
train
false
36,138
@verbose def search_demo(keywords='nltk'): oauth = credsfromfile() client = Query(**oauth) for tweet in client.search_tweets(keywords=keywords, limit=10): print(tweet['text'])
[ "@", "verbose", "def", "search_demo", "(", "keywords", "=", "'nltk'", ")", ":", "oauth", "=", "credsfromfile", "(", ")", "client", "=", "Query", "(", "**", "oauth", ")", "for", "tweet", "in", "client", ".", "search_tweets", "(", "keywords", "=", "keywords", ",", "limit", "=", "10", ")", ":", "print", "(", "tweet", "[", "'text'", "]", ")" ]
use the rest api to search for past tweets containing a given keyword .
train
false
36,140
def snapshot_metadata_update(context, snapshot_id, metadata, delete): IMPL.snapshot_metadata_update(context, snapshot_id, metadata, delete)
[ "def", "snapshot_metadata_update", "(", "context", ",", "snapshot_id", ",", "metadata", ",", "delete", ")", ":", "IMPL", ".", "snapshot_metadata_update", "(", "context", ",", "snapshot_id", ",", "metadata", ",", "delete", ")" ]
update metadata if it exists .
train
false
36,141
def get_all_param_values(layer, **tags): params = get_all_params(layer, **tags) return [p.get_value() for p in params]
[ "def", "get_all_param_values", "(", "layer", ",", "**", "tags", ")", ":", "params", "=", "get_all_params", "(", "layer", ",", "**", "tags", ")", "return", "[", "p", ".", "get_value", "(", ")", "for", "p", "in", "params", "]" ]
this function returns the values of the parameters of all layers below one or more given :class:layer instances .
train
false
36,142
def ptmsiReallocationComplete(): a = TpPd(pd=3) b = MessageType(mesType=17) packet = (a / b) return packet
[ "def", "ptmsiReallocationComplete", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "17", ")", "packet", "=", "(", "a", "/", "b", ")", "return", "packet" ]
p-tmsi reallocation complete section 9 .
train
true
36,143
def writeServerPassword(password): assert (len(password) == const.SHARED_SECRET_LENGTH) assert (const.STATE_LOCATION != '') passwordFile = os.path.join(const.STATE_LOCATION, const.PASSWORD_FILE) log.info(("Writing server password to file `%s'." % passwordFile)) password_str = '# You are supposed to give this password to your clients to append it to their Bridge line' password_str = '# For example: Bridge scramblesuit 192.0.2.1:5555 EXAMPLEFINGERPRINTNOTREAL password=EXAMPLEPASSWORDNOTREAL' password_str = '# Here is your password:' password_str = ('password=%s\n' % base64.b32encode(password)) try: with open(passwordFile, 'w') as fd: fd.write(password_str) except IOError as err: log.error(("Error writing password file to `%s': %s" % (passwordFile, err)))
[ "def", "writeServerPassword", "(", "password", ")", ":", "assert", "(", "len", "(", "password", ")", "==", "const", ".", "SHARED_SECRET_LENGTH", ")", "assert", "(", "const", ".", "STATE_LOCATION", "!=", "''", ")", "passwordFile", "=", "os", ".", "path", ".", "join", "(", "const", ".", "STATE_LOCATION", ",", "const", ".", "PASSWORD_FILE", ")", "log", ".", "info", "(", "(", "\"Writing server password to file `%s'.\"", "%", "passwordFile", ")", ")", "password_str", "=", "'# You are supposed to give this password to your clients to append it to their Bridge line'", "password_str", "=", "'# For example: Bridge scramblesuit 192.0.2.1:5555 EXAMPLEFINGERPRINTNOTREAL password=EXAMPLEPASSWORDNOTREAL'", "password_str", "=", "'# Here is your password:'", "password_str", "=", "(", "'password=%s\\n'", "%", "base64", ".", "b32encode", "(", "password", ")", ")", "try", ":", "with", "open", "(", "passwordFile", ",", "'w'", ")", "as", "fd", ":", "fd", ".", "write", "(", "password_str", ")", "except", "IOError", "as", "err", ":", "log", ".", "error", "(", "(", "\"Error writing password file to `%s': %s\"", "%", "(", "passwordFile", ",", "err", ")", ")", ")" ]
dump our scramblesuit server descriptor to file .
train
false
36,144
def security_group_rule_get_by_security_group(context, security_group_id): return IMPL.security_group_rule_get_by_security_group(context, security_group_id)
[ "def", "security_group_rule_get_by_security_group", "(", "context", ",", "security_group_id", ")", ":", "return", "IMPL", ".", "security_group_rule_get_by_security_group", "(", "context", ",", "security_group_id", ")" ]
get all rules for a given security group .
train
false
36,145
@handle_response_format @treeio_login_required @_process_mass_form def index_weblinks(request, response_format='html'): if request.GET: query = _get_direct_filter_query(request.GET) links = Object.filter_by_request(request, WebLink.objects.filter(query).order_by('-last_updated')) else: links = Object.filter_by_request(request, WebLink.objects.order_by('-last_updated')) filters = FilterForm(request.user.profile, 'title', request.GET) context = _get_default_context(request) context.update({'filters': filters, 'links': links}) return render_to_response('documents/index_weblinks', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "_process_mass_form", "def", "index_weblinks", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "request", ".", "GET", ":", "query", "=", "_get_direct_filter_query", "(", "request", ".", "GET", ")", "links", "=", "Object", ".", "filter_by_request", "(", "request", ",", "WebLink", ".", "objects", ".", "filter", "(", "query", ")", ".", "order_by", "(", "'-last_updated'", ")", ")", "else", ":", "links", "=", "Object", ".", "filter_by_request", "(", "request", ",", "WebLink", ".", "objects", ".", "order_by", "(", "'-last_updated'", ")", ")", "filters", "=", "FilterForm", "(", "request", ".", "user", ".", "profile", ",", "'title'", ",", "request", ".", "GET", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'filters'", ":", "filters", ",", "'links'", ":", "links", "}", ")", "return", "render_to_response", "(", "'documents/index_weblinks'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
index_weblinks page: displays all weblinks .
train
false
36,147
def test_float_range_3(): try: float_range('foobar') assert False except Exception: pass
[ "def", "test_float_range_3", "(", ")", ":", "try", ":", "float_range", "(", "'foobar'", ")", "assert", "False", "except", "Exception", ":", "pass" ]
assert that the tpot cli interfaces float range throws an exception when input is not a float .
train
false
36,148
def test_step_has_repr(): step = Step.from_string(I_HAVE_TASTY_BEVERAGES) assert_equals(repr(step), (('<Step: "' + string.split(I_HAVE_TASTY_BEVERAGES, '\n')[0]) + '">'))
[ "def", "test_step_has_repr", "(", ")", ":", "step", "=", "Step", ".", "from_string", "(", "I_HAVE_TASTY_BEVERAGES", ")", "assert_equals", "(", "repr", "(", "step", ")", ",", "(", "(", "'<Step: \"'", "+", "string", ".", "split", "(", "I_HAVE_TASTY_BEVERAGES", ",", "'\\n'", ")", "[", "0", "]", ")", "+", "'\">'", ")", ")" ]
step implements __repr__ nicely .
train
false
36,149
def budget_bundle(): s3.prep = (lambda r: (r.representation == 's3json')) return s3_rest_controller()
[ "def", "budget_bundle", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", "return", "s3_rest_controller", "(", ")" ]
rest controller to retrieve budget_budget_bundle field options .
train
false
36,150
def transaction_objects(): from inbox.models import Calendar, Contact, Message, Event, Block, Category, Thread, Metadata return {'calendar': Calendar, 'contact': Contact, 'draft': Message, 'event': Event, 'file': Block, 'message': Message, 'thread': Thread, 'label': Category, 'folder': Category, 'account': Account, 'metadata': Metadata}
[ "def", "transaction_objects", "(", ")", ":", "from", "inbox", ".", "models", "import", "Calendar", ",", "Contact", ",", "Message", ",", "Event", ",", "Block", ",", "Category", ",", "Thread", ",", "Metadata", "return", "{", "'calendar'", ":", "Calendar", ",", "'contact'", ":", "Contact", ",", "'draft'", ":", "Message", ",", "'event'", ":", "Event", ",", "'file'", ":", "Block", ",", "'message'", ":", "Message", ",", "'thread'", ":", "Thread", ",", "'label'", ":", "Category", ",", "'folder'", ":", "Category", ",", "'account'", ":", "Account", ",", "'metadata'", ":", "Metadata", "}" ]
return the mapping from api object name - which becomes the transaction .
train
false
36,151
def get_package_data(): package_data = {} package_data['jupyterhub'] = ['alembic.ini', 'alembic/*', 'alembic/versions/*'] return package_data
[ "def", "get_package_data", "(", ")", ":", "package_data", "=", "{", "}", "package_data", "[", "'jupyterhub'", "]", "=", "[", "'alembic.ini'", ",", "'alembic/*'", ",", "'alembic/versions/*'", "]", "return", "package_data" ]
return data files for package *name* with extensions in *extlist* .
train
false
36,152
def dict_to_tuple_key(dictionary): return tuple(sorted(dictionary.items()))
[ "def", "dict_to_tuple_key", "(", "dictionary", ")", ":", "return", "tuple", "(", "sorted", "(", "dictionary", ".", "items", "(", ")", ")", ")" ]
converts a dictionary to a tuple that can be used as an immutable key .
train
false
36,154
def dt_links(reporter, row=1, tableID='datatable', quiet=True): config = current.test_config browser = config.browser links = [] column = 1 while True: td = (".//*[@id='%s']/tbody/tr[%s]/td[%s]" % (tableID, row, column)) try: elem = browser.find_element_by_xpath(td) except: break cnt = 1 while True: link = (".//*[@id='%s']/tbody/tr[%s]/td[%s]/a[%s]" % (tableID, row, column, cnt)) try: elem = browser.find_element_by_xpath(link) except: break cnt += 1 if (not quiet): reporter(('%2d) %s' % (column, elem.text))) links.append([column, elem.text]) column += 1 return links
[ "def", "dt_links", "(", "reporter", ",", "row", "=", "1", ",", "tableID", "=", "'datatable'", ",", "quiet", "=", "True", ")", ":", "config", "=", "current", ".", "test_config", "browser", "=", "config", ".", "browser", "links", "=", "[", "]", "column", "=", "1", "while", "True", ":", "td", "=", "(", "\".//*[@id='%s']/tbody/tr[%s]/td[%s]\"", "%", "(", "tableID", ",", "row", ",", "column", ")", ")", "try", ":", "elem", "=", "browser", ".", "find_element_by_xpath", "(", "td", ")", "except", ":", "break", "cnt", "=", "1", "while", "True", ":", "link", "=", "(", "\".//*[@id='%s']/tbody/tr[%s]/td[%s]/a[%s]\"", "%", "(", "tableID", ",", "row", ",", "column", ",", "cnt", ")", ")", "try", ":", "elem", "=", "browser", ".", "find_element_by_xpath", "(", "link", ")", "except", ":", "break", "cnt", "+=", "1", "if", "(", "not", "quiet", ")", ":", "reporter", "(", "(", "'%2d) %s'", "%", "(", "column", ",", "elem", ".", "text", ")", ")", ")", "links", ".", "append", "(", "[", "column", ",", "elem", ".", "text", "]", ")", "column", "+=", "1", "return", "links" ]
returns a list of links in the given row of the datatable .
train
false
36,156
def _table_attrs(table): cmd = 'osqueryi --json "pragma table_info({0})"'.format(table) res = __salt__['cmd.run_all'](cmd) if (res['retcode'] == 0): attrs = [] text = json.loads(res['stdout']) for item in text: attrs.append(item['name']) return attrs return False
[ "def", "_table_attrs", "(", "table", ")", ":", "cmd", "=", "'osqueryi --json \"pragma table_info({0})\"'", ".", "format", "(", "table", ")", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "if", "(", "res", "[", "'retcode'", "]", "==", "0", ")", ":", "attrs", "=", "[", "]", "text", "=", "json", ".", "loads", "(", "res", "[", "'stdout'", "]", ")", "for", "item", "in", "text", ":", "attrs", ".", "append", "(", "item", "[", "'name'", "]", ")", "return", "attrs", "return", "False" ]
helper function to find valid table attributes .
train
true
36,157
def this_is_a_testdir(dirpath): return TESTDIR_RE.match(osp.basename(dirpath))
[ "def", "this_is_a_testdir", "(", "dirpath", ")", ":", "return", "TESTDIR_RE", ".", "match", "(", "osp", ".", "basename", "(", "dirpath", ")", ")" ]
returns true if filename seems to be a test directory .
train
false
36,160
def pam_auth(): def pam_auth_aux(username, password): return authenticate(username, password) return pam_auth_aux
[ "def", "pam_auth", "(", ")", ":", "def", "pam_auth_aux", "(", "username", ",", "password", ")", ":", "return", "authenticate", "(", "username", ",", "password", ")", "return", "pam_auth_aux" ]
to use pam_login: from gluon .
train
false
36,164
def scalar_inc_dec(word, valence, is_cap_diff): scalar = 0.0 word_lower = word.lower() if (word_lower in BOOSTER_DICT): scalar = BOOSTER_DICT[word_lower] if (valence < 0): scalar *= (-1) if (word.isupper() and is_cap_diff): if (valence > 0): scalar += C_INCR else: scalar -= C_INCR return scalar
[ "def", "scalar_inc_dec", "(", "word", ",", "valence", ",", "is_cap_diff", ")", ":", "scalar", "=", "0.0", "word_lower", "=", "word", ".", "lower", "(", ")", "if", "(", "word_lower", "in", "BOOSTER_DICT", ")", ":", "scalar", "=", "BOOSTER_DICT", "[", "word_lower", "]", "if", "(", "valence", "<", "0", ")", ":", "scalar", "*=", "(", "-", "1", ")", "if", "(", "word", ".", "isupper", "(", ")", "and", "is_cap_diff", ")", ":", "if", "(", "valence", ">", "0", ")", ":", "scalar", "+=", "C_INCR", "else", ":", "scalar", "-=", "C_INCR", "return", "scalar" ]
check if the preceding words increase .
train
true
36,165
def get_blobstore(layout): if layout.is_s3: from wal_e.blobstore import s3 blobstore = s3 elif layout.is_wabs: from wal_e.blobstore import wabs blobstore = wabs elif layout.is_swift: from wal_e.blobstore import swift blobstore = swift elif layout.is_gs: from wal_e.blobstore import gs blobstore = gs return blobstore
[ "def", "get_blobstore", "(", "layout", ")", ":", "if", "layout", ".", "is_s3", ":", "from", "wal_e", ".", "blobstore", "import", "s3", "blobstore", "=", "s3", "elif", "layout", ".", "is_wabs", ":", "from", "wal_e", ".", "blobstore", "import", "wabs", "blobstore", "=", "wabs", "elif", "layout", ".", "is_swift", ":", "from", "wal_e", ".", "blobstore", "import", "swift", "blobstore", "=", "swift", "elif", "layout", ".", "is_gs", ":", "from", "wal_e", ".", "blobstore", "import", "gs", "blobstore", "=", "gs", "return", "blobstore" ]
return blobstore instance for a given storage layout args: layout : target storage layout .
train
true
36,166
def create_custom_metric(client, project_id, custom_metric_type, metric_kind): metrics_descriptor = {'type': custom_metric_type, 'labels': [{'key': 'environment', 'valueType': 'STRING', 'description': 'An arbitrary measurement'}], 'metricKind': metric_kind, 'valueType': 'INT64', 'unit': 'items', 'description': 'An arbitrary measurement.', 'displayName': 'Custom Metric'} client.projects().metricDescriptors().create(name=project_id, body=metrics_descriptor).execute()
[ "def", "create_custom_metric", "(", "client", ",", "project_id", ",", "custom_metric_type", ",", "metric_kind", ")", ":", "metrics_descriptor", "=", "{", "'type'", ":", "custom_metric_type", ",", "'labels'", ":", "[", "{", "'key'", ":", "'environment'", ",", "'valueType'", ":", "'STRING'", ",", "'description'", ":", "'An arbitrary measurement'", "}", "]", ",", "'metricKind'", ":", "metric_kind", ",", "'valueType'", ":", "'INT64'", ",", "'unit'", ":", "'items'", ",", "'description'", ":", "'An arbitrary measurement.'", ",", "'displayName'", ":", "'Custom Metric'", "}", "client", ".", "projects", "(", ")", ".", "metricDescriptors", "(", ")", ".", "create", "(", "name", "=", "project_id", ",", "body", "=", "metrics_descriptor", ")", ".", "execute", "(", ")" ]
create custom metric descriptor .
train
false
36,167
def register_shape_c_code(type, code, version=()): Shape.c_code_and_version[type] = (code, version)
[ "def", "register_shape_c_code", "(", "type", ",", "code", ",", "version", "=", "(", ")", ")", ":", "Shape", ".", "c_code_and_version", "[", "type", "]", "=", "(", "code", ",", "version", ")" ]
tell shape op how to generate c code for a theano type .
train
false
36,168
def arr_to_chars(arr): dims = list(arr.shape) if (not dims): dims = [1] dims.append(int(arr.dtype.str[2:])) arr = np.ndarray(shape=dims, dtype=arr_dtype_number(arr, 1), buffer=arr) empties = [(arr == '')] if (not np.any(empties)): return arr arr = arr.copy() arr[empties] = ' ' return arr
[ "def", "arr_to_chars", "(", "arr", ")", ":", "dims", "=", "list", "(", "arr", ".", "shape", ")", "if", "(", "not", "dims", ")", ":", "dims", "=", "[", "1", "]", "dims", ".", "append", "(", "int", "(", "arr", ".", "dtype", ".", "str", "[", "2", ":", "]", ")", ")", "arr", "=", "np", ".", "ndarray", "(", "shape", "=", "dims", ",", "dtype", "=", "arr_dtype_number", "(", "arr", ",", "1", ")", ",", "buffer", "=", "arr", ")", "empties", "=", "[", "(", "arr", "==", "''", ")", "]", "if", "(", "not", "np", ".", "any", "(", "empties", ")", ")", ":", "return", "arr", "arr", "=", "arr", ".", "copy", "(", ")", "arr", "[", "empties", "]", "=", "' '", "return", "arr" ]
convert string array to char array .
train
false
36,169
def agent_build_update(context, agent_build_id, values): IMPL.agent_build_update(context, agent_build_id, values)
[ "def", "agent_build_update", "(", "context", ",", "agent_build_id", ",", "values", ")", ":", "IMPL", ".", "agent_build_update", "(", "context", ",", "agent_build_id", ",", "values", ")" ]
update agent build entry .
train
false
36,170
@pytest.mark.django_db def test_order_package_parent_links(): source = get_order_source_with_a_package() order = OrderCreator().create_order(source) lines = [prettify_order_line(line) for line in order.lines.all()] assert (lines == [u'#0 10 x PackageParent', u'#1 10 x PackageChild-0, child of #0', u'#2 20 x PackageChild-1, child of #0', u'#3 30 x PackageChild-2, child of #0', u'#4 40 x PackageChild-3, child of #0'])
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_order_package_parent_links", "(", ")", ":", "source", "=", "get_order_source_with_a_package", "(", ")", "order", "=", "OrderCreator", "(", ")", ".", "create_order", "(", "source", ")", "lines", "=", "[", "prettify_order_line", "(", "line", ")", "for", "line", "in", "order", ".", "lines", ".", "all", "(", ")", "]", "assert", "(", "lines", "==", "[", "u'#0 10 x PackageParent'", ",", "u'#1 10 x PackageChild-0, child of #0'", ",", "u'#2 20 x PackageChild-1, child of #0'", ",", "u'#3 30 x PackageChild-2, child of #0'", ",", "u'#4 40 x PackageChild-3, child of #0'", "]", ")" ]
test ordercreator creates package parent links for child lines .
train
false
36,171
def get_email_date(): (day, month, dayno, hms, year) = time.asctime(time.gmtime()).split() return ('%s, %s %s %s %s +0000' % (day, dayno, month, year, hms))
[ "def", "get_email_date", "(", ")", ":", "(", "day", ",", "month", ",", "dayno", ",", "hms", ",", "year", ")", "=", "time", ".", "asctime", "(", "time", ".", "gmtime", "(", ")", ")", ".", "split", "(", ")", "return", "(", "'%s, %s %s %s %s +0000'", "%", "(", "day", ",", "dayno", ",", "month", ",", "year", ",", "hms", ")", ")" ]
return un-localized date string for the date: field .
train
false
36,173
def population(): output = s3_rest_controller() return output
[ "def", "population", "(", ")", ":", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful controller .
train
false
36,178
def gf_add_ground(f, a, p, K): if (not f): a = (a % p) else: a = ((f[(-1)] + a) % p) if (len(f) > 1): return (f[:(-1)] + [a]) if (not a): return [] else: return [a]
[ "def", "gf_add_ground", "(", "f", ",", "a", ",", "p", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "a", "=", "(", "a", "%", "p", ")", "else", ":", "a", "=", "(", "(", "f", "[", "(", "-", "1", ")", "]", "+", "a", ")", "%", "p", ")", "if", "(", "len", "(", "f", ")", ">", "1", ")", ":", "return", "(", "f", "[", ":", "(", "-", "1", ")", "]", "+", "[", "a", "]", ")", "if", "(", "not", "a", ")", ":", "return", "[", "]", "else", ":", "return", "[", "a", "]" ]
compute f + a where f in gf(p)[x] and a in gf(p) .
train
false
36,180
def _canonical_unit_name(name): if (not isinstance(name, six.string_types)): name = str(name) if any((name.endswith(suffix) for suffix in VALID_UNIT_TYPES)): return name return ('%s.service' % name)
[ "def", "_canonical_unit_name", "(", "name", ")", ":", "if", "(", "not", "isinstance", "(", "name", ",", "six", ".", "string_types", ")", ")", ":", "name", "=", "str", "(", "name", ")", "if", "any", "(", "(", "name", ".", "endswith", "(", "suffix", ")", "for", "suffix", "in", "VALID_UNIT_TYPES", ")", ")", ":", "return", "name", "return", "(", "'%s.service'", "%", "name", ")" ]
build a canonical unit name treating unit names without one of the valid suffixes as a service .
train
true
36,181
def restore_apps(storage, bucket): return BR.app_restore(storage, bucket_name=bucket)
[ "def", "restore_apps", "(", "storage", ",", "bucket", ")", ":", "return", "BR", ".", "app_restore", "(", "storage", ",", "bucket_name", "=", "bucket", ")" ]
triggers a restore of apps for the current deployment .
train
false
36,182
def top(**kwargs): if ('id' not in kwargs['opts']): return {} cmd = '{0} {1}'.format(__opts__['master_tops']['ext_nodes'], kwargs['opts']['id']) ndata = yaml.safe_load(subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]) if (not ndata): log.info('master_tops ext_nodes call did not return any data') ret = {} if ('environment' in ndata): env = ndata['environment'] else: env = 'base' if ('classes' in ndata): if isinstance(ndata['classes'], dict): ret[env] = list(ndata['classes']) elif isinstance(ndata['classes'], list): ret[env] = ndata['classes'] else: return ret else: log.info('master_tops ext_nodes call did not have a dictionary with a "classes" key.') return ret
[ "def", "top", "(", "**", "kwargs", ")", ":", "if", "(", "'id'", "not", "in", "kwargs", "[", "'opts'", "]", ")", ":", "return", "{", "}", "cmd", "=", "'{0} {1}'", ".", "format", "(", "__opts__", "[", "'master_tops'", "]", "[", "'ext_nodes'", "]", ",", "kwargs", "[", "'opts'", "]", "[", "'id'", "]", ")", "ndata", "=", "yaml", ".", "safe_load", "(", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", ".", "communicate", "(", ")", "[", "0", "]", ")", "if", "(", "not", "ndata", ")", ":", "log", ".", "info", "(", "'master_tops ext_nodes call did not return any data'", ")", "ret", "=", "{", "}", "if", "(", "'environment'", "in", "ndata", ")", ":", "env", "=", "ndata", "[", "'environment'", "]", "else", ":", "env", "=", "'base'", "if", "(", "'classes'", "in", "ndata", ")", ":", "if", "isinstance", "(", "ndata", "[", "'classes'", "]", ",", "dict", ")", ":", "ret", "[", "env", "]", "=", "list", "(", "ndata", "[", "'classes'", "]", ")", "elif", "isinstance", "(", "ndata", "[", "'classes'", "]", ",", "list", ")", ":", "ret", "[", "env", "]", "=", "ndata", "[", "'classes'", "]", "else", ":", "return", "ret", "else", ":", "log", ".", "info", "(", "'master_tops ext_nodes call did not have a dictionary with a \"classes\" key.'", ")", "return", "ret" ]
runs the docker top command on a specific container name container name or id cli example: **return data** a list of dictionaries containing information about each process .
train
true
36,183
def make_scalar(): raise NotImplementedError('TODO: implement this function.')
[ "def", "make_scalar", "(", ")", ":", "raise", "NotImplementedError", "(", "'TODO: implement this function.'", ")" ]
returns a new theano scalar .
train
false
36,186
def is_file_old(db_file_name, older_than=24): if os.path.isfile(db_file_name): from datetime import timedelta file_datetime = datetime.fromtimestamp(os.stat(db_file_name).st_ctime) if ((datetime.today() - file_datetime) >= timedelta(hours=older_than)): if verbose: print u'File is old' return True else: if verbose: print u'File is recent' return False else: if verbose: print u'File does not exist' return True
[ "def", "is_file_old", "(", "db_file_name", ",", "older_than", "=", "24", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "db_file_name", ")", ":", "from", "datetime", "import", "timedelta", "file_datetime", "=", "datetime", ".", "fromtimestamp", "(", "os", ".", "stat", "(", "db_file_name", ")", ".", "st_ctime", ")", "if", "(", "(", "datetime", ".", "today", "(", ")", "-", "file_datetime", ")", ">=", "timedelta", "(", "hours", "=", "older_than", ")", ")", ":", "if", "verbose", ":", "print", "u'File is old'", "return", "True", "else", ":", "if", "verbose", ":", "print", "u'File is recent'", "return", "False", "else", ":", "if", "verbose", ":", "print", "u'File does not exist'", "return", "True" ]
checks if file exists and is older than specified hours returns -> true: file does not exist or file is old false: file is new .
train
false
36,189
def check_layer(uploaded): msg = ('Was expecting layer object, got %s' % type(uploaded)) assert isinstance(uploaded, Layer), msg msg = ('The layer does not have a valid name: %s' % uploaded.name) assert (len(uploaded.name) > 0), msg
[ "def", "check_layer", "(", "uploaded", ")", ":", "msg", "=", "(", "'Was expecting layer object, got %s'", "%", "type", "(", "uploaded", ")", ")", "assert", "isinstance", "(", "uploaded", ",", "Layer", ")", ",", "msg", "msg", "=", "(", "'The layer does not have a valid name: %s'", "%", "uploaded", ".", "name", ")", "assert", "(", "len", "(", "uploaded", ".", "name", ")", ">", "0", ")", ",", "msg" ]
verify if an object is a valid layer .
train
false
36,190
def prefetch_modeladmin_get_queryset(modeladmin, *lookups): def do_wrap(f): @wraps(f) def wrapper(request, *args, **kwargs): qs = f(request, *args, **kwargs) qs = qs.prefetch_related(*lookups) return qs return wrapper modeladmin.get_queryset = do_wrap(modeladmin.get_queryset)
[ "def", "prefetch_modeladmin_get_queryset", "(", "modeladmin", ",", "*", "lookups", ")", ":", "def", "do_wrap", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "qs", "=", "f", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "qs", "=", "qs", ".", "prefetch_related", "(", "*", "lookups", ")", "return", "qs", "return", "wrapper", "modeladmin", ".", "get_queryset", "=", "do_wrap", "(", "modeladmin", ".", "get_queryset", ")" ]
wraps default modeladmin get_queryset to prefetch related lookups .
train
false
36,192
def reduce_order(func): (nap, nbq, operators) = _reduce_order(func.ap, func.bq, ReduceOrder, default_sort_key) return (Hyper_Function(Tuple(*nap), Tuple(*nbq)), operators)
[ "def", "reduce_order", "(", "func", ")", ":", "(", "nap", ",", "nbq", ",", "operators", ")", "=", "_reduce_order", "(", "func", ".", "ap", ",", "func", ".", "bq", ",", "ReduceOrder", ",", "default_sort_key", ")", "return", "(", "Hyper_Function", "(", "Tuple", "(", "*", "nap", ")", ",", "Tuple", "(", "*", "nbq", ")", ")", ",", "operators", ")" ]
given the hypergeometric function func .
train
false
36,194
def fix_IE_for_attach(request, response): useragent = request.META.get('HTTP_USER_AGENT', '').upper() if (('MSIE' not in useragent) and ('CHROMEFRAME' not in useragent)): return response offending_headers = ('no-cache', 'no-store') if response.has_header('Content-Disposition'): try: del response['Pragma'] except KeyError: pass if response.has_header('Cache-Control'): cache_control_values = [value.strip() for value in response['Cache-Control'].split(',') if (value.strip().lower() not in offending_headers)] if (not len(cache_control_values)): del response['Cache-Control'] else: response['Cache-Control'] = ', '.join(cache_control_values) return response
[ "def", "fix_IE_for_attach", "(", "request", ",", "response", ")", ":", "useragent", "=", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "''", ")", ".", "upper", "(", ")", "if", "(", "(", "'MSIE'", "not", "in", "useragent", ")", "and", "(", "'CHROMEFRAME'", "not", "in", "useragent", ")", ")", ":", "return", "response", "offending_headers", "=", "(", "'no-cache'", ",", "'no-store'", ")", "if", "response", ".", "has_header", "(", "'Content-Disposition'", ")", ":", "try", ":", "del", "response", "[", "'Pragma'", "]", "except", "KeyError", ":", "pass", "if", "response", ".", "has_header", "(", "'Cache-Control'", ")", ":", "cache_control_values", "=", "[", "value", ".", "strip", "(", ")", "for", "value", "in", "response", "[", "'Cache-Control'", "]", ".", "split", "(", "','", ")", "if", "(", "value", ".", "strip", "(", ")", ".", "lower", "(", ")", "not", "in", "offending_headers", ")", "]", "if", "(", "not", "len", "(", "cache_control_values", ")", ")", ":", "del", "response", "[", "'Cache-Control'", "]", "else", ":", "response", "[", "'Cache-Control'", "]", "=", "', '", ".", "join", "(", "cache_control_values", ")", "return", "response" ]
this function will prevent django from serving a content-disposition header while expecting the browser to cache it .
train
false
36,196
def _get_epochs(): raw = read_raw_fif(raw_fname) raw.add_proj([], remove_existing=True) events = read_events(event_name) picks = _get_picks(raw) picks = picks[np.round(np.linspace(0, (len(picks) - 1), n_chan)).astype(int)] picks = np.concatenate([[2, 3, 4, 6, 7], picks]) epochs = Epochs(raw, events[:5], event_id, tmin, tmax, picks=picks) epochs.info['bads'] = [epochs.ch_names[(-1)]] return epochs
[ "def", "_get_epochs", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", "raw", ".", "add_proj", "(", "[", "]", ",", "remove_existing", "=", "True", ")", "events", "=", "read_events", "(", "event_name", ")", "picks", "=", "_get_picks", "(", "raw", ")", "picks", "=", "picks", "[", "np", ".", "round", "(", "np", ".", "linspace", "(", "0", ",", "(", "len", "(", "picks", ")", "-", "1", ")", ",", "n_chan", ")", ")", ".", "astype", "(", "int", ")", "]", "picks", "=", "np", ".", "concatenate", "(", "[", "[", "2", ",", "3", ",", "4", ",", "6", ",", "7", "]", ",", "picks", "]", ")", "epochs", "=", "Epochs", "(", "raw", ",", "events", "[", ":", "5", "]", ",", "event_id", ",", "tmin", ",", "tmax", ",", "picks", "=", "picks", ")", "epochs", ".", "info", "[", "'bads'", "]", "=", "[", "epochs", ".", "ch_names", "[", "(", "-", "1", ")", "]", "]", "return", "epochs" ]
get epochs .
train
false
36,197
def get_elevation_value(label): name = 'elevation' _check_is_integral(name, label) _check_range(name, label, (-1), 8) if (label == (-1)): return None else: return ((label * 5) + 30)
[ "def", "get_elevation_value", "(", "label", ")", ":", "name", "=", "'elevation'", "_check_is_integral", "(", "name", ",", "label", ")", "_check_range", "(", "name", ",", "label", ",", "(", "-", "1", ")", ",", "8", ")", "if", "(", "label", "==", "(", "-", "1", ")", ")", ":", "return", "None", "else", ":", "return", "(", "(", "label", "*", "5", ")", "+", "30", ")" ]
returns the angle in degrees represented by a elevation label int .
train
false
36,202
def ExpandMacros(string, expansions): if ('$' in string): for (old, new) in expansions.iteritems(): assert ('$(' not in new), new string = string.replace(old, new) return string
[ "def", "ExpandMacros", "(", "string", ",", "expansions", ")", ":", "if", "(", "'$'", "in", "string", ")", ":", "for", "(", "old", ",", "new", ")", "in", "expansions", ".", "iteritems", "(", ")", ":", "assert", "(", "'$('", "not", "in", "new", ")", ",", "new", "string", "=", "string", ".", "replace", "(", "old", ",", "new", ")", "return", "string" ]
expand $ per expansions dict .
train
false
36,203
def maybe_delivery_mode(v, modes=DELIVERY_MODES, default=PERSISTENT_DELIVERY_MODE): if v: return (v if isinstance(v, numbers.Integral) else modes[v]) return default
[ "def", "maybe_delivery_mode", "(", "v", ",", "modes", "=", "DELIVERY_MODES", ",", "default", "=", "PERSISTENT_DELIVERY_MODE", ")", ":", "if", "v", ":", "return", "(", "v", "if", "isinstance", "(", "v", ",", "numbers", ".", "Integral", ")", "else", "modes", "[", "v", "]", ")", "return", "default" ]
get delivery mode by name .
train
false
36,204
def _create_image(latex_program, latex_document, base_name, color, **kwargs): rel_source_path = (base_name + '.tex') pdf_path = os.path.join(temp_path, (base_name + '.pdf')) image_path = os.path.join(temp_path, (base_name + _IMAGE_EXTENSION)) if os.path.exists(pdf_path): return source_path = os.path.join(temp_path, rel_source_path) with open(source_path, 'w', encoding='utf-8') as f: f.write(latex_document) execute_command([latex_program, '-interaction=nonstopmode', rel_source_path], cwd=temp_path) pdf_exists = os.path.exists(pdf_path) if (not pdf_exists): dvi_path = os.path.join(temp_path, (base_name + '.dvi')) if os.path.exists(dvi_path): pdf_path = dvi_path pdf_exists = True if pdf_exists: (rc, _, output) = run_ghostscript_command(['-sDEVICE=bbox', '-r72', '-dLastPage=1', pdf_path], stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) if (rc == 0): try: bbox = [int(x) for x in output.splitlines()[0].lstrip('%%BoundingBox: ').split()] except ValueError: bbox = None else: bbox = None scale_factor = (8 if _hires else 1) command = ['-sDEVICE=pngalpha', '-dLastPage=1', '-sOutputFile={image_path}'.format(image_path=image_path), '-r{density}'.format(density=(_density * scale_factor)), '-dDownScaleFactor={0}'.format(scale_factor), '-dTextAlphaBits=4', '-dGraphicsAlphaBits=4'] if bbox: width = round((((((bbox[2] - bbox[0]) + 4) * _density) * scale_factor) / 72)) height = round((((((bbox[3] - bbox[1]) + 4) * _density) * scale_factor) / 72)) command.extend(['-g{width}x{height}'.format(**locals()), '-c', '<</Install {{{0} {1} translate}}>> setpagedevice'.format(((-1) * (bbox[0] - 2)), ((-1) * (bbox[1] - 2))), '-f']) command.append(pdf_path) run_ghostscript_command(command) err_file_path = (image_path + _ERROR_EXTENSION) err_log = [] if (not pdf_exists): err_log.append("Failed to run '{latex_program}' to create pdf to preview.".format(**locals())) err_log.append('') err_log.append('') log_file = os.path.join(temp_path, (base_name + '.log')) log_exists = os.path.exists(log_file) if (not log_exists): err_log.append('No log file found.') else: with open(log_file, 'rb') as f: log_data = f.read() try: (errors, warnings, _) = parse_tex_log(log_data, temp_path) except: err_log.append('Error while parsing log file.') errors = warnings = [] if errors: err_log.append('Errors:') err_log.extend(errors) if warnings: err_log.append('Warnings:') err_log.extend(warnings) err_log.append('') err_log.append('LaTeX document:') err_log.append('-----BEGIN DOCUMENT-----') err_log.append(latex_document) err_log.append('-----END DOCUMENT-----') if log_exists: err_log.append('') log_content = log_data.decode('utf8', 'ignore') err_log.append('Log file:') err_log.append('-----BEGIN LOG-----') err_log.append(log_content) err_log.append('-----END LOG-----') elif (not os.path.exists(image_path)): err_log.append('Failed to convert pdf to png to preview.') if err_log: with open(err_file_path, 'w') as f: f.write('\n'.join(err_log)) for ext in ['tex', 'aux', 'log', 'pdf', 'dvi']: delete_path = os.path.join(temp_path, ((base_name + '.') + ext)) if os.path.exists(delete_path): os.remove(delete_path)
[ "def", "_create_image", "(", "latex_program", ",", "latex_document", ",", "base_name", ",", "color", ",", "**", "kwargs", ")", ":", "rel_source_path", "=", "(", "base_name", "+", "'.tex'", ")", "pdf_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "(", "base_name", "+", "'.pdf'", ")", ")", "image_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "(", "base_name", "+", "_IMAGE_EXTENSION", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "pdf_path", ")", ":", "return", "source_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "rel_source_path", ")", "with", "open", "(", "source_path", ",", "'w'", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "f", ".", "write", "(", "latex_document", ")", "execute_command", "(", "[", "latex_program", ",", "'-interaction=nonstopmode'", ",", "rel_source_path", "]", ",", "cwd", "=", "temp_path", ")", "pdf_exists", "=", "os", ".", "path", ".", "exists", "(", "pdf_path", ")", "if", "(", "not", "pdf_exists", ")", ":", "dvi_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "(", "base_name", "+", "'.dvi'", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "dvi_path", ")", ":", "pdf_path", "=", "dvi_path", "pdf_exists", "=", "True", "if", "pdf_exists", ":", "(", "rc", ",", "_", ",", "output", ")", "=", "run_ghostscript_command", "(", "[", "'-sDEVICE=bbox'", ",", "'-r72'", ",", "'-dLastPage=1'", ",", "pdf_path", "]", ",", "stdout", "=", "subprocess", ".", "DEVNULL", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "if", "(", "rc", "==", "0", ")", ":", "try", ":", "bbox", "=", "[", "int", "(", "x", ")", "for", "x", "in", "output", ".", "splitlines", "(", ")", "[", "0", "]", ".", "lstrip", "(", "'%%BoundingBox: '", ")", ".", "split", "(", ")", "]", "except", "ValueError", ":", "bbox", "=", "None", "else", ":", "bbox", "=", "None", "scale_factor", "=", "(", "8", "if", "_hires", "else", "1", ")", "command", "=", "[", "'-sDEVICE=pngalpha'", ",", "'-dLastPage=1'", ",", "'-sOutputFile={image_path}'", ".", "format", "(", "image_path", "=", "image_path", ")", ",", "'-r{density}'", ".", "format", "(", "density", "=", "(", "_density", "*", "scale_factor", ")", ")", ",", "'-dDownScaleFactor={0}'", ".", "format", "(", "scale_factor", ")", ",", "'-dTextAlphaBits=4'", ",", "'-dGraphicsAlphaBits=4'", "]", "if", "bbox", ":", "width", "=", "round", "(", "(", "(", "(", "(", "(", "bbox", "[", "2", "]", "-", "bbox", "[", "0", "]", ")", "+", "4", ")", "*", "_density", ")", "*", "scale_factor", ")", "/", "72", ")", ")", "height", "=", "round", "(", "(", "(", "(", "(", "(", "bbox", "[", "3", "]", "-", "bbox", "[", "1", "]", ")", "+", "4", ")", "*", "_density", ")", "*", "scale_factor", ")", "/", "72", ")", ")", "command", ".", "extend", "(", "[", "'-g{width}x{height}'", ".", "format", "(", "**", "locals", "(", ")", ")", ",", "'-c'", ",", "'<</Install {{{0} {1} translate}}>> setpagedevice'", ".", "format", "(", "(", "(", "-", "1", ")", "*", "(", "bbox", "[", "0", "]", "-", "2", ")", ")", ",", "(", "(", "-", "1", ")", "*", "(", "bbox", "[", "1", "]", "-", "2", ")", ")", ")", ",", "'-f'", "]", ")", "command", ".", "append", "(", "pdf_path", ")", "run_ghostscript_command", "(", "command", ")", "err_file_path", "=", "(", "image_path", "+", "_ERROR_EXTENSION", ")", "err_log", "=", "[", "]", "if", "(", "not", "pdf_exists", ")", ":", "err_log", ".", "append", "(", "\"Failed to run '{latex_program}' to create pdf to preview.\"", ".", "format", "(", "**", "locals", "(", ")", ")", ")", "err_log", ".", "append", "(", "''", ")", "err_log", ".", "append", "(", "''", ")", "log_file", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "(", "base_name", "+", "'.log'", ")", ")", "log_exists", "=", "os", ".", "path", ".", "exists", "(", "log_file", ")", "if", "(", "not", "log_exists", ")", ":", "err_log", ".", "append", "(", "'No log file found.'", ")", "else", ":", "with", "open", "(", "log_file", ",", "'rb'", ")", "as", "f", ":", "log_data", "=", "f", ".", "read", "(", ")", "try", ":", "(", "errors", ",", "warnings", ",", "_", ")", "=", "parse_tex_log", "(", "log_data", ",", "temp_path", ")", "except", ":", "err_log", ".", "append", "(", "'Error while parsing log file.'", ")", "errors", "=", "warnings", "=", "[", "]", "if", "errors", ":", "err_log", ".", "append", "(", "'Errors:'", ")", "err_log", ".", "extend", "(", "errors", ")", "if", "warnings", ":", "err_log", ".", "append", "(", "'Warnings:'", ")", "err_log", ".", "extend", "(", "warnings", ")", "err_log", ".", "append", "(", "''", ")", "err_log", ".", "append", "(", "'LaTeX document:'", ")", "err_log", ".", "append", "(", "'-----BEGIN DOCUMENT-----'", ")", "err_log", ".", "append", "(", "latex_document", ")", "err_log", ".", "append", "(", "'-----END DOCUMENT-----'", ")", "if", "log_exists", ":", "err_log", ".", "append", "(", "''", ")", "log_content", "=", "log_data", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "err_log", ".", "append", "(", "'Log file:'", ")", "err_log", ".", "append", "(", "'-----BEGIN LOG-----'", ")", "err_log", ".", "append", "(", "log_content", ")", "err_log", ".", "append", "(", "'-----END LOG-----'", ")", "elif", "(", "not", "os", ".", "path", ".", "exists", "(", "image_path", ")", ")", ":", "err_log", ".", "append", "(", "'Failed to convert pdf to png to preview.'", ")", "if", "err_log", ":", "with", "open", "(", "err_file_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'\\n'", ".", "join", "(", "err_log", ")", ")", "for", "ext", "in", "[", "'tex'", ",", "'aux'", ",", "'log'", ",", "'pdf'", ",", "'dvi'", "]", ":", "delete_path", "=", "os", ".", "path", ".", "join", "(", "temp_path", ",", "(", "(", "base_name", "+", "'.'", ")", "+", "ext", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "delete_path", ")", ":", "os", ".", "remove", "(", "delete_path", ")" ]
creates vdi from the image stored in the local cache .
train
false
36,206
def gf_trunc(f, p): return gf_strip([(a % p) for a in f])
[ "def", "gf_trunc", "(", "f", ",", "p", ")", ":", "return", "gf_strip", "(", "[", "(", "a", "%", "p", ")", "for", "a", "in", "f", "]", ")" ]
reduce all coefficients modulo p .
train
false
36,208
def unquote_filename(name, win32=(sys.platform == 'win32')): warn("'unquote_filename' is deprecated since IPython 5.0 and should not be used anymore", DeprecationWarning, stacklevel=2) if win32: if (name.startswith(("'", '"')) and name.endswith(("'", '"'))): name = name[1:(-1)] return name
[ "def", "unquote_filename", "(", "name", ",", "win32", "=", "(", "sys", ".", "platform", "==", "'win32'", ")", ")", ":", "warn", "(", "\"'unquote_filename' is deprecated since IPython 5.0 and should not be used anymore\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "win32", ":", "if", "(", "name", ".", "startswith", "(", "(", "\"'\"", ",", "'\"'", ")", ")", "and", "name", ".", "endswith", "(", "(", "\"'\"", ",", "'\"'", ")", ")", ")", ":", "name", "=", "name", "[", "1", ":", "(", "-", "1", ")", "]", "return", "name" ]
on windows .
train
false
36,209
def backup_nzb(filename, data): path = cfg.nzb_backup_dir.get_path() if path: save_compressed(path, filename, data)
[ "def", "backup_nzb", "(", "filename", ",", "data", ")", ":", "path", "=", "cfg", ".", "nzb_backup_dir", ".", "get_path", "(", ")", "if", "path", ":", "save_compressed", "(", "path", ",", "filename", ",", "data", ")" ]
backup nzb file .
train
false
36,212
def parse_list_header(value): result = [] for item in _parse_list_header(value): if (item[:1] == item[(-1):] == '"'): item = unquote_header_value(item[1:(-1)]) result.append(item) return result
[ "def", "parse_list_header", "(", "value", ")", ":", "result", "=", "[", "]", "for", "item", "in", "_parse_list_header", "(", "value", ")", ":", "if", "(", "item", "[", ":", "1", "]", "==", "item", "[", "(", "-", "1", ")", ":", "]", "==", "'\"'", ")", ":", "item", "=", "unquote_header_value", "(", "item", "[", "1", ":", "(", "-", "1", ")", "]", ")", "result", ".", "append", "(", "item", ")", "return", "result" ]
parse lists as described by rfc 2068 section 2 .
train
true
36,213
def test_dataset_shuffle_split(): skip_if_no_sklearn() mapping = {'dataset_iterator': 'DatasetShuffleSplit'} test_yaml = (test_yaml_dataset_iterator % mapping) trainer = yaml_parse.load(test_yaml) trainer.main_loop()
[ "def", "test_dataset_shuffle_split", "(", ")", ":", "skip_if_no_sklearn", "(", ")", "mapping", "=", "{", "'dataset_iterator'", ":", "'DatasetShuffleSplit'", "}", "test_yaml", "=", "(", "test_yaml_dataset_iterator", "%", "mapping", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "test_yaml", ")", "trainer", ".", "main_loop", "(", ")" ]
test datasetshufflesplit .
train
false
36,214
def get_maximum_file_descriptors(): limits = resource.getrlimit(resource.RLIMIT_NOFILE) result = limits[1] if (result == resource.RLIM_INFINITY): result = MAXFD return result
[ "def", "get_maximum_file_descriptors", "(", ")", ":", "limits", "=", "resource", ".", "getrlimit", "(", "resource", ".", "RLIMIT_NOFILE", ")", "result", "=", "limits", "[", "1", "]", "if", "(", "result", "==", "resource", ".", "RLIM_INFINITY", ")", ":", "result", "=", "MAXFD", "return", "result" ]
return the maximum number of open file descriptors for this process .
train
false