id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
52,673
|
def is_aws_instance():
    """Return True when running on an Amazon Web Services instance.

    The first call probes AWS_METADATA_URL (1 second timeout) and caches
    the boolean in the module-global __IS_ON_AWS, so later calls are free.
    """
    global __IS_ON_AWS
    # 'is None' is the idiomatic identity test; '== None' invokes __eq__.
    if __IS_ON_AWS is None:
        __IS_ON_AWS = fetch(AWS_METADATA_URL, timeout=1).ok()
    return __IS_ON_AWS
|
[
"def",
"is_aws_instance",
"(",
")",
":",
"global",
"__IS_ON_AWS",
"if",
"(",
"__IS_ON_AWS",
"==",
"None",
")",
":",
"__IS_ON_AWS",
"=",
"fetch",
"(",
"AWS_METADATA_URL",
",",
"timeout",
"=",
"1",
")",
".",
"ok",
"(",
")",
"return",
"__IS_ON_AWS"
] |
determine if we are running on an amazon web services instance .
|
train
| false
|
52,675
|
def define_check(func):
    """Decorator that registers *func* in the module-global CHECKERS list.

    Returns *func* unchanged so the decorated function keeps working.
    """
    CHECKERS.append(func)
    return func
|
[
"def",
"define_check",
"(",
"func",
")",
":",
"CHECKERS",
".",
"append",
"(",
"func",
")",
"return",
"func"
] |
helper decorator to register a check function .
|
train
| false
|
52,676
|
def locale_and_slug_from_path(path, request=None, path_locale=None):
    """Split a proposed doc *path* into (locale, slug, needs_redirect).

    The leading path segment is matched (case-insensitively) against the
    legacy MindTouch locale map first, then against MDN_LANGUAGES; either
    match strips the segment into ``locale`` and flags a redirect.  If no
    locale was found, fall back to the request's LANGUAGE_CODE, then to
    *path_locale*, then to settings.WIKI_DEFAULT_LANGUAGE (or 'en-US').
    """
    (locale, slug, needs_redirect) = ('', path, False)
    # Case-insensitive lookup table: lowercased code -> canonical code.
    mdn_languages_lower = dict(((x.lower(), x) for x in settings.MDN_LANGUAGES))
    if ('/' in path):
        (maybe_locale, maybe_slug) = path.split('/', 1)
        l_locale = maybe_locale.lower()
        if (l_locale in settings.MT_TO_KUMA_LOCALE_MAP):
            # Legacy MindTouch locale: translate and redirect.
            needs_redirect = True
            locale = settings.MT_TO_KUMA_LOCALE_MAP[l_locale]
            slug = maybe_slug
        elif (l_locale in mdn_languages_lower):
            # Known MDN locale with non-canonical casing: redirect.
            needs_redirect = True
            locale = mdn_languages_lower[l_locale]
            slug = maybe_slug
    if (locale == ''):
        if request:
            locale = request.LANGUAGE_CODE
        elif path_locale:
            locale = path_locale
    if (locale == ''):
        locale = getattr(settings, 'WIKI_DEFAULT_LANGUAGE', 'en-US')
    return (locale, slug, needs_redirect)
|
[
"def",
"locale_and_slug_from_path",
"(",
"path",
",",
"request",
"=",
"None",
",",
"path_locale",
"=",
"None",
")",
":",
"(",
"locale",
",",
"slug",
",",
"needs_redirect",
")",
"=",
"(",
"''",
",",
"path",
",",
"False",
")",
"mdn_languages_lower",
"=",
"dict",
"(",
"(",
"(",
"x",
".",
"lower",
"(",
")",
",",
"x",
")",
"for",
"x",
"in",
"settings",
".",
"MDN_LANGUAGES",
")",
")",
"if",
"(",
"'/'",
"in",
"path",
")",
":",
"(",
"maybe_locale",
",",
"maybe_slug",
")",
"=",
"path",
".",
"split",
"(",
"'/'",
",",
"1",
")",
"l_locale",
"=",
"maybe_locale",
".",
"lower",
"(",
")",
"if",
"(",
"l_locale",
"in",
"settings",
".",
"MT_TO_KUMA_LOCALE_MAP",
")",
":",
"needs_redirect",
"=",
"True",
"locale",
"=",
"settings",
".",
"MT_TO_KUMA_LOCALE_MAP",
"[",
"l_locale",
"]",
"slug",
"=",
"maybe_slug",
"elif",
"(",
"l_locale",
"in",
"mdn_languages_lower",
")",
":",
"needs_redirect",
"=",
"True",
"locale",
"=",
"mdn_languages_lower",
"[",
"l_locale",
"]",
"slug",
"=",
"maybe_slug",
"if",
"(",
"locale",
"==",
"''",
")",
":",
"if",
"request",
":",
"locale",
"=",
"request",
".",
"LANGUAGE_CODE",
"elif",
"path_locale",
":",
"locale",
"=",
"path_locale",
"if",
"(",
"locale",
"==",
"''",
")",
":",
"locale",
"=",
"getattr",
"(",
"settings",
",",
"'WIKI_DEFAULT_LANGUAGE'",
",",
"'en-US'",
")",
"return",
"(",
"locale",
",",
"slug",
",",
"needs_redirect",
")"
] |
given a proposed doc path .
|
train
| false
|
52,678
|
def patch_os():
    """Monkey-patch the :mod:`os` module via patch_module."""
    patch_module('os')
|
[
"def",
"patch_os",
"(",
")",
":",
"patch_module",
"(",
"'os'",
")"
] |
replace :func:os .
|
train
| false
|
52,679
|
def sendmsg(socket, data, ancillary=None, flags=0):
    """Send a message on *socket*, with optional ancillary (control) data.

    On Python 3 this delegates to the socket's native ``sendmsg``; on
    Python 2 it falls back to the ``send1msg`` helper on the raw fd.

    :param socket: the socket object to send on
    :param data: the message payload (bytes)
    :param ancillary: optional list of ancillary data items (default: none)
    :param flags: send flags passed straight through
    :return: whatever the underlying send call returns (bytes sent)
    """
    # A mutable default argument ([]) is shared across calls; use the
    # None sentinel and create a fresh list per call instead.
    if ancillary is None:
        ancillary = []
    if _PY3:
        return socket.sendmsg([data], ancillary, flags)
    else:
        return send1msg(socket.fileno(), data, flags, ancillary)
|
[
"def",
"sendmsg",
"(",
"socket",
",",
"data",
",",
"ancillary",
"=",
"[",
"]",
",",
"flags",
"=",
"0",
")",
":",
"if",
"_PY3",
":",
"return",
"socket",
".",
"sendmsg",
"(",
"[",
"data",
"]",
",",
"ancillary",
",",
"flags",
")",
"else",
":",
"return",
"send1msg",
"(",
"socket",
".",
"fileno",
"(",
")",
",",
"data",
",",
"flags",
",",
"ancillary",
")"
] |
send a message on a socket .
|
train
| false
|
52,680
|
def _generate_new_xsrf_secret_key():
    """Return a random 16-byte XSRF secret key as a hex string.

    NOTE(review): ``str.encode('hex')`` is Python 2 only; on Python 3 this
    would need ``binascii.hexlify`` or ``bytes.hex()``.
    """
    return os.urandom(16).encode('hex')
|
[
"def",
"_generate_new_xsrf_secret_key",
"(",
")",
":",
"return",
"os",
".",
"urandom",
"(",
"16",
")",
".",
"encode",
"(",
"'hex'",
")"
] |
returns a random xsrf secret key .
|
train
| false
|
52,681
|
def reload_(name):
    """Reload the named service by terminating it (supervision restarts it)."""
    term(name)
|
[
"def",
"reload_",
"(",
"name",
")",
":",
"term",
"(",
"name",
")"
] |
reload the named service cli example: .
|
train
| false
|
52,683
|
def in6_addrtovendor(addr):
    """Look up the vendor for the MAC embedded in a modified EUI-64 IPv6 address.

    Returns None when no MAC can be extracted from *addr*.  When the
    manufacturer database yields no real match (the result is still a
    17-char string that is not a colon-separated MAC), return 'UNKNOWN'.
    """
    mac = in6_addrtomac(addr)
    if (mac is None):
        return None
    res = conf.manufdb._get_manuf(mac)
    # _get_manuf echoes the MAC back when unknown; detect that case.
    if ((len(res) == 17) and (res.count(':') != 5)):
        res = 'UNKNOWN'
    return res
|
[
"def",
"in6_addrtovendor",
"(",
"addr",
")",
":",
"mac",
"=",
"in6_addrtomac",
"(",
"addr",
")",
"if",
"(",
"mac",
"is",
"None",
")",
":",
"return",
"None",
"res",
"=",
"conf",
".",
"manufdb",
".",
"_get_manuf",
"(",
"mac",
")",
"if",
"(",
"(",
"len",
"(",
"res",
")",
"==",
"17",
")",
"and",
"(",
"res",
".",
"count",
"(",
"':'",
")",
"!=",
"5",
")",
")",
":",
"res",
"=",
"'UNKNOWN'",
"return",
"res"
] |
extract the mac address from a modified eui-64 constructed ipv6 address provided and use the iana oui .
|
train
| true
|
52,685
|
def convert_to_hash(input_string, max_length):
    """Convert a string to a URL-safe base64 SHA-1 hash, truncated to *max_length*.

    Raises Exception when *input_string* is not a string.
    NOTE(review): ``basestring`` makes this Python 2 only.
    """
    if (not isinstance(input_string, basestring)):
        raise Exception(('Expected string, received %s of type %s' % (input_string, type(input_string))))
    # sha1 digest -> urlsafe base64, then truncate to the requested length.
    encoded_string = base64.urlsafe_b64encode(hashlib.sha1(input_string.encode('utf-8')).digest())
    return encoded_string[:max_length]
|
[
"def",
"convert_to_hash",
"(",
"input_string",
",",
"max_length",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"input_string",
",",
"basestring",
")",
")",
":",
"raise",
"Exception",
"(",
"(",
"'Expected string, received %s of type %s'",
"%",
"(",
"input_string",
",",
"type",
"(",
"input_string",
")",
")",
")",
")",
"encoded_string",
"=",
"base64",
".",
"urlsafe_b64encode",
"(",
"hashlib",
".",
"sha1",
"(",
"input_string",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"digest",
"(",
")",
")",
"return",
"encoded_string",
"[",
":",
"max_length",
"]"
] |
convert a string to a sha1 hash .
|
train
| false
|
52,687
|
def GetConfigMockClass(sections=None):
    """Mock a configuration object from a nested *sections* mapping.

    *sections* maps section name -> {parameter name -> data dict}, where the
    data dict may carry 'value', 'raw_value', and/or 'default_value'.
    Returns a dict with 'Get' and 'GetRaw' lookup callables plus the list of
    built 'type_infos' descriptors.
    NOTE(review): ``iteritems`` makes this Python 2 only.
    """
    if (sections is None):
        sections = {}
    # Private sentinel so callers may legitimately pass None as a default.
    missing = object()
    type_infos = []
    values = {}
    raw_values = {}
    default_values = {}
    for (section_name, section) in sections.iteritems():
        for (parameter_name, parameter_data) in section.iteritems():
            # Parameters are keyed by their fully-qualified 'section.name'.
            name = ('%s.%s' % (section_name, parameter_name))
            descriptor = utils.DataObject(section=section_name, name=name)
            type_infos.append(descriptor)
            if ('value' in parameter_data):
                values[name] = parameter_data['value']
            if ('raw_value' in parameter_data):
                raw_values[name] = parameter_data['raw_value']
            if ('default_value' in parameter_data):
                default_values[name] = parameter_data['default_value']
    def Get(parameter, default=missing):
        # Explicit value, else configured default, else the caller's default.
        try:
            return values[parameter]
        except KeyError:
            if (default is missing):
                return default_values[parameter]
            return default
    def GetRaw(parameter, default=missing):
        # Same lookup order as Get, but against the raw (uninterpolated) values.
        try:
            return raw_values[parameter]
        except KeyError:
            if (default is missing):
                return default_values[parameter]
            return default
    return {'Get': Get, 'GetRaw': GetRaw, 'type_infos': type_infos}
|
[
"def",
"GetConfigMockClass",
"(",
"sections",
"=",
"None",
")",
":",
"if",
"(",
"sections",
"is",
"None",
")",
":",
"sections",
"=",
"{",
"}",
"missing",
"=",
"object",
"(",
")",
"type_infos",
"=",
"[",
"]",
"values",
"=",
"{",
"}",
"raw_values",
"=",
"{",
"}",
"default_values",
"=",
"{",
"}",
"for",
"(",
"section_name",
",",
"section",
")",
"in",
"sections",
".",
"iteritems",
"(",
")",
":",
"for",
"(",
"parameter_name",
",",
"parameter_data",
")",
"in",
"section",
".",
"iteritems",
"(",
")",
":",
"name",
"=",
"(",
"'%s.%s'",
"%",
"(",
"section_name",
",",
"parameter_name",
")",
")",
"descriptor",
"=",
"utils",
".",
"DataObject",
"(",
"section",
"=",
"section_name",
",",
"name",
"=",
"name",
")",
"type_infos",
".",
"append",
"(",
"descriptor",
")",
"if",
"(",
"'value'",
"in",
"parameter_data",
")",
":",
"values",
"[",
"name",
"]",
"=",
"parameter_data",
"[",
"'value'",
"]",
"if",
"(",
"'raw_value'",
"in",
"parameter_data",
")",
":",
"raw_values",
"[",
"name",
"]",
"=",
"parameter_data",
"[",
"'raw_value'",
"]",
"if",
"(",
"'default_value'",
"in",
"parameter_data",
")",
":",
"default_values",
"[",
"name",
"]",
"=",
"parameter_data",
"[",
"'default_value'",
"]",
"def",
"Get",
"(",
"parameter",
",",
"default",
"=",
"missing",
")",
":",
"try",
":",
"return",
"values",
"[",
"parameter",
"]",
"except",
"KeyError",
":",
"if",
"(",
"default",
"is",
"missing",
")",
":",
"return",
"default_values",
"[",
"parameter",
"]",
"return",
"default",
"def",
"GetRaw",
"(",
"parameter",
",",
"default",
"=",
"missing",
")",
":",
"try",
":",
"return",
"raw_values",
"[",
"parameter",
"]",
"except",
"KeyError",
":",
"if",
"(",
"default",
"is",
"missing",
")",
":",
"return",
"default_values",
"[",
"parameter",
"]",
"return",
"default",
"return",
"{",
"'Get'",
":",
"Get",
",",
"'GetRaw'",
":",
"GetRaw",
",",
"'type_infos'",
":",
"type_infos",
"}"
] |
mocks a configuration file for use by the api handler .
|
train
| false
|
52,690
|
def top_terms(results=15):
    """Get a list of the top overall terms.

    :param results: number of term documents to request (default 15);
        falsy values omit the parameter from the call.
    :return: the list of term document dicts from the
        'artist/top_terms' API endpoint.
    """
    kwargs = {}
    if results:
        kwargs['results'] = results
    # The original had a stray no-op string statement ('Get top terms')
    # in the middle of the body — a misplaced docstring, now folded above.
    result = util.callm(('%s/%s' % ('artist', 'top_terms')), kwargs)
    return result['response']['terms']
|
[
"def",
"top_terms",
"(",
"results",
"=",
"15",
")",
":",
"kwargs",
"=",
"{",
"}",
"if",
"results",
":",
"kwargs",
"[",
"'results'",
"]",
"=",
"results",
"'Get top terms'",
"result",
"=",
"util",
".",
"callm",
"(",
"(",
"'%s/%s'",
"%",
"(",
"'artist'",
",",
"'top_terms'",
")",
")",
",",
"kwargs",
")",
"return",
"result",
"[",
"'response'",
"]",
"[",
"'terms'",
"]"
] |
get a list of the top overall terms args: kwargs: results : an integer number of results to return returns: a list of term document dicts example: .
|
train
| true
|
52,691
|
def busses():
    """Return a generator of Bus objects, one per USB bus.

    Devices from core.find(find_all=True) are sorted and grouped by their
    ``bus`` attribute; each group is wrapped in a Bus.  Note: sorting by
    the same key as groupby is required for correct grouping.
    """
    return (Bus(g) for (k, g) in groupby(sorted(core.find(find_all=True), key=(lambda d: d.bus)), (lambda d: d.bus)))
|
[
"def",
"busses",
"(",
")",
":",
"return",
"(",
"Bus",
"(",
"g",
")",
"for",
"(",
"k",
",",
"g",
")",
"in",
"groupby",
"(",
"sorted",
"(",
"core",
".",
"find",
"(",
"find_all",
"=",
"True",
")",
",",
"key",
"=",
"(",
"lambda",
"d",
":",
"d",
".",
"bus",
")",
")",
",",
"(",
"lambda",
"d",
":",
"d",
".",
"bus",
")",
")",
")"
] |
returns a tuple with the usb busses .
|
train
| true
|
52,693
|
def show_options():
    """Display the workflow's initial option list as Alfred feedback items."""
    feedback = alfred.Feedback()
    feedback.addItem(title='List compatible workflows', autocomplete='list', valid='no')
    # Warn that updating may be slow when the updateable cache is (nearly) stale.
    feedback.addItem(title='Check for updates', subtitle=('This may take a while...' if (get_updateable_timeout() <= 10.0) else ''), autocomplete='update', valid='no')
    feedback.addItem(title='Reset cache', autocomplete='reset', valid='no')
    feedback.addItem(title='View log', autocomplete='log', valid='no')
    feedback.output()
|
[
"def",
"show_options",
"(",
")",
":",
"feedback",
"=",
"alfred",
".",
"Feedback",
"(",
")",
"feedback",
".",
"addItem",
"(",
"title",
"=",
"'List compatible workflows'",
",",
"autocomplete",
"=",
"'list'",
",",
"valid",
"=",
"'no'",
")",
"feedback",
".",
"addItem",
"(",
"title",
"=",
"'Check for updates'",
",",
"subtitle",
"=",
"(",
"'This may take a while...'",
"if",
"(",
"get_updateable_timeout",
"(",
")",
"<=",
"10.0",
")",
"else",
"''",
")",
",",
"autocomplete",
"=",
"'update'",
",",
"valid",
"=",
"'no'",
")",
"feedback",
".",
"addItem",
"(",
"title",
"=",
"'Reset cache'",
",",
"autocomplete",
"=",
"'reset'",
",",
"valid",
"=",
"'no'",
")",
"feedback",
".",
"addItem",
"(",
"title",
"=",
"'View log'",
",",
"autocomplete",
"=",
"'log'",
",",
"valid",
"=",
"'no'",
")",
"feedback",
".",
"output",
"(",
")"
] |
displays initial options .
|
train
| false
|
52,694
|
def chexor(old, name, timestamp):
    """XOR *old* (a 32-hex-digit string) with the md5 of "name-timestamp".

    Used so that inserting and deleting the same entry cancel out (XOR is
    its own inverse).  Raises Exception when *name* is None.
    """
    if name is None:
        raise Exception('name is None!')
    entry = '%s-%s' % (name, timestamp)
    digest_hex = hashlib.md5(entry.encode('utf8')).hexdigest()
    combined = int(old, 16) ^ int(digest_hex, 16)
    return '%032x' % combined
|
[
"def",
"chexor",
"(",
"old",
",",
"name",
",",
"timestamp",
")",
":",
"if",
"(",
"name",
"is",
"None",
")",
":",
"raise",
"Exception",
"(",
"'name is None!'",
")",
"new",
"=",
"hashlib",
".",
"md5",
"(",
"(",
"'%s-%s'",
"%",
"(",
"name",
",",
"timestamp",
")",
")",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"hexdigest",
"(",
")",
"return",
"(",
"'%032x'",
"%",
"(",
"int",
"(",
"old",
",",
"16",
")",
"^",
"int",
"(",
"new",
",",
"16",
")",
")",
")"
] |
each entry in the account and container databases is xored by the 128-bit hash on insert or delete .
|
train
| false
|
52,695
|
def parse_named_char(source, info, in_set):
    """Parse a named character escape (``\\N{NAME}``) from *source*.

    On a successful ``{NAME}`` lookup via unicodedata, return the matching
    character node; an unknown name raises ``error``.  Without a complete
    ``{...}`` form, the position is rewound and a literal 'N' is returned.
    """
    saved_pos = source.pos
    if source.match('{'):
        name = source.get_while(NAMED_CHAR_PART)
        if source.match('}'):
            try:
                value = unicodedata.lookup(name)
                return make_character(info, ord(value), in_set)
            except KeyError:
                raise error('undefined character name', source.string, source.pos)
    # Not a {NAME} form: rewind and treat the escape as a literal 'N'.
    source.pos = saved_pos
    return make_character(info, ord('N'), in_set)
|
[
"def",
"parse_named_char",
"(",
"source",
",",
"info",
",",
"in_set",
")",
":",
"saved_pos",
"=",
"source",
".",
"pos",
"if",
"source",
".",
"match",
"(",
"'{'",
")",
":",
"name",
"=",
"source",
".",
"get_while",
"(",
"NAMED_CHAR_PART",
")",
"if",
"source",
".",
"match",
"(",
"'}'",
")",
":",
"try",
":",
"value",
"=",
"unicodedata",
".",
"lookup",
"(",
"name",
")",
"return",
"make_character",
"(",
"info",
",",
"ord",
"(",
"value",
")",
",",
"in_set",
")",
"except",
"KeyError",
":",
"raise",
"error",
"(",
"'undefined character name'",
",",
"source",
".",
"string",
",",
"source",
".",
"pos",
")",
"source",
".",
"pos",
"=",
"saved_pos",
"return",
"make_character",
"(",
"info",
",",
"ord",
"(",
"'N'",
")",
",",
"in_set",
")"
] |
parses a named character .
|
train
| false
|
52,698
|
def make_account_trees():
    """Build a tree hierarchy in the module-global *accounts* dict, in place.

    Each account with a known parent is attached to that parent under its
    own name, and its 'parent_id'/'name' keys are removed.  A second pass
    drops empty 'children' entries.
    """
    for id in accounts.keys():
        account = accounts[id]
        if account.get(u'parent_id'):
            if accounts.get(account[u'parent_id']):
                # Nest this account under its parent, keyed by name.
                accounts[account[u'parent_id']][account[u'name']] = account
            # NOTE(review): these keys are deleted even when the parent id
            # is unknown — confirm that is intentional.
            del account[u'parent_id']
            del account[u'name']
    for id in accounts.keys():
        if ((u'children' in accounts[id]) and (not accounts[id].get(u'children'))):
            del accounts[id][u'children']
|
[
"def",
"make_account_trees",
"(",
")",
":",
"for",
"id",
"in",
"accounts",
".",
"keys",
"(",
")",
":",
"account",
"=",
"accounts",
"[",
"id",
"]",
"if",
"account",
".",
"get",
"(",
"u'parent_id'",
")",
":",
"if",
"accounts",
".",
"get",
"(",
"account",
"[",
"u'parent_id'",
"]",
")",
":",
"accounts",
"[",
"account",
"[",
"u'parent_id'",
"]",
"]",
"[",
"account",
"[",
"u'name'",
"]",
"]",
"=",
"account",
"del",
"account",
"[",
"u'parent_id'",
"]",
"del",
"account",
"[",
"u'name'",
"]",
"for",
"id",
"in",
"accounts",
".",
"keys",
"(",
")",
":",
"if",
"(",
"(",
"u'children'",
"in",
"accounts",
"[",
"id",
"]",
")",
"and",
"(",
"not",
"accounts",
"[",
"id",
"]",
".",
"get",
"(",
"u'children'",
")",
")",
")",
":",
"del",
"accounts",
"[",
"id",
"]",
"[",
"u'children'",
"]"
] |
build tree hierarchy .
|
train
| false
|
52,699
|
@lower_builtin(type, types.Any)
def type_impl(context, builder, sig, args):
    """Lowering for the one-argument type() builtin: emits a dummy value
    (the real type resolution happens at typing time, not at runtime)."""
    return context.get_dummy_value()
|
[
"@",
"lower_builtin",
"(",
"type",
",",
"types",
".",
"Any",
")",
"def",
"type_impl",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"return",
"context",
".",
"get_dummy_value",
"(",
")"
] |
one-argument type() builtin .
|
train
| false
|
52,701
|
def _media_size_to_long(maxSize):
    """Convert a media size string (e.g. '10MB') to a byte count.

    The trailing two characters are treated as a unit suffix looked up in
    _MEDIA_SIZE_BIT_SHIFTS; an unrecognized suffix means the whole string
    is a plain integer.  Strings shorter than 2 characters yield 0.
    """
    if (len(maxSize) < 2):
        return 0
    units = maxSize[(-2):].upper()
    bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
    if (bit_shift is not None):
        # Numeric prefix shifted by the unit's bit count (KB=10, MB=20, ...).
        return (int(maxSize[:(-2)]) << bit_shift)
    else:
        return int(maxSize)
|
[
"def",
"_media_size_to_long",
"(",
"maxSize",
")",
":",
"if",
"(",
"len",
"(",
"maxSize",
")",
"<",
"2",
")",
":",
"return",
"0",
"units",
"=",
"maxSize",
"[",
"(",
"-",
"2",
")",
":",
"]",
".",
"upper",
"(",
")",
"bit_shift",
"=",
"_MEDIA_SIZE_BIT_SHIFTS",
".",
"get",
"(",
"units",
")",
"if",
"(",
"bit_shift",
"is",
"not",
"None",
")",
":",
"return",
"(",
"int",
"(",
"maxSize",
"[",
":",
"(",
"-",
"2",
")",
"]",
")",
"<<",
"bit_shift",
")",
"else",
":",
"return",
"int",
"(",
"maxSize",
")"
] |
convert a string media size .
|
train
| false
|
52,702
|
def register_backends(base_name, base_path):
    """Load backend modules and map provider names to their modules.

    Each loaded module advertising a PROVIDER attribute is registered under
    that name; a 'generic' provider is registered as a fallback for every
    'generic'-typed provider not already claimed by a specific module.
    NOTE(review): ``iteritems`` makes this Python 2 only.
    """
    modules = load_modules(base_name, base_path)
    mod_for = {}
    for module in modules:
        if hasattr(module, 'PROVIDER'):
            provider_name = module.PROVIDER
            if (provider_name == 'generic'):
                for (p_name, p) in providers.iteritems():
                    p_type = p.get('type', None)
                    # Specific modules win; generic only fills the gaps.
                    if ((p_type == 'generic') and (p_name not in mod_for)):
                        mod_for[p_name] = module
            else:
                mod_for[provider_name] = module
    return mod_for
|
[
"def",
"register_backends",
"(",
"base_name",
",",
"base_path",
")",
":",
"modules",
"=",
"load_modules",
"(",
"base_name",
",",
"base_path",
")",
"mod_for",
"=",
"{",
"}",
"for",
"module",
"in",
"modules",
":",
"if",
"hasattr",
"(",
"module",
",",
"'PROVIDER'",
")",
":",
"provider_name",
"=",
"module",
".",
"PROVIDER",
"if",
"(",
"provider_name",
"==",
"'generic'",
")",
":",
"for",
"(",
"p_name",
",",
"p",
")",
"in",
"providers",
".",
"iteritems",
"(",
")",
":",
"p_type",
"=",
"p",
".",
"get",
"(",
"'type'",
",",
"None",
")",
"if",
"(",
"(",
"p_type",
"==",
"'generic'",
")",
"and",
"(",
"p_name",
"not",
"in",
"mod_for",
")",
")",
":",
"mod_for",
"[",
"p_name",
"]",
"=",
"module",
"else",
":",
"mod_for",
"[",
"provider_name",
"]",
"=",
"module",
"return",
"mod_for"
] |
dynamically loads all packages contained within thread backends module .
|
train
| false
|
52,703
|
def show_packages():
    """Log all available packages with an install-state flag.

    State column: 'u' = installed but an update is available,
    'i' = installed and current, '-' = not installed.
    """
    logger.info('These packages are available:')
    for this_package in packages_sources.values():
        if (this_package.name in installed_packages_list):
            # Older installed timestamp than the source means updateable.
            state = ('u' if (installed_packages_list[this_package.name].timestamp < this_package.timestamp) else 'i')
        else:
            state = '-'
        package_time = time.strftime('%a, %d %b %Y %H:%M:%S', time.gmtime(this_package.timestamp))
        logger.info('{0} {1:<20} {2:<8} {3:<30} {4}'.format(state, this_package.name, this_package.readable_size, package_time, this_package.source))
|
[
"def",
"show_packages",
"(",
")",
":",
"logger",
".",
"info",
"(",
"'These packages are available:'",
")",
"for",
"this_package",
"in",
"packages_sources",
".",
"values",
"(",
")",
":",
"if",
"(",
"this_package",
".",
"name",
"in",
"installed_packages_list",
")",
":",
"state",
"=",
"(",
"'u'",
"if",
"(",
"installed_packages_list",
"[",
"this_package",
".",
"name",
"]",
".",
"timestamp",
"<",
"this_package",
".",
"timestamp",
")",
"else",
"'i'",
")",
"else",
":",
"state",
"=",
"'-'",
"package_time",
"=",
"time",
".",
"strftime",
"(",
"'%a, %d %b %Y %H:%M:%S'",
",",
"time",
".",
"gmtime",
"(",
"this_package",
".",
"timestamp",
")",
")",
"logger",
".",
"info",
"(",
"'{0} {1:<20} {2:<8} {3:<30} {4}'",
".",
"format",
"(",
"state",
",",
"this_package",
".",
"name",
",",
"this_package",
".",
"readable_size",
",",
"package_time",
",",
"this_package",
".",
"source",
")",
")"
] |
list all available packages .
|
train
| false
|
52,704
|
def rs_newton(p, x, prec):
    """Compute the truncated Newton sum of polynomial *p* in *x* to
    precision *prec*, via the series inversion of the monomial-reversed
    polynomial."""
    deg = p.degree()
    # Reverse coefficient order so the series inversion is well-defined.
    p1 = _invert_monoms(p)
    p2 = rs_series_inversion(p1, x, prec)
    p3 = rs_mul(p1.diff(x), p2, x, prec)
    res = (deg - (p3 * x))
    return res
|
[
"def",
"rs_newton",
"(",
"p",
",",
"x",
",",
"prec",
")",
":",
"deg",
"=",
"p",
".",
"degree",
"(",
")",
"p1",
"=",
"_invert_monoms",
"(",
"p",
")",
"p2",
"=",
"rs_series_inversion",
"(",
"p1",
",",
"x",
",",
"prec",
")",
"p3",
"=",
"rs_mul",
"(",
"p1",
".",
"diff",
"(",
"x",
")",
",",
"p2",
",",
"x",
",",
"prec",
")",
"res",
"=",
"(",
"deg",
"-",
"(",
"p3",
"*",
"x",
")",
")",
"return",
"res"
] |
compute the truncated newton sum of the polynomial p examples .
|
train
| false
|
52,706
|
def file_is_remote(file_location):
    """Return True when *file_location* is a remote reference using a
    protocol we support (http, https, or ftp), case-insensitively."""
    # str.startswith accepts a tuple of prefixes — one C-level check.
    return file_location.lower().startswith(('http://', 'https://', 'ftp://'))
|
[
"def",
"file_is_remote",
"(",
"file_location",
")",
":",
"file_loc_lc",
"=",
"file_location",
".",
"lower",
"(",
")",
"for",
"prefix",
"in",
"[",
"'http://'",
",",
"'https://'",
",",
"'ftp://'",
"]",
":",
"if",
"file_loc_lc",
".",
"startswith",
"(",
"prefix",
")",
":",
"return",
"True",
"return",
"False"
] |
returns true if the file is remote and referenced via a protocol we support .
|
train
| false
|
52,707
|
@contextmanager
def stdin_encoding(encoding=None):
    """Context manager that emulates an arbitrary sys.stdin encoding.

    Wraps sys.stdin in a SimpleMock reporting *encoding*, and restores the
    real sys.stdin on exit.  The restore is in a ``finally`` so that an
    exception raised inside the managed block no longer leaks the mock
    into sys.stdin (the original skipped the restore on error).
    """
    import sys
    _stdin = sys.stdin
    sys.stdin = SimpleMock(sys.stdin, 'encoding', encoding)
    try:
        yield
    finally:
        sys.stdin = _stdin
|
[
"@",
"contextmanager",
"def",
"stdin_encoding",
"(",
"encoding",
"=",
"None",
")",
":",
"import",
"sys",
"_stdin",
"=",
"sys",
".",
"stdin",
"sys",
".",
"stdin",
"=",
"SimpleMock",
"(",
"sys",
".",
"stdin",
",",
"'encoding'",
",",
"encoding",
")",
"(",
"yield",
")",
"sys",
".",
"stdin",
"=",
"_stdin"
] |
context manager for running bits of code while emulating an arbitrary stdin encoding .
|
train
| false
|
52,709
|
def get_tempo(artist, title):
    """Get the tempo for a song, or None when the search finds nothing.

    Queries the song search API for a single result with the
    'audio_summary' bucket and returns its 'tempo' field.
    """
    results = song.search(artist=artist, title=title, results=1, buckets=['audio_summary'])
    if (len(results) > 0):
        return results[0].audio_summary['tempo']
    else:
        return None
|
[
"def",
"get_tempo",
"(",
"artist",
",",
"title",
")",
":",
"results",
"=",
"song",
".",
"search",
"(",
"artist",
"=",
"artist",
",",
"title",
"=",
"title",
",",
"results",
"=",
"1",
",",
"buckets",
"=",
"[",
"'audio_summary'",
"]",
")",
"if",
"(",
"len",
"(",
"results",
")",
">",
"0",
")",
":",
"return",
"results",
"[",
"0",
"]",
".",
"audio_summary",
"[",
"'tempo'",
"]",
"else",
":",
"return",
"None"
] |
gets the tempo for a song .
|
train
| true
|
52,710
|
def get_dummy_course(start, announcement=None, is_new=None, advertised_start=None, end=None, certs='end'):
    """Build a dummy course by rendering an XML template and processing it.

    None-valued keyword arguments are omitted from the course element
    entirely; others are rendered as lowercase name="value" attributes.
    Returns the result of DummySystem.process_xml on the generated XML.
    """
    system = DummySystem(load_error_modules=True)
    def to_attrb(n, v):
        # Render a single XML attribute, or '' to drop it when v is None.
        return ('' if (v is None) else '{0}="{1}"'.format(n, v).lower())
    is_new = to_attrb('is_new', is_new)
    announcement = to_attrb('announcement', announcement)
    advertised_start = to_attrb('advertised_start', advertised_start)
    end = to_attrb('end', end)
    start_xml = '\n    <course org="{org}" course="{course}" display_organization="{org}_display" display_coursenumber="{course}_display"\n    graceperiod="1 day" url_name="test"\n    start="{start}"\n    {announcement}\n    {is_new}\n    {advertised_start}\n    {end}\n    certificates_display_behavior="{certs}">\n    <chapter url="hi" url_name="ch" display_name="CH">\n    <html url_name="h" display_name="H">Two houses, ...</html>\n    </chapter>\n    </course>\n    '.format(org=ORG, course=COURSE, start=start, is_new=is_new, announcement=announcement, advertised_start=advertised_start, end=end, certs=certs)
    return system.process_xml(start_xml)
|
[
"def",
"get_dummy_course",
"(",
"start",
",",
"announcement",
"=",
"None",
",",
"is_new",
"=",
"None",
",",
"advertised_start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"certs",
"=",
"'end'",
")",
":",
"system",
"=",
"DummySystem",
"(",
"load_error_modules",
"=",
"True",
")",
"def",
"to_attrb",
"(",
"n",
",",
"v",
")",
":",
"return",
"(",
"''",
"if",
"(",
"v",
"is",
"None",
")",
"else",
"'{0}=\"{1}\"'",
".",
"format",
"(",
"n",
",",
"v",
")",
".",
"lower",
"(",
")",
")",
"is_new",
"=",
"to_attrb",
"(",
"'is_new'",
",",
"is_new",
")",
"announcement",
"=",
"to_attrb",
"(",
"'announcement'",
",",
"announcement",
")",
"advertised_start",
"=",
"to_attrb",
"(",
"'advertised_start'",
",",
"advertised_start",
")",
"end",
"=",
"to_attrb",
"(",
"'end'",
",",
"end",
")",
"start_xml",
"=",
"'\\n <course org=\"{org}\" course=\"{course}\" display_organization=\"{org}_display\" display_coursenumber=\"{course}_display\"\\n graceperiod=\"1 day\" url_name=\"test\"\\n start=\"{start}\"\\n {announcement}\\n {is_new}\\n {advertised_start}\\n {end}\\n certificates_display_behavior=\"{certs}\">\\n <chapter url=\"hi\" url_name=\"ch\" display_name=\"CH\">\\n <html url_name=\"h\" display_name=\"H\">Two houses, ...</html>\\n </chapter>\\n </course>\\n '",
".",
"format",
"(",
"org",
"=",
"ORG",
",",
"course",
"=",
"COURSE",
",",
"start",
"=",
"start",
",",
"is_new",
"=",
"is_new",
",",
"announcement",
"=",
"announcement",
",",
"advertised_start",
"=",
"advertised_start",
",",
"end",
"=",
"end",
",",
"certs",
"=",
"certs",
")",
"return",
"system",
".",
"process_xml",
"(",
"start_xml",
")"
] |
get a dummy course .
|
train
| false
|
52,711
|
def LoadFirmwareImage(chip, filename):
    """Load a firmware image from *filename* for the given *chip*.

    ESP32 images always use the ESP32 format; for other chips the first
    byte (magic number) selects between the classic and OTA (v2) image
    formats.  Raises FatalError on an unrecognized magic number.
    """
    with open(filename, 'rb') as f:
        if (chip == 'esp32'):
            return ESP32FirmwareImage(f)
        else:
            magic = ord(f.read(1))
            # Rewind so the image constructor reads from the start.
            f.seek(0)
            if (magic == ESPLoader.ESP_IMAGE_MAGIC):
                return ESPFirmwareImage(f)
            elif (magic == ESPBOOTLOADER.IMAGE_V2_MAGIC):
                return OTAFirmwareImage(f)
            else:
                raise FatalError(('Invalid image magic number: %d' % magic))
|
[
"def",
"LoadFirmwareImage",
"(",
"chip",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"if",
"(",
"chip",
"==",
"'esp32'",
")",
":",
"return",
"ESP32FirmwareImage",
"(",
"f",
")",
"else",
":",
"magic",
"=",
"ord",
"(",
"f",
".",
"read",
"(",
"1",
")",
")",
"f",
".",
"seek",
"(",
"0",
")",
"if",
"(",
"magic",
"==",
"ESPLoader",
".",
"ESP_IMAGE_MAGIC",
")",
":",
"return",
"ESPFirmwareImage",
"(",
"f",
")",
"elif",
"(",
"magic",
"==",
"ESPBOOTLOADER",
".",
"IMAGE_V2_MAGIC",
")",
":",
"return",
"OTAFirmwareImage",
"(",
"f",
")",
"else",
":",
"raise",
"FatalError",
"(",
"(",
"'Invalid image magic number: %d'",
"%",
"magic",
")",
")"
] |
load a firmware image .
|
train
| false
|
52,712
|
def get_seqs_to_keep_lookup_from_seq_id_file(id_to_keep_f):
    """Build a lookup set of sequence ids from an open id file.

    Takes the first whitespace-delimited token of every line, skipping
    blank lines and '#' comment lines.
    """
    seq_ids = set()
    for line in id_to_keep_f:
        if not line.strip() or line.startswith('#'):
            continue
        seq_ids.add(line.split()[0].strip())
    return seq_ids
|
[
"def",
"get_seqs_to_keep_lookup_from_seq_id_file",
"(",
"id_to_keep_f",
")",
":",
"return",
"set",
"(",
"[",
"l",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"for",
"l",
"in",
"id_to_keep_f",
"if",
"(",
"l",
".",
"strip",
"(",
")",
"and",
"(",
"not",
"l",
".",
"startswith",
"(",
"'#'",
")",
")",
")",
"]",
")"
] |
generate a lookup dict of chimeras in chimera file .
|
train
| false
|
52,713
|
def set_image_paths(app, encoded_repository_id, text):
    """Rewrite relative ``.. image::`` paths in *text* to repository routes.

    Legacy ``$PATH_TO_IMAGES`` / ``${static_path}`` placeholders are
    stripped, path slashes inside image targets are percent-encoded, and
    non-http(s) image targets are prefixed with the static-images route
    for either the Galaxy client or the Tool Shed.
    """
    if text:
        if repository_util.is_tool_shed_client(app):
            route_to_images = ('admin_toolshed/static/images/%s' % encoded_repository_id)
        else:
            route_to_images = ('/repository/static/images/%s' % encoded_repository_id)
        text = text.replace('$PATH_TO_IMAGES', '')
        text = text.replace('${static_path}', '')
        # Encode '/' within each relative image target so it survives routing.
        for match in re.findall('.. image:: (?!http)/?(.+)', text):
            text = text.replace(match, match.replace('/', '%2F'))
        text = re.sub('\\.\\. image:: (?!https?://)/?(.+)', ('.. image:: %s/\\1' % route_to_images), text)
    return text
|
[
"def",
"set_image_paths",
"(",
"app",
",",
"encoded_repository_id",
",",
"text",
")",
":",
"if",
"text",
":",
"if",
"repository_util",
".",
"is_tool_shed_client",
"(",
"app",
")",
":",
"route_to_images",
"=",
"(",
"'admin_toolshed/static/images/%s'",
"%",
"encoded_repository_id",
")",
"else",
":",
"route_to_images",
"=",
"(",
"'/repository/static/images/%s'",
"%",
"encoded_repository_id",
")",
"text",
"=",
"text",
".",
"replace",
"(",
"'$PATH_TO_IMAGES'",
",",
"''",
")",
"text",
"=",
"text",
".",
"replace",
"(",
"'${static_path}'",
",",
"''",
")",
"for",
"match",
"in",
"re",
".",
"findall",
"(",
"'.. image:: (?!http)/?(.+)'",
",",
"text",
")",
":",
"text",
"=",
"text",
".",
"replace",
"(",
"match",
",",
"match",
".",
"replace",
"(",
"'/'",
",",
"'%2F'",
")",
")",
"text",
"=",
"re",
".",
"sub",
"(",
"'\\\\.\\\\. image:: (?!https?://)/?(.+)'",
",",
"(",
"'.. image:: %s/\\\\1'",
"%",
"route_to_images",
")",
",",
"text",
")",
"return",
"text"
] |
handle tool help image display for tools that are contained in repositories in the tool shed or installed into galaxy as well as image display in repository readme files .
|
train
| false
|
52,714
|
def processAppendElementNode(archivableObjects, elementNode, parentNode):
    """Append the element's object to *archivableObjects* if it is carvable.

    Looks up the child 'object' node's 'bf:type' in the global carvable
    class table; unknown types (or a missing node) are silently skipped.
    The built archivable object inherits the element's visibility and is
    re-parented to *parentNode*.
    """
    if elementNode is None:  # 'is None' instead of '== None'
        return
    # Renamed from 'object', which shadowed the builtin of the same name.
    objectNode = elementNode.getFirstChildByLocalName('object')
    if 'bf:type' not in objectNode.attributes:
        return
    shapeType = objectNode.attributes['bf:type']
    if shapeType not in globalCarvableClassObjectTable:
        return
    carvableClassObject = globalCarvableClassObjectTable[shapeType]
    archivableObject = getCarvableObject(elementNode, carvableClassObject, objectNode)
    archivableObject.elementNode.attributes['visible'] = elementNode.attributes['visible']
    archivableObject.setToArtOfIllusionDictionary()
    archivableObject.elementNode.parentNode = parentNode
    archivableObjects.append(archivableObject)
|
[
"def",
"processAppendElementNode",
"(",
"archivableObjects",
",",
"elementNode",
",",
"parentNode",
")",
":",
"if",
"(",
"elementNode",
"==",
"None",
")",
":",
"return",
"object",
"=",
"elementNode",
".",
"getFirstChildByLocalName",
"(",
"'object'",
")",
"if",
"(",
"'bf:type'",
"not",
"in",
"object",
".",
"attributes",
")",
":",
"return",
"shapeType",
"=",
"object",
".",
"attributes",
"[",
"'bf:type'",
"]",
"if",
"(",
"shapeType",
"not",
"in",
"globalCarvableClassObjectTable",
")",
":",
"return",
"carvableClassObject",
"=",
"globalCarvableClassObjectTable",
"[",
"shapeType",
"]",
"archivableObject",
"=",
"getCarvableObject",
"(",
"elementNode",
",",
"carvableClassObject",
",",
"object",
")",
"archivableObject",
".",
"elementNode",
".",
"attributes",
"[",
"'visible'",
"]",
"=",
"elementNode",
".",
"attributes",
"[",
"'visible'",
"]",
"archivableObject",
".",
"setToArtOfIllusionDictionary",
"(",
")",
"archivableObject",
".",
"elementNode",
".",
"parentNode",
"=",
"parentNode",
"archivableObjects",
".",
"append",
"(",
"archivableObject",
")"
] |
add the object info if it is carvable .
|
train
| false
|
52,715
|
def _get_test_value(test=None, **kwargs):
    """Determine the effective value of the salt ``test`` flag.

    An explicit *test* argument wins; otherwise salt's test-mode helper is
    consulted, falling back to the minion's ``__opts__['test']`` setting.
    """
    ret = True
    if (test is None):
        if salt.utils.test_mode(test=test, **kwargs):
            ret = True
        else:
            ret = __opts__.get('test', None)
    else:
        ret = test
    return ret
|
[
"def",
"_get_test_value",
"(",
"test",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"True",
"if",
"(",
"test",
"is",
"None",
")",
":",
"if",
"salt",
".",
"utils",
".",
"test_mode",
"(",
"test",
"=",
"test",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"True",
"else",
":",
"ret",
"=",
"__opts__",
".",
"get",
"(",
"'test'",
",",
"None",
")",
"else",
":",
"ret",
"=",
"test",
"return",
"ret"
] |
determine the correct value for the test flag .
|
train
| true
|
52,716
|
@app.route('/')
def view_landing_page():
    """Render the landing page; tracking is on when HTTPBIN_TRACKING is set."""
    tracking_enabled = ('HTTPBIN_TRACKING' in os.environ)
    return render_template('index.html', tracking_enabled=tracking_enabled)
|
[
"@",
"app",
".",
"route",
"(",
"'/'",
")",
"def",
"view_landing_page",
"(",
")",
":",
"tracking_enabled",
"=",
"(",
"'HTTPBIN_TRACKING'",
"in",
"os",
".",
"environ",
")",
"return",
"render_template",
"(",
"'index.html'",
",",
"tracking_enabled",
"=",
"tracking_enabled",
")"
] |
generates landing page .
|
train
| false
|
52,717
|
def _build_schema_resource(fields):
infos = []
for field in fields:
info = {'name': field.name, 'type': field.field_type, 'mode': field.mode}
if (field.description is not None):
info['description'] = field.description
if (field.fields is not None):
info['fields'] = _build_schema_resource(field.fields)
infos.append(info)
return infos
|
[
"def",
"_build_schema_resource",
"(",
"fields",
")",
":",
"infos",
"=",
"[",
"]",
"for",
"field",
"in",
"fields",
":",
"info",
"=",
"{",
"'name'",
":",
"field",
".",
"name",
",",
"'type'",
":",
"field",
".",
"field_type",
",",
"'mode'",
":",
"field",
".",
"mode",
"}",
"if",
"(",
"field",
".",
"description",
"is",
"not",
"None",
")",
":",
"info",
"[",
"'description'",
"]",
"=",
"field",
".",
"description",
"if",
"(",
"field",
".",
"fields",
"is",
"not",
"None",
")",
":",
"info",
"[",
"'fields'",
"]",
"=",
"_build_schema_resource",
"(",
"field",
".",
"fields",
")",
"infos",
".",
"append",
"(",
"info",
")",
"return",
"infos"
] |
generate a resource fragment for a schema .
|
train
| false
|
52,718
|
def _list_projects(source=None):
project = acl.get_limited_to_project(flask.request.headers)
if project:
if source:
if (project in flask.request.storage_conn.get_projects(source=source)):
projects = [project]
else:
projects = []
else:
projects = [project]
else:
projects = flask.request.storage_conn.get_projects(source=source)
return flask.jsonify(projects=list(projects))
|
[
"def",
"_list_projects",
"(",
"source",
"=",
"None",
")",
":",
"project",
"=",
"acl",
".",
"get_limited_to_project",
"(",
"flask",
".",
"request",
".",
"headers",
")",
"if",
"project",
":",
"if",
"source",
":",
"if",
"(",
"project",
"in",
"flask",
".",
"request",
".",
"storage_conn",
".",
"get_projects",
"(",
"source",
"=",
"source",
")",
")",
":",
"projects",
"=",
"[",
"project",
"]",
"else",
":",
"projects",
"=",
"[",
"]",
"else",
":",
"projects",
"=",
"[",
"project",
"]",
"else",
":",
"projects",
"=",
"flask",
".",
"request",
".",
"storage_conn",
".",
"get_projects",
"(",
"source",
"=",
"source",
")",
"return",
"flask",
".",
"jsonify",
"(",
"projects",
"=",
"list",
"(",
"projects",
")",
")"
] |
return a list of project names .
|
train
| false
|
52,719
|
def AddAclBatchSample():
client = CreateClient()
doc = gdata.docs.data.Resource(type='document', title='My Sample Doc')
doc = client.CreateResource(doc)
acl1 = gdata.docs.data.AclEntry(scope=gdata.acl.data.AclScope(value='user1@example.com', type='user'), role=gdata.acl.data.AclRole(value='reader'), batch_operation=gdata.data.BatchOperation(type='insert'))
acl2 = gdata.docs.data.AclEntry(scope=gdata.acl.data.AclScope(value='user2@example.com', type='user'), role=gdata.acl.data.AclRole(value='reader'), batch_operation=gdata.data.BatchOperation(type='insert'))
acl_operations = [acl1, acl2]
client.BatchProcessAclEntries(doc, acl_operations)
|
[
"def",
"AddAclBatchSample",
"(",
")",
":",
"client",
"=",
"CreateClient",
"(",
")",
"doc",
"=",
"gdata",
".",
"docs",
".",
"data",
".",
"Resource",
"(",
"type",
"=",
"'document'",
",",
"title",
"=",
"'My Sample Doc'",
")",
"doc",
"=",
"client",
".",
"CreateResource",
"(",
"doc",
")",
"acl1",
"=",
"gdata",
".",
"docs",
".",
"data",
".",
"AclEntry",
"(",
"scope",
"=",
"gdata",
".",
"acl",
".",
"data",
".",
"AclScope",
"(",
"value",
"=",
"'user1@example.com'",
",",
"type",
"=",
"'user'",
")",
",",
"role",
"=",
"gdata",
".",
"acl",
".",
"data",
".",
"AclRole",
"(",
"value",
"=",
"'reader'",
")",
",",
"batch_operation",
"=",
"gdata",
".",
"data",
".",
"BatchOperation",
"(",
"type",
"=",
"'insert'",
")",
")",
"acl2",
"=",
"gdata",
".",
"docs",
".",
"data",
".",
"AclEntry",
"(",
"scope",
"=",
"gdata",
".",
"acl",
".",
"data",
".",
"AclScope",
"(",
"value",
"=",
"'user2@example.com'",
",",
"type",
"=",
"'user'",
")",
",",
"role",
"=",
"gdata",
".",
"acl",
".",
"data",
".",
"AclRole",
"(",
"value",
"=",
"'reader'",
")",
",",
"batch_operation",
"=",
"gdata",
".",
"data",
".",
"BatchOperation",
"(",
"type",
"=",
"'insert'",
")",
")",
"acl_operations",
"=",
"[",
"acl1",
",",
"acl2",
"]",
"client",
".",
"BatchProcessAclEntries",
"(",
"doc",
",",
"acl_operations",
")"
] |
add a list of acls as a batch .
|
train
| false
|
52,720
|
def raw_memmove(builder, dst, src, count, itemsize, align=1):
return _raw_memcpy(builder, 'llvm.memmove', dst, src, count, itemsize, align)
|
[
"def",
"raw_memmove",
"(",
"builder",
",",
"dst",
",",
"src",
",",
"count",
",",
"itemsize",
",",
"align",
"=",
"1",
")",
":",
"return",
"_raw_memcpy",
"(",
"builder",
",",
"'llvm.memmove'",
",",
"dst",
",",
"src",
",",
"count",
",",
"itemsize",
",",
"align",
")"
] |
emit a raw memmove() call for count items of size itemsize from src to dest .
|
train
| false
|
52,721
|
def parse_pkginfo(line, osarch=None):
try:
(name, epoch, version, release, arch, repoid) = line.split('_|-')
except ValueError:
return None
name = resolve_name(name, arch, osarch)
if release:
version += '-{0}'.format(release)
if (epoch not in ('(none)', '0')):
version = ':'.join((epoch, version))
return pkginfo(name, version, arch, repoid)
|
[
"def",
"parse_pkginfo",
"(",
"line",
",",
"osarch",
"=",
"None",
")",
":",
"try",
":",
"(",
"name",
",",
"epoch",
",",
"version",
",",
"release",
",",
"arch",
",",
"repoid",
")",
"=",
"line",
".",
"split",
"(",
"'_|-'",
")",
"except",
"ValueError",
":",
"return",
"None",
"name",
"=",
"resolve_name",
"(",
"name",
",",
"arch",
",",
"osarch",
")",
"if",
"release",
":",
"version",
"+=",
"'-{0}'",
".",
"format",
"(",
"release",
")",
"if",
"(",
"epoch",
"not",
"in",
"(",
"'(none)'",
",",
"'0'",
")",
")",
":",
"version",
"=",
"':'",
".",
"join",
"(",
"(",
"epoch",
",",
"version",
")",
")",
"return",
"pkginfo",
"(",
"name",
",",
"version",
",",
"arch",
",",
"repoid",
")"
] |
a small helper to parse an rpm/repoquery commands output .
|
train
| false
|
52,722
|
def test_error_on_file_to_FileLinks():
td = mkdtemp()
tf1 = NamedTemporaryFile(dir=td)
nt.assert_raises(ValueError, display.FileLinks, tf1.name)
|
[
"def",
"test_error_on_file_to_FileLinks",
"(",
")",
":",
"td",
"=",
"mkdtemp",
"(",
")",
"tf1",
"=",
"NamedTemporaryFile",
"(",
"dir",
"=",
"td",
")",
"nt",
".",
"assert_raises",
"(",
"ValueError",
",",
"display",
".",
"FileLinks",
",",
"tf1",
".",
"name",
")"
] |
filelinks: raises error when passed file .
|
train
| false
|
52,724
|
def psd(x, NFFT=256, Fs=2, detrend=detrend_none, window=window_hanning, noverlap=0, pad_to=None, sides='default', scale_by_freq=None):
(Pxx, freqs) = csd(x, x, NFFT, Fs, detrend, window, noverlap, pad_to, sides, scale_by_freq)
return (Pxx.real, freqs)
|
[
"def",
"psd",
"(",
"x",
",",
"NFFT",
"=",
"256",
",",
"Fs",
"=",
"2",
",",
"detrend",
"=",
"detrend_none",
",",
"window",
"=",
"window_hanning",
",",
"noverlap",
"=",
"0",
",",
"pad_to",
"=",
"None",
",",
"sides",
"=",
"'default'",
",",
"scale_by_freq",
"=",
"None",
")",
":",
"(",
"Pxx",
",",
"freqs",
")",
"=",
"csd",
"(",
"x",
",",
"x",
",",
"NFFT",
",",
"Fs",
",",
"detrend",
",",
"window",
",",
"noverlap",
",",
"pad_to",
",",
"sides",
",",
"scale_by_freq",
")",
"return",
"(",
"Pxx",
".",
"real",
",",
"freqs",
")"
] |
the power spectral density by welchs average periodogram method .
|
train
| false
|
52,725
|
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
@utils.arg('flavor', metavar='<flavor>', help=_('Name or ID of new flavor.'))
@utils.arg('--poll', dest='poll', action='store_true', default=False, help=_('Report the server resize progress until it completes.'))
def do_resize(cs, args):
server = _find_server(cs, args.server)
flavor = _find_flavor(cs, args.flavor)
kwargs = utils.get_resource_manager_extra_kwargs(do_resize, args)
server.resize(flavor, **kwargs)
if args.poll:
_poll_for_status(cs.servers.get, server.id, 'resizing', ['active', 'verify_resize'])
|
[
"@",
"utils",
".",
"arg",
"(",
"'server'",
",",
"metavar",
"=",
"'<server>'",
",",
"help",
"=",
"_",
"(",
"'Name or ID of server.'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'flavor'",
",",
"metavar",
"=",
"'<flavor>'",
",",
"help",
"=",
"_",
"(",
"'Name or ID of new flavor.'",
")",
")",
"@",
"utils",
".",
"arg",
"(",
"'--poll'",
",",
"dest",
"=",
"'poll'",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"_",
"(",
"'Report the server resize progress until it completes.'",
")",
")",
"def",
"do_resize",
"(",
"cs",
",",
"args",
")",
":",
"server",
"=",
"_find_server",
"(",
"cs",
",",
"args",
".",
"server",
")",
"flavor",
"=",
"_find_flavor",
"(",
"cs",
",",
"args",
".",
"flavor",
")",
"kwargs",
"=",
"utils",
".",
"get_resource_manager_extra_kwargs",
"(",
"do_resize",
",",
"args",
")",
"server",
".",
"resize",
"(",
"flavor",
",",
"**",
"kwargs",
")",
"if",
"args",
".",
"poll",
":",
"_poll_for_status",
"(",
"cs",
".",
"servers",
".",
"get",
",",
"server",
".",
"id",
",",
"'resizing'",
",",
"[",
"'active'",
",",
"'verify_resize'",
"]",
")"
] |
resize a server .
|
train
| false
|
52,728
|
def binary_ip(host):
return socket.inet_aton(socket.gethostbyname(host))
|
[
"def",
"binary_ip",
"(",
"host",
")",
":",
"return",
"socket",
".",
"inet_aton",
"(",
"socket",
".",
"gethostbyname",
"(",
"host",
")",
")"
] |
binary_ip -> str resolve host and return ip as four byte string .
|
train
| false
|
52,729
|
def tree_data(G, root, attrs=_attrs):
if (G.number_of_nodes() != (G.number_of_edges() + 1)):
raise TypeError('G is not a tree.')
if (not G.is_directed()):
raise TypeError('G is not directed.')
id_ = attrs['id']
children = attrs['children']
if (id_ == children):
raise nx.NetworkXError('Attribute names are not unique.')
def add_children(n, G):
nbrs = G[n]
if (len(nbrs) == 0):
return []
children_ = []
for child in nbrs:
d = dict(chain(G.node[child].items(), [(id_, child)]))
c = add_children(child, G)
if c:
d[children] = c
children_.append(d)
return children_
data = dict(chain(G.node[root].items(), [(id_, root)]))
data[children] = add_children(root, G)
return data
|
[
"def",
"tree_data",
"(",
"G",
",",
"root",
",",
"attrs",
"=",
"_attrs",
")",
":",
"if",
"(",
"G",
".",
"number_of_nodes",
"(",
")",
"!=",
"(",
"G",
".",
"number_of_edges",
"(",
")",
"+",
"1",
")",
")",
":",
"raise",
"TypeError",
"(",
"'G is not a tree.'",
")",
"if",
"(",
"not",
"G",
".",
"is_directed",
"(",
")",
")",
":",
"raise",
"TypeError",
"(",
"'G is not directed.'",
")",
"id_",
"=",
"attrs",
"[",
"'id'",
"]",
"children",
"=",
"attrs",
"[",
"'children'",
"]",
"if",
"(",
"id_",
"==",
"children",
")",
":",
"raise",
"nx",
".",
"NetworkXError",
"(",
"'Attribute names are not unique.'",
")",
"def",
"add_children",
"(",
"n",
",",
"G",
")",
":",
"nbrs",
"=",
"G",
"[",
"n",
"]",
"if",
"(",
"len",
"(",
"nbrs",
")",
"==",
"0",
")",
":",
"return",
"[",
"]",
"children_",
"=",
"[",
"]",
"for",
"child",
"in",
"nbrs",
":",
"d",
"=",
"dict",
"(",
"chain",
"(",
"G",
".",
"node",
"[",
"child",
"]",
".",
"items",
"(",
")",
",",
"[",
"(",
"id_",
",",
"child",
")",
"]",
")",
")",
"c",
"=",
"add_children",
"(",
"child",
",",
"G",
")",
"if",
"c",
":",
"d",
"[",
"children",
"]",
"=",
"c",
"children_",
".",
"append",
"(",
"d",
")",
"return",
"children_",
"data",
"=",
"dict",
"(",
"chain",
"(",
"G",
".",
"node",
"[",
"root",
"]",
".",
"items",
"(",
")",
",",
"[",
"(",
"id_",
",",
"root",
")",
"]",
")",
")",
"data",
"[",
"children",
"]",
"=",
"add_children",
"(",
"root",
",",
"G",
")",
"return",
"data"
] |
return data in tree format that is suitable for json serialization and use in javascript documents .
|
train
| false
|
52,731
|
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
|
[
"def",
"pairwise_tukeyhsd",
"(",
"endog",
",",
"groups",
",",
"alpha",
"=",
"0.05",
")",
":",
"return",
"MultiComparison",
"(",
"endog",
",",
"groups",
")",
".",
"tukeyhsd",
"(",
"alpha",
"=",
"alpha",
")"
] |
calculate all pairwise comparisons with tukeyhsd confidence intervals this is just a wrapper around tukeyhsd method of multicomparison parameters endog : ndarray .
|
train
| false
|
52,732
|
def _parse_timestamp(entry):
if (not isinstance(entry, (str, str_type))):
raise ValueError(('parse_timestamp() input must be a str, got a %s' % type(entry)))
try:
time = [int(x) for x in _timestamp_re.match(entry).groups()]
except AttributeError:
raise ValueError(('Expected timestamp in format YYYY-MM-DD HH:MM:ss but got ' + entry))
return datetime.datetime(time[0], time[1], time[2], time[3], time[4], time[5])
|
[
"def",
"_parse_timestamp",
"(",
"entry",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"entry",
",",
"(",
"str",
",",
"str_type",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'parse_timestamp() input must be a str, got a %s'",
"%",
"type",
"(",
"entry",
")",
")",
")",
"try",
":",
"time",
"=",
"[",
"int",
"(",
"x",
")",
"for",
"x",
"in",
"_timestamp_re",
".",
"match",
"(",
"entry",
")",
".",
"groups",
"(",
")",
"]",
"except",
"AttributeError",
":",
"raise",
"ValueError",
"(",
"(",
"'Expected timestamp in format YYYY-MM-DD HH:MM:ss but got '",
"+",
"entry",
")",
")",
"return",
"datetime",
".",
"datetime",
"(",
"time",
"[",
"0",
"]",
",",
"time",
"[",
"1",
"]",
",",
"time",
"[",
"2",
"]",
",",
"time",
"[",
"3",
"]",
",",
"time",
"[",
"4",
"]",
",",
"time",
"[",
"5",
"]",
")"
] |
parses the date and time that in format like like .
|
train
| false
|
52,733
|
def check_role(username, role):
return (role in get_roles(username))
|
[
"def",
"check_role",
"(",
"username",
",",
"role",
")",
":",
"return",
"(",
"role",
"in",
"get_roles",
"(",
"username",
")",
")"
] |
check if user is assigned a specific role on switch .
|
train
| false
|
52,734
|
def skeletonize_3d(img):
if ((img.ndim < 2) or (img.ndim > 3)):
raise ValueError(('skeletonize_3d can only handle 2D or 3D images; got img.ndim = %s instead.' % img.ndim))
img = np.ascontiguousarray(img)
img = img_as_ubyte(img, force_copy=False)
img_o = img
if (img.ndim == 2):
img_o = img[np.newaxis, ...]
img_o = np.pad(img_o, pad_width=1, mode='constant')
maxval = img_o.max()
img_o[(img_o != 0)] = 1
img_o = np.asarray(_compute_thin_image(img_o))
img_o = crop(img_o, crop_width=1)
if (img.ndim == 2):
img_o = img_o[0]
img_o *= maxval
return img_o
|
[
"def",
"skeletonize_3d",
"(",
"img",
")",
":",
"if",
"(",
"(",
"img",
".",
"ndim",
"<",
"2",
")",
"or",
"(",
"img",
".",
"ndim",
">",
"3",
")",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'skeletonize_3d can only handle 2D or 3D images; got img.ndim = %s instead.'",
"%",
"img",
".",
"ndim",
")",
")",
"img",
"=",
"np",
".",
"ascontiguousarray",
"(",
"img",
")",
"img",
"=",
"img_as_ubyte",
"(",
"img",
",",
"force_copy",
"=",
"False",
")",
"img_o",
"=",
"img",
"if",
"(",
"img",
".",
"ndim",
"==",
"2",
")",
":",
"img_o",
"=",
"img",
"[",
"np",
".",
"newaxis",
",",
"...",
"]",
"img_o",
"=",
"np",
".",
"pad",
"(",
"img_o",
",",
"pad_width",
"=",
"1",
",",
"mode",
"=",
"'constant'",
")",
"maxval",
"=",
"img_o",
".",
"max",
"(",
")",
"img_o",
"[",
"(",
"img_o",
"!=",
"0",
")",
"]",
"=",
"1",
"img_o",
"=",
"np",
".",
"asarray",
"(",
"_compute_thin_image",
"(",
"img_o",
")",
")",
"img_o",
"=",
"crop",
"(",
"img_o",
",",
"crop_width",
"=",
"1",
")",
"if",
"(",
"img",
".",
"ndim",
"==",
"2",
")",
":",
"img_o",
"=",
"img_o",
"[",
"0",
"]",
"img_o",
"*=",
"maxval",
"return",
"img_o"
] |
compute the skeleton of a binary image .
|
train
| false
|
52,736
|
def clear_compatversion_cache_on_save(sender, instance, created, **kw):
try:
if (not (instance.addon.type == amo.ADDON_EXTENSION)):
return
except ObjectDoesNotExist:
return
if ((not kw.get('raw')) and (created or instance.deleted)):
instance.addon.invalidate_d2c_versions()
|
[
"def",
"clear_compatversion_cache_on_save",
"(",
"sender",
",",
"instance",
",",
"created",
",",
"**",
"kw",
")",
":",
"try",
":",
"if",
"(",
"not",
"(",
"instance",
".",
"addon",
".",
"type",
"==",
"amo",
".",
"ADDON_EXTENSION",
")",
")",
":",
"return",
"except",
"ObjectDoesNotExist",
":",
"return",
"if",
"(",
"(",
"not",
"kw",
".",
"get",
"(",
"'raw'",
")",
")",
"and",
"(",
"created",
"or",
"instance",
".",
"deleted",
")",
")",
":",
"instance",
".",
"addon",
".",
"invalidate_d2c_versions",
"(",
")"
] |
clears compatversion cache if new version created .
|
train
| false
|
52,738
|
def dataset_fixed_cov():
(n, dim) = (300, 2)
np.random.seed(0)
C = np.array([[0.0, (-0.23)], [0.83, 0.23]])
X = np.r_[(np.dot(np.random.randn(n, dim), C), (np.dot(np.random.randn(n, dim), C) + np.array([1, 1])))]
y = np.hstack((np.zeros(n), np.ones(n)))
return (X, y)
|
[
"def",
"dataset_fixed_cov",
"(",
")",
":",
"(",
"n",
",",
"dim",
")",
"=",
"(",
"300",
",",
"2",
")",
"np",
".",
"random",
".",
"seed",
"(",
"0",
")",
"C",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.0",
",",
"(",
"-",
"0.23",
")",
"]",
",",
"[",
"0.83",
",",
"0.23",
"]",
"]",
")",
"X",
"=",
"np",
".",
"r_",
"[",
"(",
"np",
".",
"dot",
"(",
"np",
".",
"random",
".",
"randn",
"(",
"n",
",",
"dim",
")",
",",
"C",
")",
",",
"(",
"np",
".",
"dot",
"(",
"np",
".",
"random",
".",
"randn",
"(",
"n",
",",
"dim",
")",
",",
"C",
")",
"+",
"np",
".",
"array",
"(",
"[",
"1",
",",
"1",
"]",
")",
")",
")",
"]",
"y",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"zeros",
"(",
"n",
")",
",",
"np",
".",
"ones",
"(",
"n",
")",
")",
")",
"return",
"(",
"X",
",",
"y",
")"
] |
generate 2 gaussians samples with the same covariance matrix .
|
train
| false
|
52,740
|
def call_insert(tup):
try:
(cache, key, files, overwrite) = tup
return cache.insert(key, files, overwrite)
except NonfatalArtifactCacheError as e:
logger.warn(u'Error while inserting into artifact cache: {0}'.format(e))
return False
|
[
"def",
"call_insert",
"(",
"tup",
")",
":",
"try",
":",
"(",
"cache",
",",
"key",
",",
"files",
",",
"overwrite",
")",
"=",
"tup",
"return",
"cache",
".",
"insert",
"(",
"key",
",",
"files",
",",
"overwrite",
")",
"except",
"NonfatalArtifactCacheError",
"as",
"e",
":",
"logger",
".",
"warn",
"(",
"u'Error while inserting into artifact cache: {0}'",
".",
"format",
"(",
"e",
")",
")",
"return",
"False"
] |
importable helper for multi-proc calling of artifactcache .
|
train
| true
|
52,741
|
def pre_call_hook(service, call, request, response, rpc=None):
if recorder_proxy.has_recorder_for_current_request():
if config.DEBUG:
logging.debug('pre_call_hook: recording %s.%s', service, call)
recorder_proxy.record_rpc_request(service, call, request, response, rpc)
|
[
"def",
"pre_call_hook",
"(",
"service",
",",
"call",
",",
"request",
",",
"response",
",",
"rpc",
"=",
"None",
")",
":",
"if",
"recorder_proxy",
".",
"has_recorder_for_current_request",
"(",
")",
":",
"if",
"config",
".",
"DEBUG",
":",
"logging",
".",
"debug",
"(",
"'pre_call_hook: recording %s.%s'",
",",
"service",
",",
"call",
")",
"recorder_proxy",
".",
"record_rpc_request",
"(",
"service",
",",
"call",
",",
"request",
",",
"response",
",",
"rpc",
")"
] |
pre-call hook function for apiprixy_stub_map .
|
train
| false
|
52,742
|
def write_to_json_file(data, timestamp):
with open(STATUS_FILE.format(postfix=timestamp), 'w') as status_file:
json.dump(data, status_file)
|
[
"def",
"write_to_json_file",
"(",
"data",
",",
"timestamp",
")",
":",
"with",
"open",
"(",
"STATUS_FILE",
".",
"format",
"(",
"postfix",
"=",
"timestamp",
")",
",",
"'w'",
")",
"as",
"status_file",
":",
"json",
".",
"dump",
"(",
"data",
",",
"status_file",
")"
] |
writes the dictionary containing the status of operations performed during the upgrade process into a json file .
|
train
| false
|
52,743
|
def _reverseLogLevelMapping():
mapping = {}
for (logLevel, pyLogLevel) in toStdlibLogLevelMapping.items():
mapping[pyLogLevel] = logLevel
mapping[stdlibLogging.getLevelName(pyLogLevel)] = logLevel
return mapping
|
[
"def",
"_reverseLogLevelMapping",
"(",
")",
":",
"mapping",
"=",
"{",
"}",
"for",
"(",
"logLevel",
",",
"pyLogLevel",
")",
"in",
"toStdlibLogLevelMapping",
".",
"items",
"(",
")",
":",
"mapping",
"[",
"pyLogLevel",
"]",
"=",
"logLevel",
"mapping",
"[",
"stdlibLogging",
".",
"getLevelName",
"(",
"pyLogLevel",
")",
"]",
"=",
"logLevel",
"return",
"mapping"
] |
reverse the above mapping .
|
train
| false
|
52,744
|
def authenticationAndCipheringRequest(AuthenticationParameterRAND_presence=0, CiphKeySeqNr_presence=0):
a = TpPd(pd=3)
b = MessageType(mesType=18)
d = CipheringAlgorithmAndImeisvRequest()
e = ForceToStandbyAndAcReferenceNumber()
packet = (((a / b) / d) / e)
if (AuthenticationParameterRAND_presence is 1):
g = AuthenticationParameterRAND(ieiAPR=33)
packet = (packet / g)
if (CiphKeySeqNr_presence is 1):
h = CiphKeySeqNrHdr(ieiCKSN=8, eightBitCKSN=0)
packet = (packet / h)
return packet
|
[
"def",
"authenticationAndCipheringRequest",
"(",
"AuthenticationParameterRAND_presence",
"=",
"0",
",",
"CiphKeySeqNr_presence",
"=",
"0",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"3",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"18",
")",
"d",
"=",
"CipheringAlgorithmAndImeisvRequest",
"(",
")",
"e",
"=",
"ForceToStandbyAndAcReferenceNumber",
"(",
")",
"packet",
"=",
"(",
"(",
"(",
"a",
"/",
"b",
")",
"/",
"d",
")",
"/",
"e",
")",
"if",
"(",
"AuthenticationParameterRAND_presence",
"is",
"1",
")",
":",
"g",
"=",
"AuthenticationParameterRAND",
"(",
"ieiAPR",
"=",
"33",
")",
"packet",
"=",
"(",
"packet",
"/",
"g",
")",
"if",
"(",
"CiphKeySeqNr_presence",
"is",
"1",
")",
":",
"h",
"=",
"CiphKeySeqNrHdr",
"(",
"ieiCKSN",
"=",
"8",
",",
"eightBitCKSN",
"=",
"0",
")",
"packet",
"=",
"(",
"packet",
"/",
"h",
")",
"return",
"packet"
] |
authentication and ciphering request section 9 .
|
train
| true
|
52,745
|
@expect_json
@login_required
@require_http_methods(('GET', 'POST', 'DELETE'))
def videos_handler(request, course_key_string, edx_video_id=None):
course = _get_and_validate_course(course_key_string, request.user)
if (not course):
return HttpResponseNotFound()
if (request.method == 'GET'):
if ('application/json' in request.META.get('HTTP_ACCEPT', '')):
return videos_index_json(course)
else:
return videos_index_html(course)
elif (request.method == 'DELETE'):
remove_video_for_course(course_key_string, edx_video_id)
return JsonResponse()
else:
if is_status_update_request(request.json):
return send_video_status_update(request.json)
return videos_post(course, request)
|
[
"@",
"expect_json",
"@",
"login_required",
"@",
"require_http_methods",
"(",
"(",
"'GET'",
",",
"'POST'",
",",
"'DELETE'",
")",
")",
"def",
"videos_handler",
"(",
"request",
",",
"course_key_string",
",",
"edx_video_id",
"=",
"None",
")",
":",
"course",
"=",
"_get_and_validate_course",
"(",
"course_key_string",
",",
"request",
".",
"user",
")",
"if",
"(",
"not",
"course",
")",
":",
"return",
"HttpResponseNotFound",
"(",
")",
"if",
"(",
"request",
".",
"method",
"==",
"'GET'",
")",
":",
"if",
"(",
"'application/json'",
"in",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_ACCEPT'",
",",
"''",
")",
")",
":",
"return",
"videos_index_json",
"(",
"course",
")",
"else",
":",
"return",
"videos_index_html",
"(",
"course",
")",
"elif",
"(",
"request",
".",
"method",
"==",
"'DELETE'",
")",
":",
"remove_video_for_course",
"(",
"course_key_string",
",",
"edx_video_id",
")",
"return",
"JsonResponse",
"(",
")",
"else",
":",
"if",
"is_status_update_request",
"(",
"request",
".",
"json",
")",
":",
"return",
"send_video_status_update",
"(",
"request",
".",
"json",
")",
"return",
"videos_post",
"(",
"course",
",",
"request",
")"
] |
the restful handler for video uploads .
|
train
| false
|
52,746
|
def bias(x, y, axis=1):
x_shape = x.shape
y_shape = y.shape
if chainer.is_debug():
assert (x_shape[axis:(axis + len(y_shape))] == y_shape)
y1_shape = tuple(((([1] * axis) + list(y_shape)) + ([1] * ((len(x_shape) - axis) - len(y_shape)))))
y1 = reshape.reshape(y, y1_shape)
y2 = broadcast.broadcast_to(y1, x_shape)
return (x + y2)
|
[
"def",
"bias",
"(",
"x",
",",
"y",
",",
"axis",
"=",
"1",
")",
":",
"x_shape",
"=",
"x",
".",
"shape",
"y_shape",
"=",
"y",
".",
"shape",
"if",
"chainer",
".",
"is_debug",
"(",
")",
":",
"assert",
"(",
"x_shape",
"[",
"axis",
":",
"(",
"axis",
"+",
"len",
"(",
"y_shape",
")",
")",
"]",
"==",
"y_shape",
")",
"y1_shape",
"=",
"tuple",
"(",
"(",
"(",
"(",
"[",
"1",
"]",
"*",
"axis",
")",
"+",
"list",
"(",
"y_shape",
")",
")",
"+",
"(",
"[",
"1",
"]",
"*",
"(",
"(",
"len",
"(",
"x_shape",
")",
"-",
"axis",
")",
"-",
"len",
"(",
"y_shape",
")",
")",
")",
")",
")",
"y1",
"=",
"reshape",
".",
"reshape",
"(",
"y",
",",
"y1_shape",
")",
"y2",
"=",
"broadcast",
".",
"broadcast_to",
"(",
"y1",
",",
"x_shape",
")",
"return",
"(",
"x",
"+",
"y2",
")"
] |
elementwise summation with broadcasting .
|
train
| false
|
52,747
|
def is_directed_acyclic_graph(G):
if (not G.is_directed()):
return False
try:
consume(topological_sort(G))
return True
except nx.NetworkXUnfeasible:
return False
|
[
"def",
"is_directed_acyclic_graph",
"(",
"G",
")",
":",
"if",
"(",
"not",
"G",
".",
"is_directed",
"(",
")",
")",
":",
"return",
"False",
"try",
":",
"consume",
"(",
"topological_sort",
"(",
"G",
")",
")",
"return",
"True",
"except",
"nx",
".",
"NetworkXUnfeasible",
":",
"return",
"False"
] |
return true if the graph g is a directed acyclic graph or false if not .
|
train
| false
|
52,748
|
def projective_prob_parse_demo():
from nltk.parse.dependencygraph import conll_data2
graphs = [DependencyGraph(entry) for entry in conll_data2.split(u'\n\n') if entry]
ppdp = ProbabilisticProjectiveDependencyParser()
print(u'Training Probabilistic Projective Dependency Parser...')
ppdp.train(graphs)
sent = [u'Cathy', u'zag', u'hen', u'wild', u'zwaaien', u'.']
print(u"Parsing '", u' '.join(sent), u"'...")
print(u'Parse:')
for tree in ppdp.parse(sent):
print(tree)
|
[
"def",
"projective_prob_parse_demo",
"(",
")",
":",
"from",
"nltk",
".",
"parse",
".",
"dependencygraph",
"import",
"conll_data2",
"graphs",
"=",
"[",
"DependencyGraph",
"(",
"entry",
")",
"for",
"entry",
"in",
"conll_data2",
".",
"split",
"(",
"u'\\n\\n'",
")",
"if",
"entry",
"]",
"ppdp",
"=",
"ProbabilisticProjectiveDependencyParser",
"(",
")",
"print",
"(",
"u'Training Probabilistic Projective Dependency Parser...'",
")",
"ppdp",
".",
"train",
"(",
"graphs",
")",
"sent",
"=",
"[",
"u'Cathy'",
",",
"u'zag'",
",",
"u'hen'",
",",
"u'wild'",
",",
"u'zwaaien'",
",",
"u'.'",
"]",
"print",
"(",
"u\"Parsing '\"",
",",
"u' '",
".",
"join",
"(",
"sent",
")",
",",
"u\"'...\"",
")",
"print",
"(",
"u'Parse:'",
")",
"for",
"tree",
"in",
"ppdp",
".",
"parse",
"(",
"sent",
")",
":",
"print",
"(",
"tree",
")"
] |
a demo showing the training and use of a projective dependency parser .
|
train
| false
|
52,749
|
def ep_match(season, episode, expr, title=None):
m = _RE_SP.search(expr)
if m:
req_season = int(m.group(1))
req_episode = int(m.group(2))
if ((season > req_season) or ((season == req_season) and (episode >= req_episode))):
if title:
show = expr[:m.start()].replace('.', ' ').replace('_', ' ').strip()
show = show.replace(' ', '[._ ]+')
return bool(re.search(show, title, re.I))
else:
return True
else:
return False
else:
return True
|
[
"def",
"ep_match",
"(",
"season",
",",
"episode",
",",
"expr",
",",
"title",
"=",
"None",
")",
":",
"m",
"=",
"_RE_SP",
".",
"search",
"(",
"expr",
")",
"if",
"m",
":",
"req_season",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"1",
")",
")",
"req_episode",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"2",
")",
")",
"if",
"(",
"(",
"season",
">",
"req_season",
")",
"or",
"(",
"(",
"season",
"==",
"req_season",
")",
"and",
"(",
"episode",
">=",
"req_episode",
")",
")",
")",
":",
"if",
"title",
":",
"show",
"=",
"expr",
"[",
":",
"m",
".",
"start",
"(",
")",
"]",
".",
"replace",
"(",
"'.'",
",",
"' '",
")",
".",
"replace",
"(",
"'_'",
",",
"' '",
")",
".",
"strip",
"(",
")",
"show",
"=",
"show",
".",
"replace",
"(",
"' '",
",",
"'[._ ]+'",
")",
"return",
"bool",
"(",
"re",
".",
"search",
"(",
"show",
",",
"title",
",",
"re",
".",
"I",
")",
")",
"else",
":",
"return",
"True",
"else",
":",
"return",
"False",
"else",
":",
"return",
"True"
] |
return true if season .
|
train
| false
|
52,752
|
def render_view(context, request, name='', secure=True):
iterable = render_view_to_iterable(context, request, name, secure)
if (iterable is None):
return None
return ''.join(iterable)
|
[
"def",
"render_view",
"(",
"context",
",",
"request",
",",
"name",
"=",
"''",
",",
"secure",
"=",
"True",
")",
":",
"iterable",
"=",
"render_view_to_iterable",
"(",
"context",
",",
"request",
",",
"name",
",",
"secure",
")",
"if",
"(",
"iterable",
"is",
"None",
")",
":",
"return",
"None",
"return",
"''",
".",
"join",
"(",
"iterable",
")"
] |
call the :term:view callable configured with a :term:view configuration that matches the :term:view name name registered against the specified context and request and unwind the view responses app_iter into a single bytestring .
|
train
| false
|
52,753
|
def LocalGroup(uname=None):
level = 3
if (uname is None):
uname = win32api.GetUserName()
if (uname.find('\\') < 0):
uname = ((win32api.GetDomainName() + '\\') + uname)
group = 'python_test_group'
try:
win32net.NetLocalGroupDel(server, group)
print "WARNING: existing local group '%s' has been deleted."
except win32net.error:
pass
group_data = {'name': group}
win32net.NetLocalGroupAdd(server, 1, group_data)
try:
u = {'domainandname': uname}
win32net.NetLocalGroupAddMembers(server, group, level, [u])
(mem, tot, res) = win32net.NetLocalGroupGetMembers(server, group, level)
print 'members are', mem
if (mem[0]['domainandname'] != uname):
print ('ERROR: LocalGroup just added %s, but members are %r' % (uname, mem))
win32net.NetLocalGroupDelMembers(server, group, [m['domainandname'] for m in mem])
finally:
win32net.NetLocalGroupDel(server, group)
print 'Created a local group, added and removed members, then deleted the group'
|
[
"def",
"LocalGroup",
"(",
"uname",
"=",
"None",
")",
":",
"level",
"=",
"3",
"if",
"(",
"uname",
"is",
"None",
")",
":",
"uname",
"=",
"win32api",
".",
"GetUserName",
"(",
")",
"if",
"(",
"uname",
".",
"find",
"(",
"'\\\\'",
")",
"<",
"0",
")",
":",
"uname",
"=",
"(",
"(",
"win32api",
".",
"GetDomainName",
"(",
")",
"+",
"'\\\\'",
")",
"+",
"uname",
")",
"group",
"=",
"'python_test_group'",
"try",
":",
"win32net",
".",
"NetLocalGroupDel",
"(",
"server",
",",
"group",
")",
"print",
"\"WARNING: existing local group '%s' has been deleted.\"",
"except",
"win32net",
".",
"error",
":",
"pass",
"group_data",
"=",
"{",
"'name'",
":",
"group",
"}",
"win32net",
".",
"NetLocalGroupAdd",
"(",
"server",
",",
"1",
",",
"group_data",
")",
"try",
":",
"u",
"=",
"{",
"'domainandname'",
":",
"uname",
"}",
"win32net",
".",
"NetLocalGroupAddMembers",
"(",
"server",
",",
"group",
",",
"level",
",",
"[",
"u",
"]",
")",
"(",
"mem",
",",
"tot",
",",
"res",
")",
"=",
"win32net",
".",
"NetLocalGroupGetMembers",
"(",
"server",
",",
"group",
",",
"level",
")",
"print",
"'members are'",
",",
"mem",
"if",
"(",
"mem",
"[",
"0",
"]",
"[",
"'domainandname'",
"]",
"!=",
"uname",
")",
":",
"print",
"(",
"'ERROR: LocalGroup just added %s, but members are %r'",
"%",
"(",
"uname",
",",
"mem",
")",
")",
"win32net",
".",
"NetLocalGroupDelMembers",
"(",
"server",
",",
"group",
",",
"[",
"m",
"[",
"'domainandname'",
"]",
"for",
"m",
"in",
"mem",
"]",
")",
"finally",
":",
"win32net",
".",
"NetLocalGroupDel",
"(",
"server",
",",
"group",
")",
"print",
"'Created a local group, added and removed members, then deleted the group'"
] |
creates a local group .
|
train
| false
|
52,755
|
def get_resources_on_service_clients(logical_line, physical_line, filename, line_number, lines):
if (not _common_service_clients_check(logical_line, physical_line, filename, 'ignored_list_T110.txt')):
return
for line in lines[line_number:]:
if (METHOD.match(line) or CLASS.match(line)):
return
if (('self.get(' not in line) and (('self.show_resource(' not in line) and ('self.list_resources(' not in line))):
continue
if METHOD_GET_RESOURCE.match(logical_line):
return
msg = 'T110: [GET /resources] methods should be list_<resource name>s or show_<resource name>'
(yield (0, msg))
|
[
"def",
"get_resources_on_service_clients",
"(",
"logical_line",
",",
"physical_line",
",",
"filename",
",",
"line_number",
",",
"lines",
")",
":",
"if",
"(",
"not",
"_common_service_clients_check",
"(",
"logical_line",
",",
"physical_line",
",",
"filename",
",",
"'ignored_list_T110.txt'",
")",
")",
":",
"return",
"for",
"line",
"in",
"lines",
"[",
"line_number",
":",
"]",
":",
"if",
"(",
"METHOD",
".",
"match",
"(",
"line",
")",
"or",
"CLASS",
".",
"match",
"(",
"line",
")",
")",
":",
"return",
"if",
"(",
"(",
"'self.get('",
"not",
"in",
"line",
")",
"and",
"(",
"(",
"'self.show_resource('",
"not",
"in",
"line",
")",
"and",
"(",
"'self.list_resources('",
"not",
"in",
"line",
")",
")",
")",
":",
"continue",
"if",
"METHOD_GET_RESOURCE",
".",
"match",
"(",
"logical_line",
")",
":",
"return",
"msg",
"=",
"'T110: [GET /resources] methods should be list_<resource name>s or show_<resource name>'",
"(",
"yield",
"(",
"0",
",",
"msg",
")",
")"
] |
check that service client names of get should be consistent t110 .
|
train
| false
|
52,756
|
def hover(browser, element):
ActionChains(browser).move_to_element(element).perform()
|
[
"def",
"hover",
"(",
"browser",
",",
"element",
")",
":",
"ActionChains",
"(",
"browser",
")",
".",
"move_to_element",
"(",
"element",
")",
".",
"perform",
"(",
")"
] |
hover over an element .
|
train
| false
|
52,757
|
def _convert_array(array, dtype):
if (array.dtype == dtype):
return array
elif ((array.dtype.itemsize == dtype.itemsize) and (not (np.issubdtype(array.dtype, np.number) and np.issubdtype(dtype, np.number)))):
return array.view(dtype)
else:
return array.astype(dtype)
|
[
"def",
"_convert_array",
"(",
"array",
",",
"dtype",
")",
":",
"if",
"(",
"array",
".",
"dtype",
"==",
"dtype",
")",
":",
"return",
"array",
"elif",
"(",
"(",
"array",
".",
"dtype",
".",
"itemsize",
"==",
"dtype",
".",
"itemsize",
")",
"and",
"(",
"not",
"(",
"np",
".",
"issubdtype",
"(",
"array",
".",
"dtype",
",",
"np",
".",
"number",
")",
"and",
"np",
".",
"issubdtype",
"(",
"dtype",
",",
"np",
".",
"number",
")",
")",
")",
")",
":",
"return",
"array",
".",
"view",
"(",
"dtype",
")",
"else",
":",
"return",
"array",
".",
"astype",
"(",
"dtype",
")"
] |
converts an array to a new dtype--if the itemsize of the new dtype is the same as the old dtype and both types are not numeric .
|
train
| false
|
52,758
|
def max_pooling_nd(x, ksize, stride=None, pad=0, cover_all=True, use_cudnn=True):
ndim = len(x.shape[2:])
return MaxPoolingND(ndim, ksize, stride, pad, cover_all, use_cudnn)(x)
|
[
"def",
"max_pooling_nd",
"(",
"x",
",",
"ksize",
",",
"stride",
"=",
"None",
",",
"pad",
"=",
"0",
",",
"cover_all",
"=",
"True",
",",
"use_cudnn",
"=",
"True",
")",
":",
"ndim",
"=",
"len",
"(",
"x",
".",
"shape",
"[",
"2",
":",
"]",
")",
"return",
"MaxPoolingND",
"(",
"ndim",
",",
"ksize",
",",
"stride",
",",
"pad",
",",
"cover_all",
",",
"use_cudnn",
")",
"(",
"x",
")"
] |
n-dimensionally spatial max pooling function .
|
train
| false
|
52,759
|
def link_reverse_func(reverse_name):
return (lambda course, reverse_url_func: reverse_url_func(reverse_name, args=[course.id.to_deprecated_string()]))
|
[
"def",
"link_reverse_func",
"(",
"reverse_name",
")",
":",
"return",
"(",
"lambda",
"course",
",",
"reverse_url_func",
":",
"reverse_url_func",
"(",
"reverse_name",
",",
"args",
"=",
"[",
"course",
".",
"id",
".",
"to_deprecated_string",
"(",
")",
"]",
")",
")"
] |
returns a function that takes in a course and reverse_url_func .
|
train
| false
|
52,760
|
def jordan_cell(eigenval, n):
n = as_int(n)
out = zeros(n)
for i in range((n - 1)):
out[(i, i)] = eigenval
out[(i, (i + 1))] = S.One
out[((n - 1), (n - 1))] = eigenval
return out
|
[
"def",
"jordan_cell",
"(",
"eigenval",
",",
"n",
")",
":",
"n",
"=",
"as_int",
"(",
"n",
")",
"out",
"=",
"zeros",
"(",
"n",
")",
"for",
"i",
"in",
"range",
"(",
"(",
"n",
"-",
"1",
")",
")",
":",
"out",
"[",
"(",
"i",
",",
"i",
")",
"]",
"=",
"eigenval",
"out",
"[",
"(",
"i",
",",
"(",
"i",
"+",
"1",
")",
")",
"]",
"=",
"S",
".",
"One",
"out",
"[",
"(",
"(",
"n",
"-",
"1",
")",
",",
"(",
"n",
"-",
"1",
")",
")",
"]",
"=",
"eigenval",
"return",
"out"
] |
create matrix of jordan cell kind: examples .
|
train
| false
|
52,761
|
def EntityFullName(entity):
names = []
for element in entity.path().element_list():
if element.has_id():
name = ('%s:%s' % (element.type(), str(element.id())))
elif element.has_name():
name = ('%s:%s' % (element.type(), str(element.name())))
else:
name = ('%s:None' % element.type())
names.append(name)
fullname = '.'.join(names)
return fullname
|
[
"def",
"EntityFullName",
"(",
"entity",
")",
":",
"names",
"=",
"[",
"]",
"for",
"element",
"in",
"entity",
".",
"path",
"(",
")",
".",
"element_list",
"(",
")",
":",
"if",
"element",
".",
"has_id",
"(",
")",
":",
"name",
"=",
"(",
"'%s:%s'",
"%",
"(",
"element",
".",
"type",
"(",
")",
",",
"str",
"(",
"element",
".",
"id",
"(",
")",
")",
")",
")",
"elif",
"element",
".",
"has_name",
"(",
")",
":",
"name",
"=",
"(",
"'%s:%s'",
"%",
"(",
"element",
".",
"type",
"(",
")",
",",
"str",
"(",
"element",
".",
"name",
"(",
")",
")",
")",
")",
"else",
":",
"name",
"=",
"(",
"'%s:None'",
"%",
"element",
".",
"type",
"(",
")",
")",
"names",
".",
"append",
"(",
"name",
")",
"fullname",
"=",
"'.'",
".",
"join",
"(",
"names",
")",
"return",
"fullname"
] |
given entity primary key as a reference proto .
|
train
| false
|
52,763
|
def get_object_properties(vim, collector, mobj, type, properties):
client_factory = vim.client.factory
if (mobj is None):
return None
usecoll = collector
if (usecoll is None):
usecoll = vim.get_service_content().propertyCollector
property_filter_spec = client_factory.create('ns0:PropertyFilterSpec')
property_spec = client_factory.create('ns0:PropertySpec')
property_spec.all = ((properties is None) or (len(properties) == 0))
property_spec.pathSet = properties
property_spec.type = type
object_spec = client_factory.create('ns0:ObjectSpec')
object_spec.obj = mobj
object_spec.skip = False
property_filter_spec.propSet = [property_spec]
property_filter_spec.objectSet = [object_spec]
return vim.RetrieveProperties(usecoll, specSet=[property_filter_spec])
|
[
"def",
"get_object_properties",
"(",
"vim",
",",
"collector",
",",
"mobj",
",",
"type",
",",
"properties",
")",
":",
"client_factory",
"=",
"vim",
".",
"client",
".",
"factory",
"if",
"(",
"mobj",
"is",
"None",
")",
":",
"return",
"None",
"usecoll",
"=",
"collector",
"if",
"(",
"usecoll",
"is",
"None",
")",
":",
"usecoll",
"=",
"vim",
".",
"get_service_content",
"(",
")",
".",
"propertyCollector",
"property_filter_spec",
"=",
"client_factory",
".",
"create",
"(",
"'ns0:PropertyFilterSpec'",
")",
"property_spec",
"=",
"client_factory",
".",
"create",
"(",
"'ns0:PropertySpec'",
")",
"property_spec",
".",
"all",
"=",
"(",
"(",
"properties",
"is",
"None",
")",
"or",
"(",
"len",
"(",
"properties",
")",
"==",
"0",
")",
")",
"property_spec",
".",
"pathSet",
"=",
"properties",
"property_spec",
".",
"type",
"=",
"type",
"object_spec",
"=",
"client_factory",
".",
"create",
"(",
"'ns0:ObjectSpec'",
")",
"object_spec",
".",
"obj",
"=",
"mobj",
"object_spec",
".",
"skip",
"=",
"False",
"property_filter_spec",
".",
"propSet",
"=",
"[",
"property_spec",
"]",
"property_filter_spec",
".",
"objectSet",
"=",
"[",
"object_spec",
"]",
"return",
"vim",
".",
"RetrieveProperties",
"(",
"usecoll",
",",
"specSet",
"=",
"[",
"property_filter_spec",
"]",
")"
] |
gets the properties of the managed object specified .
|
train
| false
|
52,764
|
def solidity_unresolved_symbols(hex_code):
return set(re.findall('_.{39}', hex_code))
|
[
"def",
"solidity_unresolved_symbols",
"(",
"hex_code",
")",
":",
"return",
"set",
"(",
"re",
".",
"findall",
"(",
"'_.{39}'",
",",
"hex_code",
")",
")"
] |
return the unresolved symbols contained in the hex_code .
|
train
| false
|
52,765
|
def _add_cli_param(params, key, value):
if (value is not None):
params.append('--{0}={1}'.format(key, value))
|
[
"def",
"_add_cli_param",
"(",
"params",
",",
"key",
",",
"value",
")",
":",
"if",
"(",
"value",
"is",
"not",
"None",
")",
":",
"params",
".",
"append",
"(",
"'--{0}={1}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")"
] |
adds key and value as a command line parameter to params .
|
train
| true
|
52,766
|
def has_archive_provider(node, user):
return node.has_addon(settings.ARCHIVE_PROVIDER)
|
[
"def",
"has_archive_provider",
"(",
"node",
",",
"user",
")",
":",
"return",
"node",
".",
"has_addon",
"(",
"settings",
".",
"ARCHIVE_PROVIDER",
")"
] |
a generic function for checking whether or not some node .
|
train
| false
|
52,767
|
def _paragraphs(text):
paragraphs = [[]]
for line in text.splitlines():
if line.strip():
paragraphs[(-1)].append(line)
elif paragraphs[(-1)]:
paragraphs.append([])
return [u' '.join(lines).strip() for lines in paragraphs]
|
[
"def",
"_paragraphs",
"(",
"text",
")",
":",
"paragraphs",
"=",
"[",
"[",
"]",
"]",
"for",
"line",
"in",
"text",
".",
"splitlines",
"(",
")",
":",
"if",
"line",
".",
"strip",
"(",
")",
":",
"paragraphs",
"[",
"(",
"-",
"1",
")",
"]",
".",
"append",
"(",
"line",
")",
"elif",
"paragraphs",
"[",
"(",
"-",
"1",
")",
"]",
":",
"paragraphs",
".",
"append",
"(",
"[",
"]",
")",
"return",
"[",
"u' '",
".",
"join",
"(",
"lines",
")",
".",
"strip",
"(",
")",
"for",
"lines",
"in",
"paragraphs",
"]"
] |
splits a piece of text into paragraphs .
|
train
| false
|
52,768
|
def _get_capabilities():
request = file_service_pb.GetCapabilitiesRequest()
response = file_service_pb.GetCapabilitiesResponse()
_make_call('GetCapabilities', request, response)
return response
|
[
"def",
"_get_capabilities",
"(",
")",
":",
"request",
"=",
"file_service_pb",
".",
"GetCapabilitiesRequest",
"(",
")",
"response",
"=",
"file_service_pb",
".",
"GetCapabilitiesResponse",
"(",
")",
"_make_call",
"(",
"'GetCapabilities'",
",",
"request",
",",
"response",
")",
"return",
"response"
] |
get files api capabilities .
|
train
| false
|
52,769
|
def lvdisplay(lvname=''):
ret = {}
cmd = ['lvdisplay', '-c']
if lvname:
cmd.append(lvname)
cmd_ret = __salt__['cmd.run_all'](cmd, python_shell=False)
if (cmd_ret['retcode'] != 0):
return {}
out = cmd_ret['stdout'].splitlines()
for line in out:
comps = line.strip().split(':')
ret[comps[0]] = {'Logical Volume Name': comps[0], 'Volume Group Name': comps[1], 'Logical Volume Access': comps[2], 'Logical Volume Status': comps[3], 'Internal Logical Volume Number': comps[4], 'Open Logical Volumes': comps[5], 'Logical Volume Size': comps[6], 'Current Logical Extents Associated': comps[7], 'Allocated Logical Extents': comps[8], 'Allocation Policy': comps[9], 'Read Ahead Sectors': comps[10], 'Major Device Number': comps[11], 'Minor Device Number': comps[12]}
return ret
|
[
"def",
"lvdisplay",
"(",
"lvname",
"=",
"''",
")",
":",
"ret",
"=",
"{",
"}",
"cmd",
"=",
"[",
"'lvdisplay'",
",",
"'-c'",
"]",
"if",
"lvname",
":",
"cmd",
".",
"append",
"(",
"lvname",
")",
"cmd_ret",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"if",
"(",
"cmd_ret",
"[",
"'retcode'",
"]",
"!=",
"0",
")",
":",
"return",
"{",
"}",
"out",
"=",
"cmd_ret",
"[",
"'stdout'",
"]",
".",
"splitlines",
"(",
")",
"for",
"line",
"in",
"out",
":",
"comps",
"=",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
"':'",
")",
"ret",
"[",
"comps",
"[",
"0",
"]",
"]",
"=",
"{",
"'Logical Volume Name'",
":",
"comps",
"[",
"0",
"]",
",",
"'Volume Group Name'",
":",
"comps",
"[",
"1",
"]",
",",
"'Logical Volume Access'",
":",
"comps",
"[",
"2",
"]",
",",
"'Logical Volume Status'",
":",
"comps",
"[",
"3",
"]",
",",
"'Internal Logical Volume Number'",
":",
"comps",
"[",
"4",
"]",
",",
"'Open Logical Volumes'",
":",
"comps",
"[",
"5",
"]",
",",
"'Logical Volume Size'",
":",
"comps",
"[",
"6",
"]",
",",
"'Current Logical Extents Associated'",
":",
"comps",
"[",
"7",
"]",
",",
"'Allocated Logical Extents'",
":",
"comps",
"[",
"8",
"]",
",",
"'Allocation Policy'",
":",
"comps",
"[",
"9",
"]",
",",
"'Read Ahead Sectors'",
":",
"comps",
"[",
"10",
"]",
",",
"'Major Device Number'",
":",
"comps",
"[",
"11",
"]",
",",
"'Minor Device Number'",
":",
"comps",
"[",
"12",
"]",
"}",
"return",
"ret"
] |
return information about the logical volume(s) cli examples: .
|
train
| true
|
52,772
|
def sign_certificate():
LOGGER.info('Signing certificate...')
proc = subprocess.Popen(['openssl req -in /tmp/domain.csr -outform DER'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(csr_der, err) = proc.communicate()
(code, result) = _send_signed_request((DEFAULT_CA + '/acme/new-cert'), {'resource': 'new-cert', 'csr': _b64(csr_der)})
if (code != 201):
raise ValueError('Error signing certificate: {0} {1}'.format(code, result))
LOGGER.info('Certificate signed!')
return result
|
[
"def",
"sign_certificate",
"(",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Signing certificate...'",
")",
"proc",
"=",
"subprocess",
".",
"Popen",
"(",
"[",
"'openssl req -in /tmp/domain.csr -outform DER'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"shell",
"=",
"True",
")",
"(",
"csr_der",
",",
"err",
")",
"=",
"proc",
".",
"communicate",
"(",
")",
"(",
"code",
",",
"result",
")",
"=",
"_send_signed_request",
"(",
"(",
"DEFAULT_CA",
"+",
"'/acme/new-cert'",
")",
",",
"{",
"'resource'",
":",
"'new-cert'",
",",
"'csr'",
":",
"_b64",
"(",
"csr_der",
")",
"}",
")",
"if",
"(",
"code",
"!=",
"201",
")",
":",
"raise",
"ValueError",
"(",
"'Error signing certificate: {0} {1}'",
".",
"format",
"(",
"code",
",",
"result",
")",
")",
"LOGGER",
".",
"info",
"(",
"'Certificate signed!'",
")",
"return",
"result"
] |
get the new certificate .
|
train
| false
|
52,773
|
def _argsdicts(args, mydict):
if (len(args) == 0):
args = (None,)
elif (len(args) == 1):
args = _totuple(args[0])
else:
raise Exception('We should have never gotten here.')
mykeys = list(mydict.keys())
myvalues = list(map(_totuple, list(mydict.values())))
maxlength = max(list(map(len, ([args] + myvalues))))
for i in range(maxlength):
thisdict = {}
for (key, value) in zip(mykeys, myvalues):
try:
thisdict[key] = value[i]
except IndexError:
thisdict[key] = value[(-1)]
try:
thisarg = args[i]
except IndexError:
thisarg = args[(-1)]
(yield (thisarg, thisdict))
|
[
"def",
"_argsdicts",
"(",
"args",
",",
"mydict",
")",
":",
"if",
"(",
"len",
"(",
"args",
")",
"==",
"0",
")",
":",
"args",
"=",
"(",
"None",
",",
")",
"elif",
"(",
"len",
"(",
"args",
")",
"==",
"1",
")",
":",
"args",
"=",
"_totuple",
"(",
"args",
"[",
"0",
"]",
")",
"else",
":",
"raise",
"Exception",
"(",
"'We should have never gotten here.'",
")",
"mykeys",
"=",
"list",
"(",
"mydict",
".",
"keys",
"(",
")",
")",
"myvalues",
"=",
"list",
"(",
"map",
"(",
"_totuple",
",",
"list",
"(",
"mydict",
".",
"values",
"(",
")",
")",
")",
")",
"maxlength",
"=",
"max",
"(",
"list",
"(",
"map",
"(",
"len",
",",
"(",
"[",
"args",
"]",
"+",
"myvalues",
")",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"maxlength",
")",
":",
"thisdict",
"=",
"{",
"}",
"for",
"(",
"key",
",",
"value",
")",
"in",
"zip",
"(",
"mykeys",
",",
"myvalues",
")",
":",
"try",
":",
"thisdict",
"[",
"key",
"]",
"=",
"value",
"[",
"i",
"]",
"except",
"IndexError",
":",
"thisdict",
"[",
"key",
"]",
"=",
"value",
"[",
"(",
"-",
"1",
")",
"]",
"try",
":",
"thisarg",
"=",
"args",
"[",
"i",
"]",
"except",
"IndexError",
":",
"thisarg",
"=",
"args",
"[",
"(",
"-",
"1",
")",
"]",
"(",
"yield",
"(",
"thisarg",
",",
"thisdict",
")",
")"
] |
a utility generator that pads argument list and dictionary values .
|
train
| true
|
52,774
|
def tables(output_lines):
tables_ = []
table_ = []
label = None
start = False
header = False
if (not isinstance(output_lines, list)):
output_lines = output_lines.split('\n')
for line in output_lines:
if delimiter_line.match(line):
if (not start):
start = True
elif (not header):
header = True
else:
start = header = None
table_.append(line)
parsed = table(table_)
parsed['label'] = label
tables_.append(parsed)
table_ = []
label = None
continue
if start:
table_.append(line)
elif (label is None):
label = line
else:
LOG.warning('Invalid line between tables: %s', line)
if (len(table_) > 0):
LOG.warning('Missing end of table')
return tables_
|
[
"def",
"tables",
"(",
"output_lines",
")",
":",
"tables_",
"=",
"[",
"]",
"table_",
"=",
"[",
"]",
"label",
"=",
"None",
"start",
"=",
"False",
"header",
"=",
"False",
"if",
"(",
"not",
"isinstance",
"(",
"output_lines",
",",
"list",
")",
")",
":",
"output_lines",
"=",
"output_lines",
".",
"split",
"(",
"'\\n'",
")",
"for",
"line",
"in",
"output_lines",
":",
"if",
"delimiter_line",
".",
"match",
"(",
"line",
")",
":",
"if",
"(",
"not",
"start",
")",
":",
"start",
"=",
"True",
"elif",
"(",
"not",
"header",
")",
":",
"header",
"=",
"True",
"else",
":",
"start",
"=",
"header",
"=",
"None",
"table_",
".",
"append",
"(",
"line",
")",
"parsed",
"=",
"table",
"(",
"table_",
")",
"parsed",
"[",
"'label'",
"]",
"=",
"label",
"tables_",
".",
"append",
"(",
"parsed",
")",
"table_",
"=",
"[",
"]",
"label",
"=",
"None",
"continue",
"if",
"start",
":",
"table_",
".",
"append",
"(",
"line",
")",
"elif",
"(",
"label",
"is",
"None",
")",
":",
"label",
"=",
"line",
"else",
":",
"LOG",
".",
"warning",
"(",
"'Invalid line between tables: %s'",
",",
"line",
")",
"if",
"(",
"len",
"(",
"table_",
")",
">",
"0",
")",
":",
"LOG",
".",
"warning",
"(",
"'Missing end of table'",
")",
"return",
"tables_"
] |
find all ascii-tables in output and parse them .
|
train
| false
|
52,775
|
def add_package(package, ignore_check=False, prevent_pending=False, image=None, restart=False):
cmd = ['DISM', '/Quiet', ('/Image:{0}'.format(image) if image else '/Online'), '/Add-Package', '/PackagePath:{0}'.format(package)]
if ignore_check:
cmd.append('/IgnoreCheck')
if prevent_pending:
cmd.append('/PreventPending')
if (not restart):
cmd.append('/NoRestart')
return __salt__['cmd.run_all'](cmd)
|
[
"def",
"add_package",
"(",
"package",
",",
"ignore_check",
"=",
"False",
",",
"prevent_pending",
"=",
"False",
",",
"image",
"=",
"None",
",",
"restart",
"=",
"False",
")",
":",
"cmd",
"=",
"[",
"'DISM'",
",",
"'/Quiet'",
",",
"(",
"'/Image:{0}'",
".",
"format",
"(",
"image",
")",
"if",
"image",
"else",
"'/Online'",
")",
",",
"'/Add-Package'",
",",
"'/PackagePath:{0}'",
".",
"format",
"(",
"package",
")",
"]",
"if",
"ignore_check",
":",
"cmd",
".",
"append",
"(",
"'/IgnoreCheck'",
")",
"if",
"prevent_pending",
":",
"cmd",
".",
"append",
"(",
"'/PreventPending'",
")",
"if",
"(",
"not",
"restart",
")",
":",
"cmd",
".",
"append",
"(",
"'/NoRestart'",
")",
"return",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")"
] |
install a package using dism args: package : the package to install .
|
train
| true
|
52,776
|
def random_dense_design_matrix(rng, num_examples, dim, num_classes):
X = rng.randn(num_examples, dim)
if num_classes:
Y = rng.randint(0, num_classes, (num_examples, 1))
y_labels = num_classes
else:
Y = None
y_labels = None
return DenseDesignMatrix(X=X, y=Y, y_labels=y_labels)
|
[
"def",
"random_dense_design_matrix",
"(",
"rng",
",",
"num_examples",
",",
"dim",
",",
"num_classes",
")",
":",
"X",
"=",
"rng",
".",
"randn",
"(",
"num_examples",
",",
"dim",
")",
"if",
"num_classes",
":",
"Y",
"=",
"rng",
".",
"randint",
"(",
"0",
",",
"num_classes",
",",
"(",
"num_examples",
",",
"1",
")",
")",
"y_labels",
"=",
"num_classes",
"else",
":",
"Y",
"=",
"None",
"y_labels",
"=",
"None",
"return",
"DenseDesignMatrix",
"(",
"X",
"=",
"X",
",",
"y",
"=",
"Y",
",",
"y_labels",
"=",
"y_labels",
")"
] |
creates a random dense design matrix that has class labels .
|
train
| false
|
52,778
|
def _seqmatrix2strmatrix(matrix):
return dict(((t, str(matrix[t])) for t in matrix))
|
[
"def",
"_seqmatrix2strmatrix",
"(",
"matrix",
")",
":",
"return",
"dict",
"(",
"(",
"(",
"t",
",",
"str",
"(",
"matrix",
"[",
"t",
"]",
")",
")",
"for",
"t",
"in",
"matrix",
")",
")"
] |
converts a seq-object matrix to a plain sequence-string matrix .
|
train
| false
|
52,779
|
def no_select():
PCap.use_select = False
|
[
"def",
"no_select",
"(",
")",
":",
"PCap",
".",
"use_select",
"=",
"False"
] |
sets default pcap behavior to not try to use select() .
|
train
| false
|
52,781
|
def getVertexes(geometryOutput):
vertexes = []
addVertexes(geometryOutput, vertexes)
return vertexes
|
[
"def",
"getVertexes",
"(",
"geometryOutput",
")",
":",
"vertexes",
"=",
"[",
"]",
"addVertexes",
"(",
"geometryOutput",
",",
"vertexes",
")",
"return",
"vertexes"
] |
get the vertexes .
|
train
| false
|
52,782
|
def get_user_access(uid, channel=14, **kwargs):
with _IpmiCommand(**kwargs) as s:
return s.get_user_access(uid, channel=channel)
|
[
"def",
"get_user_access",
"(",
"uid",
",",
"channel",
"=",
"14",
",",
"**",
"kwargs",
")",
":",
"with",
"_IpmiCommand",
"(",
"**",
"kwargs",
")",
"as",
"s",
":",
"return",
"s",
".",
"get_user_access",
"(",
"uid",
",",
"channel",
"=",
"channel",
")"
] |
get user access .
|
train
| true
|
52,783
|
def _setOS():
if (not conf.os):
return
if (conf.os.lower() not in SUPPORTED_OS):
errMsg = 'you provided an unsupported back-end DBMS operating '
errMsg += 'system. The supported DBMS operating systems for OS '
errMsg += ('and file system access are %s. ' % ', '.join([o.capitalize() for o in SUPPORTED_OS]))
errMsg += 'If you do not know the back-end DBMS underlying OS, '
errMsg += 'do not provide it and sqlmap will fingerprint it for '
errMsg += 'you.'
raise SqlmapUnsupportedDBMSException(errMsg)
debugMsg = 'forcing back-end DBMS operating system to user defined '
debugMsg += ("value '%s'" % conf.os)
logger.debug(debugMsg)
Backend.setOs(conf.os)
|
[
"def",
"_setOS",
"(",
")",
":",
"if",
"(",
"not",
"conf",
".",
"os",
")",
":",
"return",
"if",
"(",
"conf",
".",
"os",
".",
"lower",
"(",
")",
"not",
"in",
"SUPPORTED_OS",
")",
":",
"errMsg",
"=",
"'you provided an unsupported back-end DBMS operating '",
"errMsg",
"+=",
"'system. The supported DBMS operating systems for OS '",
"errMsg",
"+=",
"(",
"'and file system access are %s. '",
"%",
"', '",
".",
"join",
"(",
"[",
"o",
".",
"capitalize",
"(",
")",
"for",
"o",
"in",
"SUPPORTED_OS",
"]",
")",
")",
"errMsg",
"+=",
"'If you do not know the back-end DBMS underlying OS, '",
"errMsg",
"+=",
"'do not provide it and sqlmap will fingerprint it for '",
"errMsg",
"+=",
"'you.'",
"raise",
"SqlmapUnsupportedDBMSException",
"(",
"errMsg",
")",
"debugMsg",
"=",
"'forcing back-end DBMS operating system to user defined '",
"debugMsg",
"+=",
"(",
"\"value '%s'\"",
"%",
"conf",
".",
"os",
")",
"logger",
".",
"debug",
"(",
"debugMsg",
")",
"Backend",
".",
"setOs",
"(",
"conf",
".",
"os",
")"
] |
force the back-end dbms operating system option .
|
train
| false
|
52,785
|
def test_num_peaks():
img_corners = corner_harris(rgb2gray(data.astronaut()))
for i in range(20):
n = np.random.randint(1, 21)
results = peak_local_max(img_corners, min_distance=10, threshold_rel=0, num_peaks=n)
assert (results.shape[0] == n)
|
[
"def",
"test_num_peaks",
"(",
")",
":",
"img_corners",
"=",
"corner_harris",
"(",
"rgb2gray",
"(",
"data",
".",
"astronaut",
"(",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"20",
")",
":",
"n",
"=",
"np",
".",
"random",
".",
"randint",
"(",
"1",
",",
"21",
")",
"results",
"=",
"peak_local_max",
"(",
"img_corners",
",",
"min_distance",
"=",
"10",
",",
"threshold_rel",
"=",
"0",
",",
"num_peaks",
"=",
"n",
")",
"assert",
"(",
"results",
".",
"shape",
"[",
"0",
"]",
"==",
"n",
")"
] |
for a bunch of different values of num_peaks .
|
train
| false
|
52,786
|
def inspect_getmembers(object, predicate=None):
results = []
for key in dir(object):
try:
value = getattr(object, key)
except AttributeError:
continue
if ((not predicate) or predicate(value)):
results.append((key, value))
results.sort()
return results
def _is_show_member(self, name):
if self.show_inherited_members:
return True
if (name not in self._cls.__dict__):
return False
return True
|
[
"def",
"inspect_getmembers",
"(",
"object",
",",
"predicate",
"=",
"None",
")",
":",
"results",
"=",
"[",
"]",
"for",
"key",
"in",
"dir",
"(",
"object",
")",
":",
"try",
":",
"value",
"=",
"getattr",
"(",
"object",
",",
"key",
")",
"except",
"AttributeError",
":",
"continue",
"if",
"(",
"(",
"not",
"predicate",
")",
"or",
"predicate",
"(",
"value",
")",
")",
":",
"results",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"results",
".",
"sort",
"(",
")",
"return",
"results",
"def",
"_is_show_member",
"(",
"self",
",",
"name",
")",
":",
"if",
"self",
".",
"show_inherited_members",
":",
"return",
"True",
"if",
"(",
"name",
"not",
"in",
"self",
".",
"_cls",
".",
"__dict__",
")",
":",
"return",
"False",
"return",
"True"
] |
return all members of an object as pairs sorted by name .
|
train
| false
|
52,787
|
def GzipEncode(s):
with closing(StringIO()) as sio:
with gzip.GzipFile(fileobj=sio, mode='wb') as gzfile:
gzfile.write(escape.utf8(s))
return sio.getvalue()
|
[
"def",
"GzipEncode",
"(",
"s",
")",
":",
"with",
"closing",
"(",
"StringIO",
"(",
")",
")",
"as",
"sio",
":",
"with",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"sio",
",",
"mode",
"=",
"'wb'",
")",
"as",
"gzfile",
":",
"gzfile",
".",
"write",
"(",
"escape",
".",
"utf8",
"(",
"s",
")",
")",
"return",
"sio",
".",
"getvalue",
"(",
")"
] |
compresses s with gzip and returns the result .
|
train
| false
|
52,788
|
def runTest(test_name, mode):
global OUTPUT_COUNT
OUTPUT_COUNT = (OUTPUT_COUNT + 1)
if (TIME_LVL == TIME_MIN):
(ec, errors) = runTestFast(test_name)
else:
(ec, errors) = runTestSlow(test_name, mode)
if ec:
if (OUTPUT_LVL == OUTPUT_MIN):
print ('X' + test_name),
else:
print ' DCTB ', test_name, 'FAIL'
if (OUTPUT_LVL == OUTPUT_MAX):
print >>sys.stderr, errors
print
FAILED_LIST.append(((test_name + '; Exit Code=') + str(ec)))
elif (OUTPUT_LVL == OUTPUT_MIN):
print '.',
else:
print ' DCTB ', test_name, 'PASS'
if ((OUTPUT_LVL == OUTPUT_MIN) and ((OUTPUT_COUNT % 25) == 0)):
print
|
[
"def",
"runTest",
"(",
"test_name",
",",
"mode",
")",
":",
"global",
"OUTPUT_COUNT",
"OUTPUT_COUNT",
"=",
"(",
"OUTPUT_COUNT",
"+",
"1",
")",
"if",
"(",
"TIME_LVL",
"==",
"TIME_MIN",
")",
":",
"(",
"ec",
",",
"errors",
")",
"=",
"runTestFast",
"(",
"test_name",
")",
"else",
":",
"(",
"ec",
",",
"errors",
")",
"=",
"runTestSlow",
"(",
"test_name",
",",
"mode",
")",
"if",
"ec",
":",
"if",
"(",
"OUTPUT_LVL",
"==",
"OUTPUT_MIN",
")",
":",
"print",
"(",
"'X'",
"+",
"test_name",
")",
",",
"else",
":",
"print",
"' DCTB '",
",",
"test_name",
",",
"'FAIL'",
"if",
"(",
"OUTPUT_LVL",
"==",
"OUTPUT_MAX",
")",
":",
"print",
">>",
"sys",
".",
"stderr",
",",
"errors",
"print",
"FAILED_LIST",
".",
"append",
"(",
"(",
"(",
"test_name",
"+",
"'; Exit Code='",
")",
"+",
"str",
"(",
"ec",
")",
")",
")",
"elif",
"(",
"OUTPUT_LVL",
"==",
"OUTPUT_MIN",
")",
":",
"print",
"'.'",
",",
"else",
":",
"print",
"' DCTB '",
",",
"test_name",
",",
"'PASS'",
"if",
"(",
"(",
"OUTPUT_LVL",
"==",
"OUTPUT_MIN",
")",
"and",
"(",
"(",
"OUTPUT_COUNT",
"%",
"25",
")",
"==",
"0",
")",
")",
":",
"print"
] |
runs an individual ironpython test .
|
train
| false
|
52,789
|
def _get_cached_file_name(bucket_name, saltenv, path):
file_path = os.path.join(_get_cache_dir(), saltenv, bucket_name, path)
if (not os.path.exists(os.path.dirname(file_path))):
os.makedirs(os.path.dirname(file_path))
return file_path
|
[
"def",
"_get_cached_file_name",
"(",
"bucket_name",
",",
"saltenv",
",",
"path",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"_get_cache_dir",
"(",
")",
",",
"saltenv",
",",
"bucket_name",
",",
"path",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"file_path",
")",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"file_path",
")",
")",
"return",
"file_path"
] |
return the cached file name for a bucket path file .
|
train
| true
|
52,790
|
def _satisfied_by(t, o):
return t.satisfied_by(o)
|
[
"def",
"_satisfied_by",
"(",
"t",
",",
"o",
")",
":",
"return",
"t",
".",
"satisfied_by",
"(",
"o",
")"
] |
pickleable type check function .
|
train
| false
|
52,791
|
def line_aa(r0, c0, r1, c1):
return _line_aa(r0, c0, r1, c1)
|
[
"def",
"line_aa",
"(",
"r0",
",",
"c0",
",",
"r1",
",",
"c1",
")",
":",
"return",
"_line_aa",
"(",
"r0",
",",
"c0",
",",
"r1",
",",
"c1",
")"
] |
generate anti-aliased line pixel coordinates .
|
train
| false
|
52,792
|
@newrelic.agent.function_trace()
def document_last_modified(request, document_slug, document_locale):
adhoc_natural_key = (document_locale, document_slug)
natural_key_hash = Document.natural_key_hash(adhoc_natural_key)
cache_key = (DOCUMENT_LAST_MODIFIED_CACHE_KEY_TMPL % natural_key_hash)
try:
last_mod = memcache.get(cache_key)
if (last_mod is None):
doc = Document.objects.get(locale=document_locale, slug=document_slug)
last_mod = doc.fill_last_modified_cache()
return datetime.fromtimestamp(float(last_mod))
except Document.DoesNotExist:
return None
|
[
"@",
"newrelic",
".",
"agent",
".",
"function_trace",
"(",
")",
"def",
"document_last_modified",
"(",
"request",
",",
"document_slug",
",",
"document_locale",
")",
":",
"adhoc_natural_key",
"=",
"(",
"document_locale",
",",
"document_slug",
")",
"natural_key_hash",
"=",
"Document",
".",
"natural_key_hash",
"(",
"adhoc_natural_key",
")",
"cache_key",
"=",
"(",
"DOCUMENT_LAST_MODIFIED_CACHE_KEY_TMPL",
"%",
"natural_key_hash",
")",
"try",
":",
"last_mod",
"=",
"memcache",
".",
"get",
"(",
"cache_key",
")",
"if",
"(",
"last_mod",
"is",
"None",
")",
":",
"doc",
"=",
"Document",
".",
"objects",
".",
"get",
"(",
"locale",
"=",
"document_locale",
",",
"slug",
"=",
"document_slug",
")",
"last_mod",
"=",
"doc",
".",
"fill_last_modified_cache",
"(",
")",
"return",
"datetime",
".",
"fromtimestamp",
"(",
"float",
"(",
"last_mod",
")",
")",
"except",
"Document",
".",
"DoesNotExist",
":",
"return",
"None"
] |
utility function to derive the last modified timestamp of a document .
|
train
| false
|
52,793
|
def complete_cd(prefix, line, start, end, ctx):
if ((start != 0) and (line.split(' ')[0] == 'cd')):
return complete_dir(prefix, line, start, end, ctx, True)
return set()
|
[
"def",
"complete_cd",
"(",
"prefix",
",",
"line",
",",
"start",
",",
"end",
",",
"ctx",
")",
":",
"if",
"(",
"(",
"start",
"!=",
"0",
")",
"and",
"(",
"line",
".",
"split",
"(",
"' '",
")",
"[",
"0",
"]",
"==",
"'cd'",
")",
")",
":",
"return",
"complete_dir",
"(",
"prefix",
",",
"line",
",",
"start",
",",
"end",
",",
"ctx",
",",
"True",
")",
"return",
"set",
"(",
")"
] |
completion for "cd" .
|
train
| false
|
52,794
|
def decoder(s, errors=None):
r = []
decode = []
for c in s:
if ((c == '&') and (not decode)):
decode.append('&')
elif ((c == '-') and decode):
if (len(decode) == 1):
r.append('&')
else:
r.append(modified_unbase64(''.join(decode[1:])))
decode = []
elif decode:
decode.append(c)
else:
r.append(c)
if decode:
r.append(modified_unbase64(''.join(decode[1:])))
return (''.join(r), len(s))
|
[
"def",
"decoder",
"(",
"s",
",",
"errors",
"=",
"None",
")",
":",
"r",
"=",
"[",
"]",
"decode",
"=",
"[",
"]",
"for",
"c",
"in",
"s",
":",
"if",
"(",
"(",
"c",
"==",
"'&'",
")",
"and",
"(",
"not",
"decode",
")",
")",
":",
"decode",
".",
"append",
"(",
"'&'",
")",
"elif",
"(",
"(",
"c",
"==",
"'-'",
")",
"and",
"decode",
")",
":",
"if",
"(",
"len",
"(",
"decode",
")",
"==",
"1",
")",
":",
"r",
".",
"append",
"(",
"'&'",
")",
"else",
":",
"r",
".",
"append",
"(",
"modified_unbase64",
"(",
"''",
".",
"join",
"(",
"decode",
"[",
"1",
":",
"]",
")",
")",
")",
"decode",
"=",
"[",
"]",
"elif",
"decode",
":",
"decode",
".",
"append",
"(",
"c",
")",
"else",
":",
"r",
".",
"append",
"(",
"c",
")",
"if",
"decode",
":",
"r",
".",
"append",
"(",
"modified_unbase64",
"(",
"''",
".",
"join",
"(",
"decode",
"[",
"1",
":",
"]",
")",
")",
")",
"return",
"(",
"''",
".",
"join",
"(",
"r",
")",
",",
"len",
"(",
"s",
")",
")"
] |
decode the given c{str} using the imap4 specific variation of utf-7 .
|
train
| false
|
52,795
|
def load_string(source, target):
target.update(flatten(yaml.load(source, Loader=yaml.Loader)))
|
[
"def",
"load_string",
"(",
"source",
",",
"target",
")",
":",
"target",
".",
"update",
"(",
"flatten",
"(",
"yaml",
".",
"load",
"(",
"source",
",",
"Loader",
"=",
"yaml",
".",
"Loader",
")",
")",
")"
] |
load dictionary implied by yaml text into target dictionary .
|
train
| false
|
52,796
|
def _run_svn(cmd, cwd, user, username, password, opts, **kwargs):
cmd = ['svn', '--non-interactive', cmd]
options = list(opts)
if username:
options.extend(['--username', username])
if password:
options.extend(['--password', password])
cmd.extend(options)
result = __salt__['cmd.run_all'](cmd, python_shell=False, cwd=cwd, runas=user, **kwargs)
retcode = result['retcode']
if (retcode == 0):
return result['stdout']
raise exceptions.CommandExecutionError(((result['stderr'] + '\n\n') + ' '.join(cmd)))
|
[
"def",
"_run_svn",
"(",
"cmd",
",",
"cwd",
",",
"user",
",",
"username",
",",
"password",
",",
"opts",
",",
"**",
"kwargs",
")",
":",
"cmd",
"=",
"[",
"'svn'",
",",
"'--non-interactive'",
",",
"cmd",
"]",
"options",
"=",
"list",
"(",
"opts",
")",
"if",
"username",
":",
"options",
".",
"extend",
"(",
"[",
"'--username'",
",",
"username",
"]",
")",
"if",
"password",
":",
"options",
".",
"extend",
"(",
"[",
"'--password'",
",",
"password",
"]",
")",
"cmd",
".",
"extend",
"(",
"options",
")",
"result",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
",",
"cwd",
"=",
"cwd",
",",
"runas",
"=",
"user",
",",
"**",
"kwargs",
")",
"retcode",
"=",
"result",
"[",
"'retcode'",
"]",
"if",
"(",
"retcode",
"==",
"0",
")",
":",
"return",
"result",
"[",
"'stdout'",
"]",
"raise",
"exceptions",
".",
"CommandExecutionError",
"(",
"(",
"(",
"result",
"[",
"'stderr'",
"]",
"+",
"'\\n\\n'",
")",
"+",
"' '",
".",
"join",
"(",
"cmd",
")",
")",
")"
] |
execute svn return the output of the command cmd the command to run .
|
train
| true
|
52,798
|
def make_safe_digest(string, hash_func=hashlib.sha1):
    """Return a hex digest of ``string`` as text.

    The input is UTF-8 encoded before hashing, and the hexadecimal digest
    is coerced to text via ``force_text``.
    """
    encoded = string.encode('utf-8')
    digest = hash_func(encoded).hexdigest()
    return force_text(digest)
|
[
"def",
"make_safe_digest",
"(",
"string",
",",
"hash_func",
"=",
"hashlib",
".",
"sha1",
")",
":",
"return",
"force_text",
"(",
"hash_func",
"(",
"string",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
")"
] |
return a hex digest of string .
|
train
| false
|
52,799
|
def ipyfunc2():
    """Pure-Python test helper; always returns the literal 'pyfunc2'."""
    return 'pyfunc2'
|
[
"def",
"ipyfunc2",
"(",
")",
":",
"return",
"'pyfunc2'"
] |
some pure python tests .
|
train
| false
|
52,800
|
def array_str(arr, max_line_width=None, precision=None, suppress_small=None):
    """Return the string representation of the contents of an array.

    The (device) array is first copied to host memory with
    ``cupy.asnumpy``; all formatting arguments are forwarded unchanged to
    ``numpy.array_str``.
    """
    host_arr = cupy.asnumpy(arr)
    return numpy.array_str(host_arr, max_line_width, precision, suppress_small)
|
[
"def",
"array_str",
"(",
"arr",
",",
"max_line_width",
"=",
"None",
",",
"precision",
"=",
"None",
",",
"suppress_small",
"=",
"None",
")",
":",
"return",
"numpy",
".",
"array_str",
"(",
"cupy",
".",
"asnumpy",
"(",
"arr",
")",
",",
"max_line_width",
",",
"precision",
",",
"suppress_small",
")"
] |
returns the string representation of the content of an array .
|
train
| false
|
52,802
|
def hash_all(strs, digest=None):
    """Return a hex digest of the concatenation of all the strings in ``strs``.

    :param strs: iterable of byte strings; text strings are transparently
        UTF-8 encoded before hashing (``hashlib`` update requires bytes on
        Python 3, so passing ``str`` items would otherwise raise TypeError).
    :param digest: optional hash object to update in place; defaults to a
        fresh SHA-1 object.
    :returns: the hexadecimal digest as a string.
    """
    digest = digest or hashlib.sha1()
    for s in strs:
        # hashlib requires bytes; accept text input as well.
        if isinstance(s, str):
            s = s.encode('utf-8')
        digest.update(s)
    return digest.hexdigest()
|
[
"def",
"hash_all",
"(",
"strs",
",",
"digest",
"=",
"None",
")",
":",
"digest",
"=",
"(",
"digest",
"or",
"hashlib",
".",
"sha1",
"(",
")",
")",
"for",
"s",
"in",
"strs",
":",
"digest",
".",
"update",
"(",
"s",
")",
"return",
"digest",
".",
"hexdigest",
"(",
")"
] |
returns a hash of the concatenation of all the strings in strs .
|
train
| false
|
52,803
|
def find_failing_tests(result_images, source):
    """Find all failing tests by locating files whose basename (without
    extension) ends in ``-failed-diff`` under ``result_images``.

    Returns a list of ``Entry`` objects sorted by name.
    """
    failing = [
        Entry(os.path.join(dirpath, filename), result_images, source)
        for (dirpath, _, filenames) in os.walk(result_images)
        for filename in filenames
        if os.path.splitext(filename)[0].endswith(u'-failed-diff')
    ]
    failing.sort(key=(lambda entry: entry.name))
    return failing
|
[
"def",
"find_failing_tests",
"(",
"result_images",
",",
"source",
")",
":",
"entries",
"=",
"[",
"]",
"for",
"(",
"root",
",",
"dirs",
",",
"files",
")",
"in",
"os",
".",
"walk",
"(",
"result_images",
")",
":",
"for",
"fname",
"in",
"files",
":",
"(",
"basename",
",",
"ext",
")",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fname",
")",
"if",
"basename",
".",
"endswith",
"(",
"u'-failed-diff'",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"fname",
")",
"entry",
"=",
"Entry",
"(",
"path",
",",
"result_images",
",",
"source",
")",
"entries",
".",
"append",
"(",
"entry",
")",
"entries",
".",
"sort",
"(",
"key",
"=",
"(",
"lambda",
"x",
":",
"x",
".",
"name",
")",
")",
"return",
"entries"
] |
find all of the failing tests by looking for files with -failed-diff at the end of the basename .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.