id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
listlengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
|---|---|---|---|---|---|
49,925
|
@public
def gff(f, *gens, **args):
    """Compute the greatest factorial factorization of ``f``.

    Placeholder: symbolic falling-factorial support is not implemented.

    Raises:
        NotImplementedError: always.
    """
    raise NotImplementedError('symbolic falling factorial')
|
[
"@",
"public",
"def",
"gff",
"(",
"f",
",",
"*",
"gens",
",",
"**",
"args",
")",
":",
"raise",
"NotImplementedError",
"(",
"'symbolic falling factorial'",
")"
] |
compute greatest factorial factorization of f .
|
train
| false
|
49,926
|
@decorator
def interruptable(func, *args, **opts):
    """Retry ``func`` until it completes without being interrupted.

    OS X and others are known to interrupt blocking system calls with
    EINTR (or EINVAL); this wrapper transparently retries the call in
    that case and re-raises any other IOError/OSError unchanged.
    """
    while True:
        try:
            result = func(*args, **opts)
        except IOError as e:
            # Retry only interrupted-system-call errors; propagate the rest.
            if (e.errno == errno.EINTR):
                continue
            raise e
        except OSError as e:
            if (e.errno in (errno.EINTR, errno.EINVAL)):
                continue
            raise e
        else:
            # Success: leave the retry loop.
            break
    return result
|
[
"@",
"decorator",
"def",
"interruptable",
"(",
"func",
",",
"*",
"args",
",",
"**",
"opts",
")",
":",
"while",
"True",
":",
"try",
":",
"result",
"=",
"func",
"(",
"*",
"args",
",",
"**",
"opts",
")",
"except",
"IOError",
"as",
"e",
":",
"if",
"(",
"e",
".",
"errno",
"==",
"errno",
".",
"EINTR",
")",
":",
"continue",
"raise",
"e",
"except",
"OSError",
"as",
"e",
":",
"if",
"(",
"e",
".",
"errno",
"in",
"(",
"errno",
".",
"EINTR",
",",
"errno",
".",
"EINVAL",
")",
")",
":",
"continue",
"raise",
"e",
"else",
":",
"break",
"return",
"result"
] |
handle interruptable system calls osx and others are known to interrupt system calls URL URL#worse_is_better the @interruptable decorator handles this situation .
|
train
| false
|
49,928
|
def suspend_to_background_supported():
    """Return True when this Python/platform exposes SIGTSTP (suspend-to-background)."""
    has_sigtstp = hasattr(signal, u'SIGTSTP')
    return has_sigtstp
|
[
"def",
"suspend_to_background_supported",
"(",
")",
":",
"return",
"hasattr",
"(",
"signal",
",",
"u'SIGTSTP'",
")"
] |
returns true when the python implementation supports suspend-to-background .
|
train
| false
|
49,929
|
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
first_weekday = datetime_date(year, 1, 1).weekday()
if (not week_starts_Mon):
first_weekday = ((first_weekday + 1) % 7)
day_of_week = ((day_of_week + 1) % 7)
week_0_length = ((7 - first_weekday) % 7)
if (week_of_year == 0):
return ((1 + day_of_week) - first_weekday)
else:
days_to_week = (week_0_length + (7 * (week_of_year - 1)))
return ((1 + days_to_week) + day_of_week)
|
[
"def",
"_calc_julian_from_U_or_W",
"(",
"year",
",",
"week_of_year",
",",
"day_of_week",
",",
"week_starts_Mon",
")",
":",
"first_weekday",
"=",
"datetime_date",
"(",
"year",
",",
"1",
",",
"1",
")",
".",
"weekday",
"(",
")",
"if",
"(",
"not",
"week_starts_Mon",
")",
":",
"first_weekday",
"=",
"(",
"(",
"first_weekday",
"+",
"1",
")",
"%",
"7",
")",
"day_of_week",
"=",
"(",
"(",
"day_of_week",
"+",
"1",
")",
"%",
"7",
")",
"week_0_length",
"=",
"(",
"(",
"7",
"-",
"first_weekday",
")",
"%",
"7",
")",
"if",
"(",
"week_of_year",
"==",
"0",
")",
":",
"return",
"(",
"(",
"1",
"+",
"day_of_week",
")",
"-",
"first_weekday",
")",
"else",
":",
"days_to_week",
"=",
"(",
"week_0_length",
"+",
"(",
"7",
"*",
"(",
"week_of_year",
"-",
"1",
")",
")",
")",
"return",
"(",
"(",
"1",
"+",
"days_to_week",
")",
"+",
"day_of_week",
")"
] |
calculate the julian day based on the year .
|
train
| false
|
49,931
|
def pie_plot():
    """Draw a demo pie chart with four labelled slices and display it."""
    labels = ['Frogs', 'Hogs', 'Dogs', 'Logs']
    sizes = [15, 30, 45, 10]
    colors = ['yellowgreen', 'gold', 'lightskyblue', 'lightcoral']
    explode = [0, 0.05, 0, 0]  # pull the second slice slightly out
    plt.pie(sizes, explode=explode, labels=labels, colors=colors,
            autopct='%1.1f%%', shadow=True, startangle=90)
    plt.axis('equal')  # keep the pie circular
    plt.show()
    return
|
[
"def",
"pie_plot",
"(",
")",
":",
"sizes",
"=",
"[",
"15",
",",
"30",
",",
"45",
",",
"10",
"]",
"explode",
"=",
"[",
"0",
",",
"0.05",
",",
"0",
",",
"0",
"]",
"labels",
"=",
"[",
"'Frogs'",
",",
"'Hogs'",
",",
"'Dogs'",
",",
"'Logs'",
"]",
"colors",
"=",
"[",
"'yellowgreen'",
",",
"'gold'",
",",
"'lightskyblue'",
",",
"'lightcoral'",
"]",
"plt",
".",
"pie",
"(",
"sizes",
",",
"explode",
"=",
"explode",
",",
"labels",
"=",
"labels",
",",
"colors",
"=",
"colors",
",",
"autopct",
"=",
"'%1.1f%%'",
",",
"shadow",
"=",
"True",
",",
"startangle",
"=",
"90",
")",
"plt",
".",
"axis",
"(",
"'equal'",
")",
"plt",
".",
"show",
"(",
")",
"return"
] |
pie plot .
|
train
| false
|
49,932
|
def generate_process_statistics(collectl_playback_cli, pid, statistics=DEFAULT_STATISTICS):
    """Play back a collectl recording and summarize statistics for ``pid``.

    The playback CLI writes TSV output into a temporary file, which is then
    re-opened by name and parsed by ``_read_process_statistics``.

    NOTE(review): re-opening a still-open NamedTemporaryFile by name is
    POSIX-only behavior — presumably fine here, but will fail on Windows.
    """
    with tempfile.NamedTemporaryFile() as tmp_tsv:
        collectl_playback_cli.run(stdout=tmp_tsv)
        with open(tmp_tsv.name, 'r') as tsv_file:
            return _read_process_statistics(tsv_file, pid, statistics)
|
[
"def",
"generate_process_statistics",
"(",
"collectl_playback_cli",
",",
"pid",
",",
"statistics",
"=",
"DEFAULT_STATISTICS",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
")",
"as",
"tmp_tsv",
":",
"collectl_playback_cli",
".",
"run",
"(",
"stdout",
"=",
"tmp_tsv",
")",
"with",
"open",
"(",
"tmp_tsv",
".",
"name",
",",
"'r'",
")",
"as",
"tsv_file",
":",
"return",
"_read_process_statistics",
"(",
"tsv_file",
",",
"pid",
",",
"statistics",
")"
] |
playback collectl file and generate summary statistics .
|
train
| false
|
49,934
|
@with_open_mode('wb+')
@with_sizes('medium')
def read_modify_bytewise(f, source):
    """Benchmark: alternately read one byte and write the next byte of ``source``."""
    f.seek(0)
    for i in xrange(0, len(source), 2):
        f.read(1)
        # Write the byte following the one just read; slicing (rather than
        # indexing) avoids an IndexError on an odd-length source.
        f.write(source[(i + 1):(i + 2)])
|
[
"@",
"with_open_mode",
"(",
"'wb+'",
")",
"@",
"with_sizes",
"(",
"'medium'",
")",
"def",
"read_modify_bytewise",
"(",
"f",
",",
"source",
")",
":",
"f",
".",
"seek",
"(",
"0",
")",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"len",
"(",
"source",
")",
",",
"2",
")",
":",
"f",
".",
"read",
"(",
"1",
")",
"f",
".",
"write",
"(",
"source",
"[",
"(",
"i",
"+",
"1",
")",
":",
"(",
"i",
"+",
"2",
")",
"]",
")"
] |
alternate read & write one unit .
|
train
| false
|
49,935
|
def _get_date(data, position, dummy0, opts, dummy1):
    """Decode a BSON datetime (8-byte integer of millis) starting at ``position``.

    Returns a ``(datetime, new_position)`` tuple; ``dummy0``/``dummy1`` are
    unused slots kept for a uniform decoder signature.
    """
    end = (position + 8)
    millis = _UNPACK_LONG(data[position:end])[0]
    return (_millis_to_datetime(millis, opts), end)
|
[
"def",
"_get_date",
"(",
"data",
",",
"position",
",",
"dummy0",
",",
"opts",
",",
"dummy1",
")",
":",
"end",
"=",
"(",
"position",
"+",
"8",
")",
"millis",
"=",
"_UNPACK_LONG",
"(",
"data",
"[",
"position",
":",
"end",
"]",
")",
"[",
"0",
"]",
"return",
"(",
"_millis_to_datetime",
"(",
"millis",
",",
"opts",
")",
",",
"end",
")"
] |
decode a bson datetime to python datetime .
|
train
| true
|
49,936
|
def _CreateMSVSUserFile(proj_path, version, spec):
    """Create a writer for the MSVS .user file of the project at ``proj_path``.

    The filename is ``<proj_path>.<domain>.<username>.user`` so per-user
    settings do not collide. Returns the MSVSUserFile.Writer instance.
    """
    (domain, username) = _GetDomainAndUserName()
    vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
    user_file = MSVSUserFile.Writer(vcuser_filename, version, spec['target_name'])
    return user_file
|
[
"def",
"_CreateMSVSUserFile",
"(",
"proj_path",
",",
"version",
",",
"spec",
")",
":",
"(",
"domain",
",",
"username",
")",
"=",
"_GetDomainAndUserName",
"(",
")",
"vcuser_filename",
"=",
"'.'",
".",
"join",
"(",
"[",
"proj_path",
",",
"domain",
",",
"username",
",",
"'user'",
"]",
")",
"user_file",
"=",
"MSVSUserFile",
".",
"Writer",
"(",
"vcuser_filename",
",",
"version",
",",
"spec",
"[",
"'target_name'",
"]",
")",
"return",
"user_file"
] |
generates a .
|
train
| false
|
49,938
|
def walkfiles(folder, suffix=''):
    """Yield paths of every file under ``folder`` whose name ends with ``suffix``."""
    for base, _dirs, names in os.walk(folder):
        for name in names:
            if name.endswith(suffix):
                yield os.path.join(base, name)
|
[
"def",
"walkfiles",
"(",
"folder",
",",
"suffix",
"=",
"''",
")",
":",
"return",
"(",
"os",
".",
"path",
".",
"join",
"(",
"basename",
",",
"filename",
")",
"for",
"(",
"basename",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"folder",
")",
"for",
"filename",
"in",
"filenames",
"if",
"filename",
".",
"endswith",
"(",
"suffix",
")",
")"
] |
iterator over files in folder .
|
train
| false
|
49,939
|
def readAllClips():
    """Read and return every clip object from the Kippt account in ``settings``.

    Makes one request to learn the total clip count, then a second request
    fetching all clips at once with that count as the limit.
    """
    global settings
    k = Kippt(settings['username'], password=utils.getKeyChainPassword())
    clips = k.clips().all()
    count = clips['meta']['total_count']
    clips = k.clips().all(limit=count)
    return clips['objects']
|
[
"def",
"readAllClips",
"(",
")",
":",
"global",
"settings",
"k",
"=",
"Kippt",
"(",
"settings",
"[",
"'username'",
"]",
",",
"password",
"=",
"utils",
".",
"getKeyChainPassword",
"(",
")",
")",
"clips",
"=",
"k",
".",
"clips",
"(",
")",
".",
"all",
"(",
")",
"count",
"=",
"clips",
"[",
"'meta'",
"]",
"[",
"'total_count'",
"]",
"clips",
"=",
"k",
".",
"clips",
"(",
")",
".",
"all",
"(",
"limit",
"=",
"count",
")",
"return",
"clips",
"[",
"'objects'",
"]"
] |
reads all clips from kippt .
|
train
| false
|
49,940
|
def test_contains_inf():
    """Test that contains_inf detects +/-inf but ignores NaN and finite values."""
    arr = np.random.random(100)
    assert (not contains_inf(arr))
    # NaN alone must NOT count as infinity.
    arr[0] = np.nan
    assert (not contains_inf(arr))
    arr[1] = np.inf
    assert contains_inf(arr)
    arr[1] = (- np.inf)
    assert contains_inf(arr)
|
[
"def",
"test_contains_inf",
"(",
")",
":",
"arr",
"=",
"np",
".",
"random",
".",
"random",
"(",
"100",
")",
"assert",
"(",
"not",
"contains_inf",
"(",
"arr",
")",
")",
"arr",
"[",
"0",
"]",
"=",
"np",
".",
"nan",
"assert",
"(",
"not",
"contains_inf",
"(",
"arr",
")",
")",
"arr",
"[",
"1",
"]",
"=",
"np",
".",
"inf",
"assert",
"contains_inf",
"(",
"arr",
")",
"arr",
"[",
"1",
"]",
"=",
"(",
"-",
"np",
".",
"inf",
")",
"assert",
"contains_inf",
"(",
"arr",
")"
] |
tests that pylearn2 .
|
train
| false
|
49,941
|
def state_args(id_, state, high):
    """Return the set of argument names passed to ``state`` of ``id_`` in ``high``.

    Only single-key dict entries contribute an argument name; non-dict and
    multi-key entries are ignored. Returns an empty set when the id or state
    is absent.
    """
    found = set()
    if id_ not in high:
        return found
    if state not in high[id_]:
        return found
    for entry in high[id_][state]:
        if isinstance(entry, dict) and len(entry) == 1:
            found.add(next(iter(entry)))
    return found
|
[
"def",
"state_args",
"(",
"id_",
",",
"state",
",",
"high",
")",
":",
"args",
"=",
"set",
"(",
")",
"if",
"(",
"id_",
"not",
"in",
"high",
")",
":",
"return",
"args",
"if",
"(",
"state",
"not",
"in",
"high",
"[",
"id_",
"]",
")",
":",
"return",
"args",
"for",
"item",
"in",
"high",
"[",
"id_",
"]",
"[",
"state",
"]",
":",
"if",
"(",
"not",
"isinstance",
"(",
"item",
",",
"dict",
")",
")",
":",
"continue",
"if",
"(",
"len",
"(",
"item",
")",
"!=",
"1",
")",
":",
"continue",
"args",
".",
"add",
"(",
"next",
"(",
"iter",
"(",
"item",
")",
")",
")",
"return",
"args"
] |
return a set of the arguments passed to the named state .
|
train
| true
|
49,943
|
def _set_frame(frag):
frag.hit_frame = (((frag.hit_start % 3) + 1) * frag.hit_strand)
frag.query_frame = (((frag.query_start % 3) + 1) * frag.query_strand)
|
[
"def",
"_set_frame",
"(",
"frag",
")",
":",
"frag",
".",
"hit_frame",
"=",
"(",
"(",
"(",
"frag",
".",
"hit_start",
"%",
"3",
")",
"+",
"1",
")",
"*",
"frag",
".",
"hit_strand",
")",
"frag",
".",
"query_frame",
"=",
"(",
"(",
"(",
"frag",
".",
"query_start",
"%",
"3",
")",
"+",
"1",
")",
"*",
"frag",
".",
"query_strand",
")"
] |
sets the hspfragment frames .
|
train
| false
|
49,944
|
def getBatteryStats():
    """Return plyer's battery status, or None when plyer is missing or fails."""
    try:
        from plyer import battery
        status = battery.status
    except Exception:
        return None
    return status
|
[
"def",
"getBatteryStats",
"(",
")",
":",
"try",
":",
"from",
"plyer",
"import",
"battery",
"return",
"battery",
".",
"status",
"except",
"Exception",
"as",
"e",
":",
"return",
"None"
] |
returns none if an error returns {percentage: 99 .
|
train
| false
|
49,945
|
def authorized_http(credentials):
    """Return an HTTP client authorized with the given credentials.

    Uses google-auth's AuthorizedHttp when google-auth is importable and the
    credentials are google.auth Credentials; otherwise falls back to the
    oauth2client-style ``credentials.authorize()``.
    """
    if (HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials)):
        return google_auth_httplib2.AuthorizedHttp(credentials, http=build_http())
    else:
        return credentials.authorize(build_http())
|
[
"def",
"authorized_http",
"(",
"credentials",
")",
":",
"if",
"(",
"HAS_GOOGLE_AUTH",
"and",
"isinstance",
"(",
"credentials",
",",
"google",
".",
"auth",
".",
"credentials",
".",
"Credentials",
")",
")",
":",
"return",
"google_auth_httplib2",
".",
"AuthorizedHttp",
"(",
"credentials",
",",
"http",
"=",
"build_http",
"(",
")",
")",
"else",
":",
"return",
"credentials",
".",
"authorize",
"(",
"build_http",
"(",
")",
")"
] |
returns an http client that is authorized with the given credentials .
|
train
| false
|
49,946
|
def test_through_scale_change():
    """Check that the jd1/jd2 precision split survives a TT->TAI scale change."""
    t0 = Time(1.0, format='cxcsec')
    t1 = Time(123456789000.0, format='cxcsec')
    dt_tt = (t1 - t0)
    dt_tai = (t1.tai - t0.tai)
    # The difference must be identical whether computed before or after
    # converting both endpoints to the TAI scale.
    assert allclose_jd(dt_tt.jd1, dt_tai.jd1)
    assert allclose_jd2(dt_tt.jd2, dt_tai.jd2)
|
[
"def",
"test_through_scale_change",
"(",
")",
":",
"t0",
"=",
"Time",
"(",
"1.0",
",",
"format",
"=",
"'cxcsec'",
")",
"t1",
"=",
"Time",
"(",
"123456789000.0",
",",
"format",
"=",
"'cxcsec'",
")",
"dt_tt",
"=",
"(",
"t1",
"-",
"t0",
")",
"dt_tai",
"=",
"(",
"t1",
".",
"tai",
"-",
"t0",
".",
"tai",
")",
"assert",
"allclose_jd",
"(",
"dt_tt",
".",
"jd1",
",",
"dt_tai",
".",
"jd1",
")",
"assert",
"allclose_jd2",
"(",
"dt_tt",
".",
"jd2",
",",
"dt_tai",
".",
"jd2",
")"
] |
check that precision holds through scale change .
|
train
| false
|
49,947
|
def load_ssl_context(cert_file, pkey_file=None, protocol=None):
    """Build an SSL context from a certificate file and optional key file.

    ``protocol`` defaults to ``ssl.PROTOCOL_SSLv23`` (auto-negotiation).
    Returns the loaded context.
    """
    chosen = ssl.PROTOCOL_SSLv23 if protocol is None else protocol
    ctx = _SSLContext(chosen)
    ctx.load_cert_chain(cert_file, pkey_file)
    return ctx
|
[
"def",
"load_ssl_context",
"(",
"cert_file",
",",
"pkey_file",
"=",
"None",
",",
"protocol",
"=",
"None",
")",
":",
"if",
"(",
"protocol",
"is",
"None",
")",
":",
"protocol",
"=",
"ssl",
".",
"PROTOCOL_SSLv23",
"ctx",
"=",
"_SSLContext",
"(",
"protocol",
")",
"ctx",
".",
"load_cert_chain",
"(",
"cert_file",
",",
"pkey_file",
")",
"return",
"ctx"
] |
loads ssl context from cert/private key files and optional protocol .
|
train
| true
|
49,949
|
def render_snippet(template_name, **kw):
    """Render ``template_name`` with the 'snippet' renderer and return literal HTML.

    Keyword args become template variables; ``cache_force`` is popped out and
    forwarded separately. In debug mode the output is wrapped in HTML comments
    marking the snippet's start and end.
    """
    cache_force = kw.pop('cache_force', None)
    output = render(template_name, extra_vars=kw, cache_force=cache_force, renderer='snippet')
    if config.get('debug'):
        output = ('\n<!-- Snippet %s start -->\n%s\n<!-- Snippet %s end -->\n' % (template_name, output, template_name))
    return literal(output)
|
[
"def",
"render_snippet",
"(",
"template_name",
",",
"**",
"kw",
")",
":",
"cache_force",
"=",
"kw",
".",
"pop",
"(",
"'cache_force'",
",",
"None",
")",
"output",
"=",
"render",
"(",
"template_name",
",",
"extra_vars",
"=",
"kw",
",",
"cache_force",
"=",
"cache_force",
",",
"renderer",
"=",
"'snippet'",
")",
"if",
"config",
".",
"get",
"(",
"'debug'",
")",
":",
"output",
"=",
"(",
"'\\n<!-- Snippet %s start -->\\n%s\\n<!-- Snippet %s end -->\\n'",
"%",
"(",
"template_name",
",",
"output",
",",
"template_name",
")",
")",
"return",
"literal",
"(",
"output",
")"
] |
helper function for rendering snippets .
|
train
| false
|
49,950
|
def seq_to_list(seq):
    """Return ``seq`` as a list, wrapping non-list/tuple values in a 1-element list."""
    if isinstance(seq, (list, tuple)):
        return list(seq)
    return [seq]
|
[
"def",
"seq_to_list",
"(",
"seq",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"seq",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
":",
"return",
"[",
"seq",
"]",
"return",
"list",
"(",
"seq",
")"
] |
convert non-sequence to 1 element sequence .
|
train
| false
|
49,951
|
def get_masquerade(zone=None, permanent=True):
    """Return True if masquerading is enabled on the queried zone(s).

    Returns False if any zone reported by ``list_all`` has its masquerade
    flag set to 'no'.
    """
    zone_info = list_all(zone, permanent)
    if ('no' in [zone_info[i]['masquerade'][0] for i in zone_info.keys()]):
        return False
    return True
|
[
"def",
"get_masquerade",
"(",
"zone",
"=",
"None",
",",
"permanent",
"=",
"True",
")",
":",
"zone_info",
"=",
"list_all",
"(",
"zone",
",",
"permanent",
")",
"if",
"(",
"'no'",
"in",
"[",
"zone_info",
"[",
"i",
"]",
"[",
"'masquerade'",
"]",
"[",
"0",
"]",
"for",
"i",
"in",
"zone_info",
".",
"keys",
"(",
")",
"]",
")",
":",
"return",
"False",
"return",
"True"
] |
show if masquerading is enabled on a zone .
|
train
| true
|
49,952
|
def _map_redirects(mapper):
    """Add permanent redirects to the routes mapper, forwarding the query string.

    ``forward_qs`` copies every query-string parameter into the redirect
    result so the target URL receives the original parameters.
    """
    def forward_qs(environ, result):
        # Copy each query parameter into the redirect's result dict.
        qs_dict = parse_qs(environ['QUERY_STRING'])
        for qs in qs_dict:
            result[qs] = qs_dict[qs]
        return True
    mapper.redirect('/repository/status_for_installed_repository', '/api/repositories/updates/', _redirect_code='301 Moved Permanently', conditions=dict(function=forward_qs))
    return mapper
|
[
"def",
"_map_redirects",
"(",
"mapper",
")",
":",
"def",
"forward_qs",
"(",
"environ",
",",
"result",
")",
":",
"qs_dict",
"=",
"parse_qs",
"(",
"environ",
"[",
"'QUERY_STRING'",
"]",
")",
"for",
"qs",
"in",
"qs_dict",
":",
"result",
"[",
"qs",
"]",
"=",
"qs_dict",
"[",
"qs",
"]",
"return",
"True",
"mapper",
".",
"redirect",
"(",
"'/repository/status_for_installed_repository'",
",",
"'/api/repositories/updates/'",
",",
"_redirect_code",
"=",
"'301 Moved Permanently'",
",",
"conditions",
"=",
"dict",
"(",
"function",
"=",
"forward_qs",
")",
")",
"return",
"mapper"
] |
add redirect to the routes mapper and forward the received query string .
|
train
| false
|
49,953
|
def req_item_packs():
    """Return pack options (id, name, quantity) for a requested item as JSON.

    The req_item_id is taken from the first URL argument when it is a single
    numeric arg; otherwise it is searched for in request vars under any key
    ending in '.req_item_id' (as sent by S3OptionsFilter).
    """
    req_item_id = None
    args = request.args
    if ((len(args) == 1) and args[0].isdigit()):
        req_item_id = args[0]
    else:
        for v in request.vars:
            if (('.' in v) and (v.split('.', 1)[1] == 'req_item_id')):
                req_item_id = request.vars[v]
                break
    table = s3db.supply_item_pack
    ritable = s3db.req_req_item
    # Join supply_item_pack to the requested item via the shared item_id.
    query = ((ritable.id == req_item_id) & (ritable.item_id == table.item_id))
    response.headers['Content-Type'] = 'application/json'
    return db(query).select(table.id, table.name, table.quantity).json()
|
[
"def",
"req_item_packs",
"(",
")",
":",
"req_item_id",
"=",
"None",
"args",
"=",
"request",
".",
"args",
"if",
"(",
"(",
"len",
"(",
"args",
")",
"==",
"1",
")",
"and",
"args",
"[",
"0",
"]",
".",
"isdigit",
"(",
")",
")",
":",
"req_item_id",
"=",
"args",
"[",
"0",
"]",
"else",
":",
"for",
"v",
"in",
"request",
".",
"vars",
":",
"if",
"(",
"(",
"'.'",
"in",
"v",
")",
"and",
"(",
"v",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"[",
"1",
"]",
"==",
"'req_item_id'",
")",
")",
":",
"req_item_id",
"=",
"request",
".",
"vars",
"[",
"v",
"]",
"break",
"table",
"=",
"s3db",
".",
"supply_item_pack",
"ritable",
"=",
"s3db",
".",
"req_req_item",
"query",
"=",
"(",
"(",
"ritable",
".",
"id",
"==",
"req_item_id",
")",
"&",
"(",
"ritable",
".",
"item_id",
"==",
"table",
".",
"item_id",
")",
")",
"response",
".",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"'application/json'",
"return",
"db",
"(",
"query",
")",
".",
"select",
"(",
"table",
".",
"id",
",",
"table",
".",
"name",
",",
"table",
".",
"quantity",
")",
".",
"json",
"(",
")"
] |
called by s3optionsfilter to provide the pack options for an item access via the .
|
train
| false
|
49,954
|
def track_from_md5(md5, timeout=DEFAULT_ASYNC_TIMEOUT):
    """Create a track object by looking up its profile via an MD5 hash."""
    param_dict = dict(md5=md5)
    return _profile(param_dict, timeout)
|
[
"def",
"track_from_md5",
"(",
"md5",
",",
"timeout",
"=",
"DEFAULT_ASYNC_TIMEOUT",
")",
":",
"param_dict",
"=",
"dict",
"(",
"md5",
"=",
"md5",
")",
"return",
"_profile",
"(",
"param_dict",
",",
"timeout",
")"
] |
create a track object from an md5 hash .
|
train
| true
|
49,955
|
def set_configurator(previously, now):
    """Reconcile a previously selected plugin with a newly requested one.

    Returns ``previously`` when nothing new was requested, ``now`` otherwise.
    Setting the same value multiple ways is fine; a conflicting change raises
    PluginSelectionError.
    """
    if not now:
        return previously
    if previously and previously != now:
        msg = 'Too many flags setting configurators/installers/authenticators {0} -> {1}'
        raise errors.PluginSelectionError(msg.format(repr(previously), repr(now)))
    return now
|
[
"def",
"set_configurator",
"(",
"previously",
",",
"now",
")",
":",
"if",
"(",
"not",
"now",
")",
":",
"return",
"previously",
"if",
"previously",
":",
"if",
"(",
"previously",
"!=",
"now",
")",
":",
"msg",
"=",
"'Too many flags setting configurators/installers/authenticators {0} -> {1}'",
"raise",
"errors",
".",
"PluginSelectionError",
"(",
"msg",
".",
"format",
"(",
"repr",
"(",
"previously",
")",
",",
"repr",
"(",
"now",
")",
")",
")",
"return",
"now"
] |
setting configurators multiple ways is okay .
|
train
| false
|
49,956
|
def check_tree(tree):
    """Generate (level, message) warnings/errors for common mistakes in a CIX tree.

    ``tree`` must be a <codeintel> element of the supported CIX version; its
    first child is the <file> element. Yields ('error'|'warning', message)
    tuples rather than raising, so callers can report all problems at once.
    """
    assert (tree.tag == 'codeintel'), 'can only check starting from <codeintel> element'
    assert (tree.get('version') == CIX_VERSION), ('can only check CIX v%s trees' % CIX_VERSION)
    file = tree[0]
    if (not file.get('lang')):
        (yield ('error', "no 'lang' attr on <file> element"))
    if file.get('language'):
        (yield ('warning', "'language' attr on <file> element is obsolete,use 'lang'"))
    for blob in file:
        if (blob.get('ilk') != 'blob'):
            (yield ('error', ('element under <file> is not ilk=blob: %r' % blob)))
        if (not blob.get('lang')):
            (yield ('error', ("no 'lang' attr on <blob> element: %r" % blob)))
        # Class scopes: classrefs must be space-separated, not comma-separated.
        for class_elem in blob.getiterator('scope'):
            if (class_elem.get('ilk') != 'class'):
                continue
            classrefs = class_elem.get('classrefs')
            if (not classrefs):
                continue
            if (',' in classrefs):
                (yield ('warning', ("multiple class references in 'classrefs' attr on class scopes must be space-separated: %r may be using comma-separation: %r" % (class_elem, classrefs))))
|
[
"def",
"check_tree",
"(",
"tree",
")",
":",
"assert",
"(",
"tree",
".",
"tag",
"==",
"'codeintel'",
")",
",",
"'can only check starting from <codeintel> element'",
"assert",
"(",
"tree",
".",
"get",
"(",
"'version'",
")",
"==",
"CIX_VERSION",
")",
",",
"(",
"'can only check CIX v%s trees'",
"%",
"CIX_VERSION",
")",
"file",
"=",
"tree",
"[",
"0",
"]",
"if",
"(",
"not",
"file",
".",
"get",
"(",
"'lang'",
")",
")",
":",
"(",
"yield",
"(",
"'error'",
",",
"\"no 'lang' attr on <file> element\"",
")",
")",
"if",
"file",
".",
"get",
"(",
"'language'",
")",
":",
"(",
"yield",
"(",
"'warning'",
",",
"\"'language' attr on <file> element is obsolete,use 'lang'\"",
")",
")",
"for",
"blob",
"in",
"file",
":",
"if",
"(",
"blob",
".",
"get",
"(",
"'ilk'",
")",
"!=",
"'blob'",
")",
":",
"(",
"yield",
"(",
"'error'",
",",
"(",
"'element under <file> is not ilk=blob: %r'",
"%",
"blob",
")",
")",
")",
"if",
"(",
"not",
"blob",
".",
"get",
"(",
"'lang'",
")",
")",
":",
"(",
"yield",
"(",
"'error'",
",",
"(",
"\"no 'lang' attr on <blob> element: %r\"",
"%",
"blob",
")",
")",
")",
"for",
"class_elem",
"in",
"blob",
".",
"getiterator",
"(",
"'scope'",
")",
":",
"if",
"(",
"class_elem",
".",
"get",
"(",
"'ilk'",
")",
"!=",
"'class'",
")",
":",
"continue",
"classrefs",
"=",
"class_elem",
".",
"get",
"(",
"'classrefs'",
")",
"if",
"(",
"not",
"classrefs",
")",
":",
"continue",
"if",
"(",
"','",
"in",
"classrefs",
")",
":",
"(",
"yield",
"(",
"'warning'",
",",
"(",
"\"multiple class references in 'classrefs' attr on class scopes must be space-separated: %r may be using comma-separation: %r\"",
"%",
"(",
"class_elem",
",",
"classrefs",
")",
")",
")",
")"
] |
generate warnings/errors for common mistakes in cix trees .
|
train
| false
|
49,958
|
def compareLayerSequence(first, second):
    """cmp-style comparison: order by layerZoneIndex, then by sequenceIndex.

    Returns -1, 0 or 1 for ascending sort of skein panes.
    """
    if first.layerZoneIndex != second.layerZoneIndex:
        return -1 if first.layerZoneIndex < second.layerZoneIndex else 1
    if first.sequenceIndex < second.sequenceIndex:
        return -1
    return int(first.sequenceIndex > second.sequenceIndex)
|
[
"def",
"compareLayerSequence",
"(",
"first",
",",
"second",
")",
":",
"if",
"(",
"first",
".",
"layerZoneIndex",
"<",
"second",
".",
"layerZoneIndex",
")",
":",
"return",
"(",
"-",
"1",
")",
"if",
"(",
"first",
".",
"layerZoneIndex",
">",
"second",
".",
"layerZoneIndex",
")",
":",
"return",
"1",
"if",
"(",
"first",
".",
"sequenceIndex",
"<",
"second",
".",
"sequenceIndex",
")",
":",
"return",
"(",
"-",
"1",
")",
"return",
"int",
"(",
"(",
"first",
".",
"sequenceIndex",
">",
"second",
".",
"sequenceIndex",
")",
")"
] |
get comparison in order to sort skein panes in ascending order of layer zone index then sequence index .
|
train
| false
|
49,959
|
def replace_stdout():
    """Rebind sys.stdout with a 'backslashreplace' writer to avoid UnicodeEncodeError.

    Wraps the same underlying file descriptor (closefd=False keeps the real
    fd open) and registers an atexit hook that closes the wrapper and
    restores the original stdout.
    """
    stdout = sys.stdout
    sys.stdout = open(stdout.fileno(), 'w', encoding=stdout.encoding, errors='backslashreplace', closefd=False, newline='\n')
    def restore_stdout():
        sys.stdout.close()
        sys.stdout = stdout
    atexit.register(restore_stdout)
|
[
"def",
"replace_stdout",
"(",
")",
":",
"stdout",
"=",
"sys",
".",
"stdout",
"sys",
".",
"stdout",
"=",
"open",
"(",
"stdout",
".",
"fileno",
"(",
")",
",",
"'w'",
",",
"encoding",
"=",
"stdout",
".",
"encoding",
",",
"errors",
"=",
"'backslashreplace'",
",",
"closefd",
"=",
"False",
",",
"newline",
"=",
"'\\n'",
")",
"def",
"restore_stdout",
"(",
")",
":",
"sys",
".",
"stdout",
".",
"close",
"(",
")",
"sys",
".",
"stdout",
"=",
"stdout",
"atexit",
".",
"register",
"(",
"restore_stdout",
")"
] |
set stdout encoder error handler to backslashreplace to avoid unicodeencodeerror when printing a traceback .
|
train
| false
|
49,960
|
def subsample_fasta(input_fasta_fp, output_fp, percent_subsample):
    """Write a random subsample of sequences from a FASTA file.

    input_fasta_fp: input FASTA filepath.
    output_fp: output FASTA filepath.
    percent_subsample: fraction (0-1); each record is kept independently
        with this probability.
    """
    # The legacy 'U' (universal-newline) open mode was removed in Python
    # 3.11; plain text mode already handles newline translation. Context
    # managers ensure both files close even if parsing fails mid-stream.
    with open(input_fasta_fp, 'r') as input_fasta:
        with open(output_fp, 'w') as output_fasta:
            for (label, seq) in parse_fasta(input_fasta):
                if (random() < percent_subsample):
                    output_fasta.write(('>%s\n%s\n' % (label, seq)))
|
[
"def",
"subsample_fasta",
"(",
"input_fasta_fp",
",",
"output_fp",
",",
"percent_subsample",
")",
":",
"input_fasta",
"=",
"open",
"(",
"input_fasta_fp",
",",
"'U'",
")",
"output_fasta",
"=",
"open",
"(",
"output_fp",
",",
"'w'",
")",
"for",
"(",
"label",
",",
"seq",
")",
"in",
"parse_fasta",
"(",
"input_fasta",
")",
":",
"if",
"(",
"random",
"(",
")",
"<",
"percent_subsample",
")",
":",
"output_fasta",
".",
"write",
"(",
"(",
"'>%s\\n%s\\n'",
"%",
"(",
"label",
",",
"seq",
")",
")",
")",
"input_fasta",
".",
"close",
"(",
")",
"output_fasta",
".",
"close",
"(",
")"
] |
writes random percent_sample of sequences from input fasta filepath input_fasta_fp: input fasta filepath output_fp: output fasta filepath percent_subsample: percent of sequences to write .
|
train
| false
|
49,961
|
def _osquery_cmd(table, attrs=None, where=None, format='json'):
    """Build and run an osquery SELECT against ``table``.

    attrs: optional list of column names to select (validated against the
        table's schema); defaults to '*'.
    where: optional raw WHERE clause appended to the query.
    format: unused here; kept for interface compatibility with callers.

    Returns a dict with 'result' (bool) plus either 'data' on success or
    'comment' describing the validation/query error.
    """
    ret = {'result': True}
    if attrs:
        if isinstance(attrs, list):
            valid_attrs = _table_attrs(table)
            if valid_attrs:
                # Reject any requested column the table does not define.
                for a in attrs:
                    if (a not in valid_attrs):
                        ret['result'] = False
                        ret['comment'] = '{0} is not a valid attribute for table {1}'.format(a, table)
                        return ret
                _attrs = ','.join(attrs)
            else:
                ret['result'] = False
                ret['comment'] = 'Invalid table {0}.'.format(table)
                return ret
        else:
            ret['comment'] = 'attrs must be specified as a list.'
            ret['result'] = False
            return ret
    else:
        _attrs = '*'
    sql = 'select {0} from {1}'.format(_attrs, table)
    if where:
        sql = '{0} where {1}'.format(sql, where)
    sql = '{0};'.format(sql)
    res = _osquery(sql)
    if res['result']:
        ret['data'] = res['data']
    else:
        ret['comment'] = res['error']
    return ret
|
[
"def",
"_osquery_cmd",
"(",
"table",
",",
"attrs",
"=",
"None",
",",
"where",
"=",
"None",
",",
"format",
"=",
"'json'",
")",
":",
"ret",
"=",
"{",
"'result'",
":",
"True",
"}",
"if",
"attrs",
":",
"if",
"isinstance",
"(",
"attrs",
",",
"list",
")",
":",
"valid_attrs",
"=",
"_table_attrs",
"(",
"table",
")",
"if",
"valid_attrs",
":",
"for",
"a",
"in",
"attrs",
":",
"if",
"(",
"a",
"not",
"in",
"valid_attrs",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"'{0} is not a valid attribute for table {1}'",
".",
"format",
"(",
"a",
",",
"table",
")",
"return",
"ret",
"_attrs",
"=",
"','",
".",
"join",
"(",
"attrs",
")",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"'Invalid table {0}.'",
".",
"format",
"(",
"table",
")",
"return",
"ret",
"else",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'attrs must be specified as a list.'",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"return",
"ret",
"else",
":",
"_attrs",
"=",
"'*'",
"sql",
"=",
"'select {0} from {1}'",
".",
"format",
"(",
"_attrs",
",",
"table",
")",
"if",
"where",
":",
"sql",
"=",
"'{0} where {1}'",
".",
"format",
"(",
"sql",
",",
"where",
")",
"sql",
"=",
"'{0};'",
".",
"format",
"(",
"sql",
")",
"res",
"=",
"_osquery",
"(",
"sql",
")",
"if",
"res",
"[",
"'result'",
"]",
":",
"ret",
"[",
"'data'",
"]",
"=",
"res",
"[",
"'data'",
"]",
"else",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"res",
"[",
"'error'",
"]",
"return",
"ret"
] |
helper function to run osquery queries .
|
train
| true
|
49,962
|
def set_taxes(quotation, cart_settings):
    """Set taxes on a shopping-cart quotation based on the customer's party defaults.

    Looks up the customer's group, resolves the applicable taxes-and-charges
    template, clears any existing tax rows and re-appends them from the
    resolved master template.
    """
    from erpnext.accounts.party import set_taxes
    customer_group = frappe.db.get_value(u'Customer', quotation.customer, u'customer_group')
    quotation.taxes_and_charges = set_taxes(quotation.customer, u'Customer', quotation.transaction_date, quotation.company, customer_group, None, quotation.customer_address, quotation.shipping_address_name, 1)
    quotation.set(u'taxes', [])
    quotation.append_taxes_from_master()
|
[
"def",
"set_taxes",
"(",
"quotation",
",",
"cart_settings",
")",
":",
"from",
"erpnext",
".",
"accounts",
".",
"party",
"import",
"set_taxes",
"customer_group",
"=",
"frappe",
".",
"db",
".",
"get_value",
"(",
"u'Customer'",
",",
"quotation",
".",
"customer",
",",
"u'customer_group'",
")",
"quotation",
".",
"taxes_and_charges",
"=",
"set_taxes",
"(",
"quotation",
".",
"customer",
",",
"u'Customer'",
",",
"quotation",
".",
"transaction_date",
",",
"quotation",
".",
"company",
",",
"customer_group",
",",
"None",
",",
"quotation",
".",
"customer_address",
",",
"quotation",
".",
"shipping_address_name",
",",
"1",
")",
"quotation",
".",
"set",
"(",
"u'taxes'",
",",
"[",
"]",
")",
"quotation",
".",
"append_taxes_from_master",
"(",
")"
] |
set taxes based on billing territory .
|
train
| false
|
49,963
|
def get_unused_localhost_port():
    """Return a random unused TCP port on localhost.

    Binds port 0 to let the OS pick a free port, then recurses if the port
    collides with the test suite's reserved ports. On Darwin, sockets are
    kept open in the module-level _RUNTESTS_PORTS map to avoid handing out
    the same port twice; on Darwin/BSD the socket is closed before returning.
    """
    usock = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
    usock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    usock.bind(('127.0.0.1', 0))
    port = usock.getsockname()[1]
    # These ports are reserved for the test daemons themselves; pick again.
    if (port in (54505, 54506, 64505, 64506, 64510, 64511, 64520, 64521)):
        port = get_unused_localhost_port()
        usock.close()
        return port
    DARWIN = (True if sys.platform.startswith('darwin') else False)
    BSD = (True if ('bsd' in sys.platform) else False)
    if (DARWIN and (port in _RUNTESTS_PORTS)):
        # Already handed out this port during this run; pick another.
        port = get_unused_localhost_port()
        usock.close()
        return port
    _RUNTESTS_PORTS[port] = usock
    if (DARWIN or BSD):
        usock.close()
    return port
|
[
"def",
"get_unused_localhost_port",
"(",
")",
":",
"usock",
"=",
"socket",
".",
"socket",
"(",
"family",
"=",
"socket",
".",
"AF_INET",
",",
"type",
"=",
"socket",
".",
"SOCK_STREAM",
")",
"usock",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"usock",
".",
"bind",
"(",
"(",
"'127.0.0.1'",
",",
"0",
")",
")",
"port",
"=",
"usock",
".",
"getsockname",
"(",
")",
"[",
"1",
"]",
"if",
"(",
"port",
"in",
"(",
"54505",
",",
"54506",
",",
"64505",
",",
"64506",
",",
"64510",
",",
"64511",
",",
"64520",
",",
"64521",
")",
")",
":",
"port",
"=",
"get_unused_localhost_port",
"(",
")",
"usock",
".",
"close",
"(",
")",
"return",
"port",
"DARWIN",
"=",
"(",
"True",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'darwin'",
")",
"else",
"False",
")",
"BSD",
"=",
"(",
"True",
"if",
"(",
"'bsd'",
"in",
"sys",
".",
"platform",
")",
"else",
"False",
")",
"if",
"(",
"DARWIN",
"and",
"(",
"port",
"in",
"_RUNTESTS_PORTS",
")",
")",
":",
"port",
"=",
"get_unused_localhost_port",
"(",
")",
"usock",
".",
"close",
"(",
")",
"return",
"port",
"_RUNTESTS_PORTS",
"[",
"port",
"]",
"=",
"usock",
"if",
"(",
"DARWIN",
"or",
"BSD",
")",
":",
"usock",
".",
"close",
"(",
")",
"return",
"port"
] |
return a random unused port on localhost .
|
train
| false
|
49,964
|
def tar_and_gzip(dir, out_path, filter=None, prefix=''):
    """Tar and gzip *dir* into a tarball at *out_path* (deprecated).

    dir: directory to archive; symlinked directories are followed.
    out_path: path of the .tar.gz file to create.
    filter: optional predicate over each file's path relative to *dir*;
        only files for which it returns truthy are added.
    prefix: path prepended to each member name inside the archive.

    Raises:
        IOError: if *dir* is not a directory.
    """
    log.warning('tar_and_gzip() is deprecated and will be removed in v0.6.0')
    if (not os.path.isdir(dir)):
        raise IOError(('Not a directory: %r' % (dir,)))
    if (not filter):
        filter = (lambda path: True)
    # Context manager guarantees the tarball is closed (and its gzip stream
    # flushed) even if adding a member raises.
    with tarfile.open(out_path, mode='w:gz') as tar_gz:
        for (dirpath, dirnames, filenames) in os.walk(dir, followlinks=True):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                # Path relative to *dir*: strip '<dir>/' from the front.
                rel_path = path[len(os.path.join(dir, '')):]
                if filter(rel_path):
                    real_path = os.path.realpath(path)
                    path_in_tar_gz = os.path.join(prefix, rel_path)
                    tar_gz.add(real_path, arcname=path_in_tar_gz, recursive=False)
|
[
"def",
"tar_and_gzip",
"(",
"dir",
",",
"out_path",
",",
"filter",
"=",
"None",
",",
"prefix",
"=",
"''",
")",
":",
"log",
".",
"warning",
"(",
"'tar_and_gzip() is deprecated and will be removed in v0.6.0'",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"dir",
")",
")",
":",
"raise",
"IOError",
"(",
"(",
"'Not a directory: %r'",
"%",
"(",
"dir",
",",
")",
")",
")",
"if",
"(",
"not",
"filter",
")",
":",
"filter",
"=",
"(",
"lambda",
"path",
":",
"True",
")",
"tar_gz",
"=",
"tarfile",
".",
"open",
"(",
"out_path",
",",
"mode",
"=",
"'w:gz'",
")",
"for",
"(",
"dirpath",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"dir",
",",
"followlinks",
"=",
"True",
")",
":",
"for",
"filename",
"in",
"filenames",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"filename",
")",
"rel_path",
"=",
"path",
"[",
"len",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dir",
",",
"''",
")",
")",
":",
"]",
"if",
"filter",
"(",
"rel_path",
")",
":",
"real_path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"path",
")",
"path_in_tar_gz",
"=",
"os",
".",
"path",
".",
"join",
"(",
"prefix",
",",
"rel_path",
")",
"tar_gz",
".",
"add",
"(",
"real_path",
",",
"arcname",
"=",
"path_in_tar_gz",
",",
"recursive",
"=",
"False",
")",
"tar_gz",
".",
"close",
"(",
")"
] |
tar and gzip the given *dir* to a tarball at *out_path* .
|
train
| false
|
49,965
|
@cache_permission
def can_download_changes(user, project):
return check_permission(user, project, 'trans.download_changes')
|
[
"@",
"cache_permission",
"def",
"can_download_changes",
"(",
"user",
",",
"project",
")",
":",
"return",
"check_permission",
"(",
"user",
",",
"project",
",",
"'trans.download_changes'",
")"
] |
checks whether user can download csv for changes on given project .
|
train
| false
|
49,966
|
def test_dict_equality_lookup():
class x(object, ):
def __eq__(self, other):
return False
def __ne__(self, other):
return True
a = x()
d = {}
d[a] = 42
AreEqual(d[a], 42)
|
[
"def",
"test_dict_equality_lookup",
"(",
")",
":",
"class",
"x",
"(",
"object",
",",
")",
":",
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"return",
"False",
"def",
"__ne__",
"(",
"self",
",",
"other",
")",
":",
"return",
"True",
"a",
"=",
"x",
"(",
")",
"d",
"=",
"{",
"}",
"d",
"[",
"a",
"]",
"=",
"42",
"AreEqual",
"(",
"d",
"[",
"a",
"]",
",",
"42",
")"
] |
dictionaries check object equality before running normal equality .
|
train
| false
|
49,967
|
def get_unicode_index(symbol, math=True):
if (not math):
return ord(symbol)
if (symbol == u'-'):
return 8722
try:
return ord(symbol)
except TypeError:
pass
try:
return tex2uni[symbol.strip(u'\\')]
except KeyError:
message = (u"'%(symbol)s' is not a valid Unicode character or\nTeX/Type1 symbol" % locals())
raise ValueError(message)
|
[
"def",
"get_unicode_index",
"(",
"symbol",
",",
"math",
"=",
"True",
")",
":",
"if",
"(",
"not",
"math",
")",
":",
"return",
"ord",
"(",
"symbol",
")",
"if",
"(",
"symbol",
"==",
"u'-'",
")",
":",
"return",
"8722",
"try",
":",
"return",
"ord",
"(",
"symbol",
")",
"except",
"TypeError",
":",
"pass",
"try",
":",
"return",
"tex2uni",
"[",
"symbol",
".",
"strip",
"(",
"u'\\\\'",
")",
"]",
"except",
"KeyError",
":",
"message",
"=",
"(",
"u\"'%(symbol)s' is not a valid Unicode character or\\nTeX/Type1 symbol\"",
"%",
"locals",
"(",
")",
")",
"raise",
"ValueError",
"(",
"message",
")"
] |
get_unicode_index -> integer return the integer index of symbol .
|
train
| false
|
49,968
|
def get_ninja_json_file(path):
extension = '.json'
return get_ninja_file(path, extension, only_first=True)
|
[
"def",
"get_ninja_json_file",
"(",
"path",
")",
":",
"extension",
"=",
"'.json'",
"return",
"get_ninja_file",
"(",
"path",
",",
"extension",
",",
"only_first",
"=",
"True",
")"
] |
return the list of json files inside the directory: path .
|
train
| false
|
49,970
|
def _dummy_save_config():
return True
|
[
"def",
"_dummy_save_config",
"(",
")",
":",
"return",
"True"
] |
override the sickbeard save_config which gets called during a db upgrade .
|
train
| false
|
49,971
|
def idd_frm(n, w, x):
return _id.idd_frm(n, w, x)
|
[
"def",
"idd_frm",
"(",
"n",
",",
"w",
",",
"x",
")",
":",
"return",
"_id",
".",
"idd_frm",
"(",
"n",
",",
"w",
",",
"x",
")"
] |
transform real vector via a composition of rokhlins random transform .
|
train
| false
|
49,972
|
def buildMessageRegistry(seq, suffixes=[''], bases=(Message,)):
for (name, args) in sorted(seq):
for suffix in suffixes:
typename = (toTypeName(name) + suffix)
typens = {'__slots__': args, '__assoc__': name, 'typeName': name}
msgtype = type(typename, bases, typens)
if (name in registry):
registry[name] = (registry[name] + (msgtype,))
else:
registry[name] = (msgtype,)
|
[
"def",
"buildMessageRegistry",
"(",
"seq",
",",
"suffixes",
"=",
"[",
"''",
"]",
",",
"bases",
"=",
"(",
"Message",
",",
")",
")",
":",
"for",
"(",
"name",
",",
"args",
")",
"in",
"sorted",
"(",
"seq",
")",
":",
"for",
"suffix",
"in",
"suffixes",
":",
"typename",
"=",
"(",
"toTypeName",
"(",
"name",
")",
"+",
"suffix",
")",
"typens",
"=",
"{",
"'__slots__'",
":",
"args",
",",
"'__assoc__'",
":",
"name",
",",
"'typeName'",
":",
"name",
"}",
"msgtype",
"=",
"type",
"(",
"typename",
",",
"bases",
",",
"typens",
")",
"if",
"(",
"name",
"in",
"registry",
")",
":",
"registry",
"[",
"name",
"]",
"=",
"(",
"registry",
"[",
"name",
"]",
"+",
"(",
"msgtype",
",",
")",
")",
"else",
":",
"registry",
"[",
"name",
"]",
"=",
"(",
"msgtype",
",",
")"
] |
construct message types and add to given mapping .
|
train
| false
|
49,973
|
def wrap_exception(notifier=None, get_notifier=None, binary=None):
def inner(f):
def wrapped(self, context, *args, **kw):
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if (notifier or get_notifier):
call_dict = _get_call_dict(f, self, context, *args, **kw)
function_name = f.__name__
_emit_exception_notification((notifier or get_notifier()), context, e, function_name, call_dict, binary)
return functools.wraps(f)(wrapped)
return inner
|
[
"def",
"wrap_exception",
"(",
"notifier",
"=",
"None",
",",
"get_notifier",
"=",
"None",
",",
"binary",
"=",
"None",
")",
":",
"def",
"inner",
"(",
"f",
")",
":",
"def",
"wrapped",
"(",
"self",
",",
"context",
",",
"*",
"args",
",",
"**",
"kw",
")",
":",
"try",
":",
"return",
"f",
"(",
"self",
",",
"context",
",",
"*",
"args",
",",
"**",
"kw",
")",
"except",
"Exception",
"as",
"e",
":",
"with",
"excutils",
".",
"save_and_reraise_exception",
"(",
")",
":",
"if",
"(",
"notifier",
"or",
"get_notifier",
")",
":",
"call_dict",
"=",
"_get_call_dict",
"(",
"f",
",",
"self",
",",
"context",
",",
"*",
"args",
",",
"**",
"kw",
")",
"function_name",
"=",
"f",
".",
"__name__",
"_emit_exception_notification",
"(",
"(",
"notifier",
"or",
"get_notifier",
"(",
")",
")",
",",
"context",
",",
"e",
",",
"function_name",
",",
"call_dict",
",",
"binary",
")",
"return",
"functools",
".",
"wraps",
"(",
"f",
")",
"(",
"wrapped",
")",
"return",
"inner"
] |
this decorator wraps a method to catch any exceptions that may get thrown .
|
train
| false
|
49,975
|
def emerge_default_opts_contains(value):
return var_contains('EMERGE_DEFAULT_OPTS', value)
|
[
"def",
"emerge_default_opts_contains",
"(",
"value",
")",
":",
"return",
"var_contains",
"(",
"'EMERGE_DEFAULT_OPTS'",
",",
"value",
")"
] |
verify if emerge_default_opts variable contains a value in make .
|
train
| false
|
49,977
|
def create_treeitem(filename, staged=False, deleted=False, untracked=False):
icon_name = icons.status(filename, deleted, staged, untracked)
return TreeWidgetItem(filename, icons.name_from_basename(icon_name), deleted=deleted)
|
[
"def",
"create_treeitem",
"(",
"filename",
",",
"staged",
"=",
"False",
",",
"deleted",
"=",
"False",
",",
"untracked",
"=",
"False",
")",
":",
"icon_name",
"=",
"icons",
".",
"status",
"(",
"filename",
",",
"deleted",
",",
"staged",
",",
"untracked",
")",
"return",
"TreeWidgetItem",
"(",
"filename",
",",
"icons",
".",
"name_from_basename",
"(",
"icon_name",
")",
",",
"deleted",
"=",
"deleted",
")"
] |
given a filename .
|
train
| false
|
49,978
|
def get_method_attr(method, cls, attr_name, default=False):
Missing = object()
value = getattr(method, attr_name, Missing)
if ((value is Missing) and (cls is not None)):
value = getattr(cls, attr_name, Missing)
if (value is Missing):
return default
return value
|
[
"def",
"get_method_attr",
"(",
"method",
",",
"cls",
",",
"attr_name",
",",
"default",
"=",
"False",
")",
":",
"Missing",
"=",
"object",
"(",
")",
"value",
"=",
"getattr",
"(",
"method",
",",
"attr_name",
",",
"Missing",
")",
"if",
"(",
"(",
"value",
"is",
"Missing",
")",
"and",
"(",
"cls",
"is",
"not",
"None",
")",
")",
":",
"value",
"=",
"getattr",
"(",
"cls",
",",
"attr_name",
",",
"Missing",
")",
"if",
"(",
"value",
"is",
"Missing",
")",
":",
"return",
"default",
"return",
"value"
] |
look up an attribute on a method/ function .
|
train
| true
|
49,979
|
def compute_sigma_level(trace1, trace2, nbins=20):
(L, xbins, ybins) = np.histogram2d(trace1, trace2, nbins)
L[(L == 0)] = 1e-16
logL = np.log(L)
shape = L.shape
L = L.ravel()
i_sort = np.argsort(L)[::(-1)]
i_unsort = np.argsort(i_sort)
L_cumsum = L[i_sort].cumsum()
L_cumsum /= L_cumsum[(-1)]
xbins = (0.5 * (xbins[1:] + xbins[:(-1)]))
ybins = (0.5 * (ybins[1:] + ybins[:(-1)]))
return (xbins, ybins, L_cumsum[i_unsort].reshape(shape))
|
[
"def",
"compute_sigma_level",
"(",
"trace1",
",",
"trace2",
",",
"nbins",
"=",
"20",
")",
":",
"(",
"L",
",",
"xbins",
",",
"ybins",
")",
"=",
"np",
".",
"histogram2d",
"(",
"trace1",
",",
"trace2",
",",
"nbins",
")",
"L",
"[",
"(",
"L",
"==",
"0",
")",
"]",
"=",
"1e-16",
"logL",
"=",
"np",
".",
"log",
"(",
"L",
")",
"shape",
"=",
"L",
".",
"shape",
"L",
"=",
"L",
".",
"ravel",
"(",
")",
"i_sort",
"=",
"np",
".",
"argsort",
"(",
"L",
")",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
"i_unsort",
"=",
"np",
".",
"argsort",
"(",
"i_sort",
")",
"L_cumsum",
"=",
"L",
"[",
"i_sort",
"]",
".",
"cumsum",
"(",
")",
"L_cumsum",
"/=",
"L_cumsum",
"[",
"(",
"-",
"1",
")",
"]",
"xbins",
"=",
"(",
"0.5",
"*",
"(",
"xbins",
"[",
"1",
":",
"]",
"+",
"xbins",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
")",
"ybins",
"=",
"(",
"0.5",
"*",
"(",
"ybins",
"[",
"1",
":",
"]",
"+",
"ybins",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
")",
"return",
"(",
"xbins",
",",
"ybins",
",",
"L_cumsum",
"[",
"i_unsort",
"]",
".",
"reshape",
"(",
"shape",
")",
")"
] |
from a set of traces .
|
train
| false
|
49,980
|
def choose(a, choices, out=None, mode='raise'):
assert (out is None)
return Choose(mode)(a, choices)
|
[
"def",
"choose",
"(",
"a",
",",
"choices",
",",
"out",
"=",
"None",
",",
"mode",
"=",
"'raise'",
")",
":",
"assert",
"(",
"out",
"is",
"None",
")",
"return",
"Choose",
"(",
"mode",
")",
"(",
"a",
",",
"choices",
")"
] |
construct an array from an index array and a set of arrays to choose from .
|
train
| false
|
49,981
|
def monitored(name, device_class=None, collector='localhost', prod_state=None):
ret = {}
ret['name'] = name
device = __salt__['zenoss.find_device'](name)
if device:
ret['result'] = True
ret['changes'] = None
ret['comment'] = '{0} is already monitored'.format(name)
if prod_state:
if (device['productionState'] != prod_state):
__salt__['zenoss.set_prod_state'](prod_state, name)
ret['changes'] = {'old': 'prodState == {0}'.format(device['productionState']), 'new': 'prodState == {0}'.format(prod_state)}
ret['comment'] = '{0} is already monitored but prodState was incorrect, setting to Production'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'The state of "{0}" will be changed.'.format(name)
ret['changes'] = {'old': 'monitored == False', 'new': 'monitored == True'}
ret['result'] = None
return ret
if __salt__['zenoss.add_device'](name, device_class, collector, prod_state):
ret['result'] = True
ret['changes'] = {'old': 'monitored == False', 'new': 'monitored == True'}
ret['comment'] = '{0} has been added to Zenoss'.format(name)
else:
ret['result'] = False
ret['changes'] = None
ret['comment'] = 'Unable to add {0} to Zenoss'.format(name)
return ret
|
[
"def",
"monitored",
"(",
"name",
",",
"device_class",
"=",
"None",
",",
"collector",
"=",
"'localhost'",
",",
"prod_state",
"=",
"None",
")",
":",
"ret",
"=",
"{",
"}",
"ret",
"[",
"'name'",
"]",
"=",
"name",
"device",
"=",
"__salt__",
"[",
"'zenoss.find_device'",
"]",
"(",
"name",
")",
"if",
"device",
":",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'changes'",
"]",
"=",
"None",
"ret",
"[",
"'comment'",
"]",
"=",
"'{0} is already monitored'",
".",
"format",
"(",
"name",
")",
"if",
"prod_state",
":",
"if",
"(",
"device",
"[",
"'productionState'",
"]",
"!=",
"prod_state",
")",
":",
"__salt__",
"[",
"'zenoss.set_prod_state'",
"]",
"(",
"prod_state",
",",
"name",
")",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'old'",
":",
"'prodState == {0}'",
".",
"format",
"(",
"device",
"[",
"'productionState'",
"]",
")",
",",
"'new'",
":",
"'prodState == {0}'",
".",
"format",
"(",
"prod_state",
")",
"}",
"ret",
"[",
"'comment'",
"]",
"=",
"'{0} is already monitored but prodState was incorrect, setting to Production'",
".",
"format",
"(",
"name",
")",
"return",
"ret",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'The state of \"{0}\" will be changed.'",
".",
"format",
"(",
"name",
")",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'old'",
":",
"'monitored == False'",
",",
"'new'",
":",
"'monitored == True'",
"}",
"ret",
"[",
"'result'",
"]",
"=",
"None",
"return",
"ret",
"if",
"__salt__",
"[",
"'zenoss.add_device'",
"]",
"(",
"name",
",",
"device_class",
",",
"collector",
",",
"prod_state",
")",
":",
"ret",
"[",
"'result'",
"]",
"=",
"True",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'old'",
":",
"'monitored == False'",
",",
"'new'",
":",
"'monitored == True'",
"}",
"ret",
"[",
"'comment'",
"]",
"=",
"'{0} has been added to Zenoss'",
".",
"format",
"(",
"name",
")",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'changes'",
"]",
"=",
"None",
"ret",
"[",
"'comment'",
"]",
"=",
"'Unable to add {0} to Zenoss'",
".",
"format",
"(",
"name",
")",
"return",
"ret"
] |
ensure a device is monitored .
|
train
| true
|
49,982
|
def volume_data_get_for_project(context, project_id, session=None):
return IMPL.volume_data_get_for_project(context, project_id, session)
|
[
"def",
"volume_data_get_for_project",
"(",
"context",
",",
"project_id",
",",
"session",
"=",
"None",
")",
":",
"return",
"IMPL",
".",
"volume_data_get_for_project",
"(",
"context",
",",
"project_id",
",",
"session",
")"
] |
get for project .
|
train
| false
|
49,983
|
def _obj_to_dict(obj):
return {key: getattr(obj, key) for key in dir(obj) if ((key[0] != '_') and (not hasattr(getattr(obj, key), '__call__')))}
|
[
"def",
"_obj_to_dict",
"(",
"obj",
")",
":",
"return",
"{",
"key",
":",
"getattr",
"(",
"obj",
",",
"key",
")",
"for",
"key",
"in",
"dir",
"(",
"obj",
")",
"if",
"(",
"(",
"key",
"[",
"0",
"]",
"!=",
"'_'",
")",
"and",
"(",
"not",
"hasattr",
"(",
"getattr",
"(",
"obj",
",",
"key",
")",
",",
"'__call__'",
")",
")",
")",
"}"
] |
convert an object into a hash for debug .
|
train
| false
|
49,985
|
def getdefaulttimeout():
return _default_timeout
|
[
"def",
"getdefaulttimeout",
"(",
")",
":",
"return",
"_default_timeout"
] |
return the global timeout setting to connect .
|
train
| false
|
49,986
|
def test_defined(value):
return (not isinstance(value, Undefined))
|
[
"def",
"test_defined",
"(",
"value",
")",
":",
"return",
"(",
"not",
"isinstance",
"(",
"value",
",",
"Undefined",
")",
")"
] |
return true if the variable is defined: .
|
train
| false
|
49,988
|
def random_integer_partition(n, seed=None):
from sympy.utilities.randtest import _randint
n = as_int(n)
if (n < 1):
raise ValueError('n must be a positive integer')
randint = _randint(seed)
partition = []
while (n > 0):
k = randint(1, n)
mult = randint(1, (n // k))
partition.append((k, mult))
n -= (k * mult)
partition.sort(reverse=True)
partition = flatten([([k] * m) for (k, m) in partition])
return partition
|
[
"def",
"random_integer_partition",
"(",
"n",
",",
"seed",
"=",
"None",
")",
":",
"from",
"sympy",
".",
"utilities",
".",
"randtest",
"import",
"_randint",
"n",
"=",
"as_int",
"(",
"n",
")",
"if",
"(",
"n",
"<",
"1",
")",
":",
"raise",
"ValueError",
"(",
"'n must be a positive integer'",
")",
"randint",
"=",
"_randint",
"(",
"seed",
")",
"partition",
"=",
"[",
"]",
"while",
"(",
"n",
">",
"0",
")",
":",
"k",
"=",
"randint",
"(",
"1",
",",
"n",
")",
"mult",
"=",
"randint",
"(",
"1",
",",
"(",
"n",
"//",
"k",
")",
")",
"partition",
".",
"append",
"(",
"(",
"k",
",",
"mult",
")",
")",
"n",
"-=",
"(",
"k",
"*",
"mult",
")",
"partition",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"partition",
"=",
"flatten",
"(",
"[",
"(",
"[",
"k",
"]",
"*",
"m",
")",
"for",
"(",
"k",
",",
"m",
")",
"in",
"partition",
"]",
")",
"return",
"partition"
] |
generates a random integer partition summing to n as a list of reverse-sorted integers .
|
train
| false
|
49,989
|
def article(word, function=INDEFINITE):
return (((function == DEFINITE) and definite_article(word)) or indefinite_article(word))
|
[
"def",
"article",
"(",
"word",
",",
"function",
"=",
"INDEFINITE",
")",
":",
"return",
"(",
"(",
"(",
"function",
"==",
"DEFINITE",
")",
"and",
"definite_article",
"(",
"word",
")",
")",
"or",
"indefinite_article",
"(",
"word",
")",
")"
] |
returns the indefinite or definite article for the given word .
|
train
| false
|
49,992
|
def face(gray=False):
import bz2
import os
with open(os.path.join(os.path.dirname(__file__), 'face.dat'), 'rb') as f:
rawdata = f.read()
data = bz2.decompress(rawdata)
face = fromstring(data, dtype='uint8')
face.shape = (768, 1024, 3)
if (gray is True):
face = (((0.21 * face[:, :, 0]) + (0.71 * face[:, :, 1])) + (0.07 * face[:, :, 2])).astype('uint8')
return face
|
[
"def",
"face",
"(",
"gray",
"=",
"False",
")",
":",
"import",
"bz2",
"import",
"os",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'face.dat'",
")",
",",
"'rb'",
")",
"as",
"f",
":",
"rawdata",
"=",
"f",
".",
"read",
"(",
")",
"data",
"=",
"bz2",
".",
"decompress",
"(",
"rawdata",
")",
"face",
"=",
"fromstring",
"(",
"data",
",",
"dtype",
"=",
"'uint8'",
")",
"face",
".",
"shape",
"=",
"(",
"768",
",",
"1024",
",",
"3",
")",
"if",
"(",
"gray",
"is",
"True",
")",
":",
"face",
"=",
"(",
"(",
"(",
"0.21",
"*",
"face",
"[",
":",
",",
":",
",",
"0",
"]",
")",
"+",
"(",
"0.71",
"*",
"face",
"[",
":",
",",
":",
",",
"1",
"]",
")",
")",
"+",
"(",
"0.07",
"*",
"face",
"[",
":",
",",
":",
",",
"2",
"]",
")",
")",
".",
"astype",
"(",
"'uint8'",
")",
"return",
"face"
] |
get a 1024 x 768 .
|
train
| false
|
49,993
|
def justify(s, max_length, left_pad=0):
txt = textwrap.wrap(s, width=max_length, subsequent_indent=(' ' * left_pad))
return '\n'.join(txt)
|
[
"def",
"justify",
"(",
"s",
",",
"max_length",
",",
"left_pad",
"=",
"0",
")",
":",
"txt",
"=",
"textwrap",
".",
"wrap",
"(",
"s",
",",
"width",
"=",
"max_length",
",",
"subsequent_indent",
"=",
"(",
"' '",
"*",
"left_pad",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"txt",
")"
] |
re-wrap the string s so that each line is no more than max_length characters long .
|
train
| false
|
49,994
|
def bucket_load(self, *args, **kwargs):
response = self.meta.client.list_buckets()
for bucket_data in response['Buckets']:
if (bucket_data['Name'] == self.name):
self.meta.data = bucket_data
break
else:
raise ClientError({'Error': {'Code': '404', 'Message': 'NotFound'}}, 'ListBuckets')
|
[
"def",
"bucket_load",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"response",
"=",
"self",
".",
"meta",
".",
"client",
".",
"list_buckets",
"(",
")",
"for",
"bucket_data",
"in",
"response",
"[",
"'Buckets'",
"]",
":",
"if",
"(",
"bucket_data",
"[",
"'Name'",
"]",
"==",
"self",
".",
"name",
")",
":",
"self",
".",
"meta",
".",
"data",
"=",
"bucket_data",
"break",
"else",
":",
"raise",
"ClientError",
"(",
"{",
"'Error'",
":",
"{",
"'Code'",
":",
"'404'",
",",
"'Message'",
":",
"'NotFound'",
"}",
"}",
",",
"'ListBuckets'",
")"
] |
calls s3 .
|
train
| false
|
49,995
|
def generate_graphml(G, encoding='utf-8', prettyprint=True):
writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint)
writer.add_graph_element(G)
for line in str(writer).splitlines():
(yield line)
|
[
"def",
"generate_graphml",
"(",
"G",
",",
"encoding",
"=",
"'utf-8'",
",",
"prettyprint",
"=",
"True",
")",
":",
"writer",
"=",
"GraphMLWriter",
"(",
"encoding",
"=",
"encoding",
",",
"prettyprint",
"=",
"prettyprint",
")",
"writer",
".",
"add_graph_element",
"(",
"G",
")",
"for",
"line",
"in",
"str",
"(",
"writer",
")",
".",
"splitlines",
"(",
")",
":",
"(",
"yield",
"line",
")"
] |
generate graphml lines for g parameters g : graph a networkx graph encoding : string encoding for text data .
|
train
| false
|
49,996
|
def license():
from os.path import join
with open(join(__path__[0], 'LICENSE.txt')) as lic:
print(lic.read())
|
[
"def",
"license",
"(",
")",
":",
"from",
"os",
".",
"path",
"import",
"join",
"with",
"open",
"(",
"join",
"(",
"__path__",
"[",
"0",
"]",
",",
"'LICENSE.txt'",
")",
")",
"as",
"lic",
":",
"print",
"(",
"lic",
".",
"read",
"(",
")",
")"
] |
print the bokeh license to the console .
|
train
| true
|
49,997
|
def objective(layers, loss_function, target, aggregate=aggregate, deterministic=False, l1=0, l2=0, get_output_kw=None):
if (get_output_kw is None):
get_output_kw = {}
output_layer = layers[(-1)]
network_output = get_output(output_layer, deterministic=deterministic, **get_output_kw)
loss = aggregate(loss_function(network_output, target))
if l1:
loss += (regularization.regularize_layer_params(layers.values(), regularization.l1) * l1)
if l2:
loss += (regularization.regularize_layer_params(layers.values(), regularization.l2) * l2)
return loss
|
[
"def",
"objective",
"(",
"layers",
",",
"loss_function",
",",
"target",
",",
"aggregate",
"=",
"aggregate",
",",
"deterministic",
"=",
"False",
",",
"l1",
"=",
"0",
",",
"l2",
"=",
"0",
",",
"get_output_kw",
"=",
"None",
")",
":",
"if",
"(",
"get_output_kw",
"is",
"None",
")",
":",
"get_output_kw",
"=",
"{",
"}",
"output_layer",
"=",
"layers",
"[",
"(",
"-",
"1",
")",
"]",
"network_output",
"=",
"get_output",
"(",
"output_layer",
",",
"deterministic",
"=",
"deterministic",
",",
"**",
"get_output_kw",
")",
"loss",
"=",
"aggregate",
"(",
"loss_function",
"(",
"network_output",
",",
"target",
")",
")",
"if",
"l1",
":",
"loss",
"+=",
"(",
"regularization",
".",
"regularize_layer_params",
"(",
"layers",
".",
"values",
"(",
")",
",",
"regularization",
".",
"l1",
")",
"*",
"l1",
")",
"if",
"l2",
":",
"loss",
"+=",
"(",
"regularization",
".",
"regularize_layer_params",
"(",
"layers",
".",
"values",
"(",
")",
",",
"regularization",
".",
"l2",
")",
"*",
"l2",
")",
"return",
"loss"
] |
default implementation of the neuralnet objective .
|
train
| true
|
49,999
|
def _json_encode_datetime(o):
return {'isostr': o.strftime(_DATETIME_FORMAT)}
|
[
"def",
"_json_encode_datetime",
"(",
"o",
")",
":",
"return",
"{",
"'isostr'",
":",
"o",
".",
"strftime",
"(",
"_DATETIME_FORMAT",
")",
"}"
] |
json encode a datetime object .
|
train
| false
|
50,000
|
def custom_response_text_rewriter(raw_text, content_mime, remote_url):
raw_text = regex_ubb_img_rewriter.sub('<img src="\\g<image_url>" style="max-width: 100%;"></img>', raw_text)
regex_twitter_data_expanded.sub(demo__handle_expand_url, raw_text)
if (('search' in remote_url) and ((content_mime == 'text/html') or (content_mime == 'application/json'))):
raw_text = demo__google_result_open_in_new_tab(raw_text, content_mime)
raw_text = raw_text.replace('www.google-analytics.com/analytics.js', '')
if (content_mime == 'text/html'):
my_statistic_code = '<!--Your Own Statistic Code-->'
raw_text = raw_text.replace('</head>', (my_statistic_code + '</head>'), 1)
return raw_text
|
[
"def",
"custom_response_text_rewriter",
"(",
"raw_text",
",",
"content_mime",
",",
"remote_url",
")",
":",
"raw_text",
"=",
"regex_ubb_img_rewriter",
".",
"sub",
"(",
"'<img src=\"\\\\g<image_url>\" style=\"max-width: 100%;\"></img>'",
",",
"raw_text",
")",
"regex_twitter_data_expanded",
".",
"sub",
"(",
"demo__handle_expand_url",
",",
"raw_text",
")",
"if",
"(",
"(",
"'search'",
"in",
"remote_url",
")",
"and",
"(",
"(",
"content_mime",
"==",
"'text/html'",
")",
"or",
"(",
"content_mime",
"==",
"'application/json'",
")",
")",
")",
":",
"raw_text",
"=",
"demo__google_result_open_in_new_tab",
"(",
"raw_text",
",",
"content_mime",
")",
"raw_text",
"=",
"raw_text",
".",
"replace",
"(",
"'www.google-analytics.com/analytics.js'",
",",
"''",
")",
"if",
"(",
"content_mime",
"==",
"'text/html'",
")",
":",
"my_statistic_code",
"=",
"'<!--Your Own Statistic Code-->'",
"raw_text",
"=",
"raw_text",
".",
"replace",
"(",
"'</head>'",
",",
"(",
"my_statistic_code",
"+",
"'</head>'",
")",
",",
"1",
")",
"return",
"raw_text"
] |
allow you do some custom modifications/rewrites to the response content .
|
train
| false
|
50,001
|
@contextmanager
def logger_disabled(name=None):
log = logging.getLogger(name)
was_disabled = log.disabled
log.disabled = True
try:
(yield)
finally:
log.disabled = was_disabled
|
[
"@",
"contextmanager",
"def",
"logger_disabled",
"(",
"name",
"=",
"None",
")",
":",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"name",
")",
"was_disabled",
"=",
"log",
".",
"disabled",
"log",
".",
"disabled",
"=",
"True",
"try",
":",
"(",
"yield",
")",
"finally",
":",
"log",
".",
"disabled",
"=",
"was_disabled"
] |
temporarily disable a logger .
|
train
| false
|
50,006
|
def integrate_result(orig_expr, result, **options):
if (not isinstance(result, Expr)):
return result
options['replace_none'] = True
if (not ('basis' in options)):
arg = orig_expr.args[(-1)]
options['basis'] = get_basis(arg, **options)
elif (not isinstance(options['basis'], StateBase)):
options['basis'] = get_basis(orig_expr, **options)
basis = options.pop('basis', None)
if (basis is None):
return result
unities = options.pop('unities', [])
if (len(unities) == 0):
return result
kets = enumerate_states(basis, unities)
coords = [k.label[0] for k in kets]
for coord in coords:
if (coord in result.free_symbols):
basis_op = state_to_operators(basis)
start = basis_op.hilbert_space.interval.start
end = basis_op.hilbert_space.interval.end
result = integrate(result, (coord, start, end))
return result
|
[
"def",
"integrate_result",
"(",
"orig_expr",
",",
"result",
",",
"**",
"options",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"result",
",",
"Expr",
")",
")",
":",
"return",
"result",
"options",
"[",
"'replace_none'",
"]",
"=",
"True",
"if",
"(",
"not",
"(",
"'basis'",
"in",
"options",
")",
")",
":",
"arg",
"=",
"orig_expr",
".",
"args",
"[",
"(",
"-",
"1",
")",
"]",
"options",
"[",
"'basis'",
"]",
"=",
"get_basis",
"(",
"arg",
",",
"**",
"options",
")",
"elif",
"(",
"not",
"isinstance",
"(",
"options",
"[",
"'basis'",
"]",
",",
"StateBase",
")",
")",
":",
"options",
"[",
"'basis'",
"]",
"=",
"get_basis",
"(",
"orig_expr",
",",
"**",
"options",
")",
"basis",
"=",
"options",
".",
"pop",
"(",
"'basis'",
",",
"None",
")",
"if",
"(",
"basis",
"is",
"None",
")",
":",
"return",
"result",
"unities",
"=",
"options",
".",
"pop",
"(",
"'unities'",
",",
"[",
"]",
")",
"if",
"(",
"len",
"(",
"unities",
")",
"==",
"0",
")",
":",
"return",
"result",
"kets",
"=",
"enumerate_states",
"(",
"basis",
",",
"unities",
")",
"coords",
"=",
"[",
"k",
".",
"label",
"[",
"0",
"]",
"for",
"k",
"in",
"kets",
"]",
"for",
"coord",
"in",
"coords",
":",
"if",
"(",
"coord",
"in",
"result",
".",
"free_symbols",
")",
":",
"basis_op",
"=",
"state_to_operators",
"(",
"basis",
")",
"start",
"=",
"basis_op",
".",
"hilbert_space",
".",
"interval",
".",
"start",
"end",
"=",
"basis_op",
".",
"hilbert_space",
".",
"interval",
".",
"end",
"result",
"=",
"integrate",
"(",
"result",
",",
"(",
"coord",
",",
"start",
",",
"end",
")",
")",
"return",
"result"
] |
returns the result of integrating over any unities in the given expression .
|
train
| false
|
50,007
|
def test_pip_wheel_fails_without_wheel(script, data):
result = script.pip('wheel', '--no-index', '-f', data.find_links, 'simple==3.0', expect_error=True)
assert ("'pip wheel' requires the 'wheel' package" in result.stderr)
|
[
"def",
"test_pip_wheel_fails_without_wheel",
"(",
"script",
",",
"data",
")",
":",
"result",
"=",
"script",
".",
"pip",
"(",
"'wheel'",
",",
"'--no-index'",
",",
"'-f'",
",",
"data",
".",
"find_links",
",",
"'simple==3.0'",
",",
"expect_error",
"=",
"True",
")",
"assert",
"(",
"\"'pip wheel' requires the 'wheel' package\"",
"in",
"result",
".",
"stderr",
")"
] |
test pip wheel fails without wheel .
|
train
| false
|
50,008
|
def _example_short_number(region_code):
metadata = PhoneMetadata.short_metadata_for_region(region_code)
if (metadata is None):
return U_EMPTY_STRING
desc = metadata.short_code
if (desc.example_number is not None):
return desc.example_number
return U_EMPTY_STRING
|
[
"def",
"_example_short_number",
"(",
"region_code",
")",
":",
"metadata",
"=",
"PhoneMetadata",
".",
"short_metadata_for_region",
"(",
"region_code",
")",
"if",
"(",
"metadata",
"is",
"None",
")",
":",
"return",
"U_EMPTY_STRING",
"desc",
"=",
"metadata",
".",
"short_code",
"if",
"(",
"desc",
".",
"example_number",
"is",
"not",
"None",
")",
":",
"return",
"desc",
".",
"example_number",
"return",
"U_EMPTY_STRING"
] |
gets a valid short number for the specified region .
|
train
| true
|
50,012
|
def apply_target(target, args=(), kwargs={}, callback=None, accept_callback=None, pid=None, getpid=os.getpid, propagate=(), monotonic=monotonic, **_):
if accept_callback:
accept_callback((pid or getpid()), monotonic())
try:
ret = target(*args, **kwargs)
except propagate:
raise
except Exception:
raise
except (WorkerShutdown, WorkerTerminate):
raise
except BaseException as exc:
try:
reraise(WorkerLostError, WorkerLostError(repr(exc)), sys.exc_info()[2])
except WorkerLostError:
callback(ExceptionInfo())
else:
callback(ret)
|
[
"def",
"apply_target",
"(",
"target",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"{",
"}",
",",
"callback",
"=",
"None",
",",
"accept_callback",
"=",
"None",
",",
"pid",
"=",
"None",
",",
"getpid",
"=",
"os",
".",
"getpid",
",",
"propagate",
"=",
"(",
")",
",",
"monotonic",
"=",
"monotonic",
",",
"**",
"_",
")",
":",
"if",
"accept_callback",
":",
"accept_callback",
"(",
"(",
"pid",
"or",
"getpid",
"(",
")",
")",
",",
"monotonic",
"(",
")",
")",
"try",
":",
"ret",
"=",
"target",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"except",
"propagate",
":",
"raise",
"except",
"Exception",
":",
"raise",
"except",
"(",
"WorkerShutdown",
",",
"WorkerTerminate",
")",
":",
"raise",
"except",
"BaseException",
"as",
"exc",
":",
"try",
":",
"reraise",
"(",
"WorkerLostError",
",",
"WorkerLostError",
"(",
"repr",
"(",
"exc",
")",
")",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
")",
"except",
"WorkerLostError",
":",
"callback",
"(",
"ExceptionInfo",
"(",
")",
")",
"else",
":",
"callback",
"(",
"ret",
")"
] |
apply function within pool context .
|
train
| false
|
50,015
|
def _keep(msg, account):
if msg._deleted:
return False
if (msg._spam and (msg.author_id != account._id)):
return False
if (msg.author_id in account.enemies):
return False
if (isinstance(msg, Message) and (msg.to_id == account._id) and msg.del_on_recipient):
return False
if (msg.author_id == account._id):
return False
return True
|
[
"def",
"_keep",
"(",
"msg",
",",
"account",
")",
":",
"if",
"msg",
".",
"_deleted",
":",
"return",
"False",
"if",
"(",
"msg",
".",
"_spam",
"and",
"(",
"msg",
".",
"author_id",
"!=",
"account",
".",
"_id",
")",
")",
":",
"return",
"False",
"if",
"(",
"msg",
".",
"author_id",
"in",
"account",
".",
"enemies",
")",
":",
"return",
"False",
"if",
"(",
"isinstance",
"(",
"msg",
",",
"Message",
")",
"and",
"(",
"msg",
".",
"to_id",
"==",
"account",
".",
"_id",
")",
"and",
"msg",
".",
"del_on_recipient",
")",
":",
"return",
"False",
"if",
"(",
"msg",
".",
"author_id",
"==",
"account",
".",
"_id",
")",
":",
"return",
"False",
"return",
"True"
] |
adapted from listingcontroller .
|
train
| false
|
50,018
|
def browser(honor_time=True, max_time=2, mobile_browser=False, user_agent=None, use_robust_parser=False, verify_ssl_certificates=True):
from calibre.utils.browser import Browser
if use_robust_parser:
import mechanize
opener = Browser(factory=mechanize.RobustFactory(), verify_ssl=verify_ssl_certificates)
else:
opener = Browser(verify_ssl=verify_ssl_certificates)
opener.set_handle_refresh(True, max_time=max_time, honor_time=honor_time)
opener.set_handle_robots(False)
if (user_agent is None):
user_agent = (USER_AGENT_MOBILE if mobile_browser else USER_AGENT)
opener.addheaders = [('User-agent', user_agent)]
proxies = get_proxies()
to_add = {}
http_proxy = proxies.get('http', None)
if http_proxy:
to_add['http'] = http_proxy
https_proxy = proxies.get('https', None)
if https_proxy:
to_add['https'] = https_proxy
if to_add:
opener.set_proxies(to_add)
return opener
|
[
"def",
"browser",
"(",
"honor_time",
"=",
"True",
",",
"max_time",
"=",
"2",
",",
"mobile_browser",
"=",
"False",
",",
"user_agent",
"=",
"None",
",",
"use_robust_parser",
"=",
"False",
",",
"verify_ssl_certificates",
"=",
"True",
")",
":",
"from",
"calibre",
".",
"utils",
".",
"browser",
"import",
"Browser",
"if",
"use_robust_parser",
":",
"import",
"mechanize",
"opener",
"=",
"Browser",
"(",
"factory",
"=",
"mechanize",
".",
"RobustFactory",
"(",
")",
",",
"verify_ssl",
"=",
"verify_ssl_certificates",
")",
"else",
":",
"opener",
"=",
"Browser",
"(",
"verify_ssl",
"=",
"verify_ssl_certificates",
")",
"opener",
".",
"set_handle_refresh",
"(",
"True",
",",
"max_time",
"=",
"max_time",
",",
"honor_time",
"=",
"honor_time",
")",
"opener",
".",
"set_handle_robots",
"(",
"False",
")",
"if",
"(",
"user_agent",
"is",
"None",
")",
":",
"user_agent",
"=",
"(",
"USER_AGENT_MOBILE",
"if",
"mobile_browser",
"else",
"USER_AGENT",
")",
"opener",
".",
"addheaders",
"=",
"[",
"(",
"'User-agent'",
",",
"user_agent",
")",
"]",
"proxies",
"=",
"get_proxies",
"(",
")",
"to_add",
"=",
"{",
"}",
"http_proxy",
"=",
"proxies",
".",
"get",
"(",
"'http'",
",",
"None",
")",
"if",
"http_proxy",
":",
"to_add",
"[",
"'http'",
"]",
"=",
"http_proxy",
"https_proxy",
"=",
"proxies",
".",
"get",
"(",
"'https'",
",",
"None",
")",
"if",
"https_proxy",
":",
"to_add",
"[",
"'https'",
"]",
"=",
"https_proxy",
"if",
"to_add",
":",
"opener",
".",
"set_proxies",
"(",
"to_add",
")",
"return",
"opener"
] |
create a mechanize browser for web scraping .
|
train
| false
|
50,019
|
def read_charlist(filename):
with open(filename) as datafile:
charlist = []
for line in datafile:
if ('#' in line):
line = line[:line.index('#')]
line = line.strip()
if (not line):
continue
if line.startswith('U+'):
line = line[2:]
char = int(line, 16)
charlist.append(char)
return charlist
|
[
"def",
"read_charlist",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"datafile",
":",
"charlist",
"=",
"[",
"]",
"for",
"line",
"in",
"datafile",
":",
"if",
"(",
"'#'",
"in",
"line",
")",
":",
"line",
"=",
"line",
"[",
":",
"line",
".",
"index",
"(",
"'#'",
")",
"]",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"(",
"not",
"line",
")",
":",
"continue",
"if",
"line",
".",
"startswith",
"(",
"'U+'",
")",
":",
"line",
"=",
"line",
"[",
"2",
":",
"]",
"char",
"=",
"int",
"(",
"line",
",",
"16",
")",
"charlist",
".",
"append",
"(",
"char",
")",
"return",
"charlist"
] |
returns a list of characters read from a charset text file .
|
train
| false
|
50,020
|
def _binning(values, limits=(0, 0), bin_num=10):
if (limits == (0, 0)):
(min_val, max_val) = ((min(values) - _EPS), (max(values) + _EPS))
else:
(min_val, max_val) = limits
bin_size = ((max_val - min_val) / float(bin_num))
bins = ([0] * bin_num)
out_points = 0
for value in values:
try:
if ((value - min_val) < 0):
out_points += 1
else:
index = int(((value - min_val) / float(bin_size)))
bins[index] += 1
except IndexError:
out_points += 1
result = []
center = ((bin_size / 2) + min_val)
for (i, y) in enumerate(bins):
x = (center + (bin_size * i))
result.append((x, y))
return result
|
[
"def",
"_binning",
"(",
"values",
",",
"limits",
"=",
"(",
"0",
",",
"0",
")",
",",
"bin_num",
"=",
"10",
")",
":",
"if",
"(",
"limits",
"==",
"(",
"0",
",",
"0",
")",
")",
":",
"(",
"min_val",
",",
"max_val",
")",
"=",
"(",
"(",
"min",
"(",
"values",
")",
"-",
"_EPS",
")",
",",
"(",
"max",
"(",
"values",
")",
"+",
"_EPS",
")",
")",
"else",
":",
"(",
"min_val",
",",
"max_val",
")",
"=",
"limits",
"bin_size",
"=",
"(",
"(",
"max_val",
"-",
"min_val",
")",
"/",
"float",
"(",
"bin_num",
")",
")",
"bins",
"=",
"(",
"[",
"0",
"]",
"*",
"bin_num",
")",
"out_points",
"=",
"0",
"for",
"value",
"in",
"values",
":",
"try",
":",
"if",
"(",
"(",
"value",
"-",
"min_val",
")",
"<",
"0",
")",
":",
"out_points",
"+=",
"1",
"else",
":",
"index",
"=",
"int",
"(",
"(",
"(",
"value",
"-",
"min_val",
")",
"/",
"float",
"(",
"bin_size",
")",
")",
")",
"bins",
"[",
"index",
"]",
"+=",
"1",
"except",
"IndexError",
":",
"out_points",
"+=",
"1",
"result",
"=",
"[",
"]",
"center",
"=",
"(",
"(",
"bin_size",
"/",
"2",
")",
"+",
"min_val",
")",
"for",
"(",
"i",
",",
"y",
")",
"in",
"enumerate",
"(",
"bins",
")",
":",
"x",
"=",
"(",
"center",
"+",
"(",
"bin_size",
"*",
"i",
")",
")",
"result",
".",
"append",
"(",
"(",
"x",
",",
"y",
")",
")",
"return",
"result"
] |
bins data that falls between certain limits .
|
train
| true
|
50,022
|
@pytest.mark.network
def test_cleanup_after_install_editable_from_hg(script, tmpdir):
script.pip('install', '-e', ('%s#egg=ScriptTest' % local_checkout('hg+https://bitbucket.org/ianb/scripttest', tmpdir.join('cache'))), expect_error=True)
build = (script.venv_path / 'build')
src = (script.venv_path / 'src')
assert (not exists(build)), ('build/ dir still exists: %s' % build)
assert exists(src), ("expected src/ dir doesn't exist: %s" % src)
script.assert_no_temp()
|
[
"@",
"pytest",
".",
"mark",
".",
"network",
"def",
"test_cleanup_after_install_editable_from_hg",
"(",
"script",
",",
"tmpdir",
")",
":",
"script",
".",
"pip",
"(",
"'install'",
",",
"'-e'",
",",
"(",
"'%s#egg=ScriptTest'",
"%",
"local_checkout",
"(",
"'hg+https://bitbucket.org/ianb/scripttest'",
",",
"tmpdir",
".",
"join",
"(",
"'cache'",
")",
")",
")",
",",
"expect_error",
"=",
"True",
")",
"build",
"=",
"(",
"script",
".",
"venv_path",
"/",
"'build'",
")",
"src",
"=",
"(",
"script",
".",
"venv_path",
"/",
"'src'",
")",
"assert",
"(",
"not",
"exists",
"(",
"build",
")",
")",
",",
"(",
"'build/ dir still exists: %s'",
"%",
"build",
")",
"assert",
"exists",
"(",
"src",
")",
",",
"(",
"\"expected src/ dir doesn't exist: %s\"",
"%",
"src",
")",
"script",
".",
"assert_no_temp",
"(",
")"
] |
test clean up after cloning from mercurial .
|
train
| false
|
50,023
|
def organization_patch(context, data_dict):
_check_access('organization_patch', context, data_dict)
show_context = {'model': context['model'], 'session': context['session'], 'user': context['user'], 'auth_user_obj': context['auth_user_obj']}
organization_dict = _get_action('organization_show')(show_context, {'id': _get_or_bust(data_dict, 'id')})
patched = dict(organization_dict)
patched.pop('display_name', None)
patched.update(data_dict)
return _update.organization_update(context, patched)
|
[
"def",
"organization_patch",
"(",
"context",
",",
"data_dict",
")",
":",
"_check_access",
"(",
"'organization_patch'",
",",
"context",
",",
"data_dict",
")",
"show_context",
"=",
"{",
"'model'",
":",
"context",
"[",
"'model'",
"]",
",",
"'session'",
":",
"context",
"[",
"'session'",
"]",
",",
"'user'",
":",
"context",
"[",
"'user'",
"]",
",",
"'auth_user_obj'",
":",
"context",
"[",
"'auth_user_obj'",
"]",
"}",
"organization_dict",
"=",
"_get_action",
"(",
"'organization_show'",
")",
"(",
"show_context",
",",
"{",
"'id'",
":",
"_get_or_bust",
"(",
"data_dict",
",",
"'id'",
")",
"}",
")",
"patched",
"=",
"dict",
"(",
"organization_dict",
")",
"patched",
".",
"pop",
"(",
"'display_name'",
",",
"None",
")",
"patched",
".",
"update",
"(",
"data_dict",
")",
"return",
"_update",
".",
"organization_update",
"(",
"context",
",",
"patched",
")"
] |
patch an organization .
|
train
| false
|
50,025
|
def get_period_names(width='wide', context='stand-alone', locale=LC_TIME):
return Locale.parse(locale).day_periods[context][width]
|
[
"def",
"get_period_names",
"(",
"width",
"=",
"'wide'",
",",
"context",
"=",
"'stand-alone'",
",",
"locale",
"=",
"LC_TIME",
")",
":",
"return",
"Locale",
".",
"parse",
"(",
"locale",
")",
".",
"day_periods",
"[",
"context",
"]",
"[",
"width",
"]"
] |
return the names for day periods used by the locale .
|
train
| false
|
50,026
|
def shell_process(cmds, env=None, **kwds):
sys = kwds.get('sys', _sys)
popen_kwds = dict(shell=True)
if ((kwds.get('stdout', None) is None) and redirecting_io(sys=sys)):
popen_kwds['stdout'] = subprocess.PIPE
if ((kwds.get('stderr', None) is None) and redirecting_io(sys=sys)):
popen_kwds['stderr'] = subprocess.PIPE
popen_kwds.update(**kwds)
if env:
new_env = os.environ.copy()
new_env.update(env)
popen_kwds['env'] = new_env
p = subprocess.Popen(cmds, **popen_kwds)
return p
|
[
"def",
"shell_process",
"(",
"cmds",
",",
"env",
"=",
"None",
",",
"**",
"kwds",
")",
":",
"sys",
"=",
"kwds",
".",
"get",
"(",
"'sys'",
",",
"_sys",
")",
"popen_kwds",
"=",
"dict",
"(",
"shell",
"=",
"True",
")",
"if",
"(",
"(",
"kwds",
".",
"get",
"(",
"'stdout'",
",",
"None",
")",
"is",
"None",
")",
"and",
"redirecting_io",
"(",
"sys",
"=",
"sys",
")",
")",
":",
"popen_kwds",
"[",
"'stdout'",
"]",
"=",
"subprocess",
".",
"PIPE",
"if",
"(",
"(",
"kwds",
".",
"get",
"(",
"'stderr'",
",",
"None",
")",
"is",
"None",
")",
"and",
"redirecting_io",
"(",
"sys",
"=",
"sys",
")",
")",
":",
"popen_kwds",
"[",
"'stderr'",
"]",
"=",
"subprocess",
".",
"PIPE",
"popen_kwds",
".",
"update",
"(",
"**",
"kwds",
")",
"if",
"env",
":",
"new_env",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"new_env",
".",
"update",
"(",
"env",
")",
"popen_kwds",
"[",
"'env'",
"]",
"=",
"new_env",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"cmds",
",",
"**",
"popen_kwds",
")",
"return",
"p"
] |
a high-level method wrapping subprocess .
|
train
| false
|
50,027
|
def TR15(rv, max=4, pow=False):
def f(rv):
if (not (isinstance(rv, Pow) and (rv.base.func is sin))):
return rv
ia = (1 / rv)
a = _TR56(ia, sin, cot, (lambda x: (1 + x)), max=max, pow=pow)
if (a != ia):
rv = a
return rv
return bottom_up(rv, f)
|
[
"def",
"TR15",
"(",
"rv",
",",
"max",
"=",
"4",
",",
"pow",
"=",
"False",
")",
":",
"def",
"f",
"(",
"rv",
")",
":",
"if",
"(",
"not",
"(",
"isinstance",
"(",
"rv",
",",
"Pow",
")",
"and",
"(",
"rv",
".",
"base",
".",
"func",
"is",
"sin",
")",
")",
")",
":",
"return",
"rv",
"ia",
"=",
"(",
"1",
"/",
"rv",
")",
"a",
"=",
"_TR56",
"(",
"ia",
",",
"sin",
",",
"cot",
",",
"(",
"lambda",
"x",
":",
"(",
"1",
"+",
"x",
")",
")",
",",
"max",
"=",
"max",
",",
"pow",
"=",
"pow",
")",
"if",
"(",
"a",
"!=",
"ia",
")",
":",
"rv",
"=",
"a",
"return",
"rv",
"return",
"bottom_up",
"(",
"rv",
",",
"f",
")"
] |
convert sin(x)*-2 to 1 + cot(x)**2 .
|
train
| false
|
50,028
|
def _yes(context):
if ('chocolatey._yes' in __context__):
return context['chocolatey._yes']
if (_LooseVersion(chocolatey_version()) >= _LooseVersion('0.9.9')):
answer = ['--yes']
else:
answer = []
context['chocolatey._yes'] = answer
return answer
|
[
"def",
"_yes",
"(",
"context",
")",
":",
"if",
"(",
"'chocolatey._yes'",
"in",
"__context__",
")",
":",
"return",
"context",
"[",
"'chocolatey._yes'",
"]",
"if",
"(",
"_LooseVersion",
"(",
"chocolatey_version",
"(",
")",
")",
">=",
"_LooseVersion",
"(",
"'0.9.9'",
")",
")",
":",
"answer",
"=",
"[",
"'--yes'",
"]",
"else",
":",
"answer",
"=",
"[",
"]",
"context",
"[",
"'chocolatey._yes'",
"]",
"=",
"answer",
"return",
"answer"
] |
returns [--yes] if on v0 .
|
train
| true
|
50,029
|
def _report_invalid_cookie(data):
logging.error('invalid Cookie: %r', data)
|
[
"def",
"_report_invalid_cookie",
"(",
"data",
")",
":",
"logging",
".",
"error",
"(",
"'invalid Cookie: %r'",
",",
"data",
")"
] |
how this module logs a bad cookie when exception suppressed .
|
train
| false
|
50,030
|
def regularize_locales(locales):
locales = [regularize_locale(loc) for loc in locales]
locales_set = set(locales)
for loc in locales:
(yield loc)
parts = loc.split(u'_')
if ((len(parts) > 1) and (parts[0] not in locales_set)):
(yield parts[0])
alias = ALIASES.get(loc)
if (alias and (alias not in locales_set)):
(yield alias)
if ((u'en' not in locales_set) and (u'en_us' not in locales_set)):
(yield u'en')
(yield u'en_us')
|
[
"def",
"regularize_locales",
"(",
"locales",
")",
":",
"locales",
"=",
"[",
"regularize_locale",
"(",
"loc",
")",
"for",
"loc",
"in",
"locales",
"]",
"locales_set",
"=",
"set",
"(",
"locales",
")",
"for",
"loc",
"in",
"locales",
":",
"(",
"yield",
"loc",
")",
"parts",
"=",
"loc",
".",
"split",
"(",
"u'_'",
")",
"if",
"(",
"(",
"len",
"(",
"parts",
")",
">",
"1",
")",
"and",
"(",
"parts",
"[",
"0",
"]",
"not",
"in",
"locales_set",
")",
")",
":",
"(",
"yield",
"parts",
"[",
"0",
"]",
")",
"alias",
"=",
"ALIASES",
".",
"get",
"(",
"loc",
")",
"if",
"(",
"alias",
"and",
"(",
"alias",
"not",
"in",
"locales_set",
")",
")",
":",
"(",
"yield",
"alias",
")",
"if",
"(",
"(",
"u'en'",
"not",
"in",
"locales_set",
")",
"and",
"(",
"u'en_us'",
"not",
"in",
"locales_set",
")",
")",
":",
"(",
"yield",
"u'en'",
")",
"(",
"yield",
"u'en_us'",
")"
] |
yield locale strings in the same format as they are in locales .
|
train
| false
|
50,031
|
def activate_sandbox():
from google.appengine.tools.devappserver2.python import sandbox
for name in list(sys.modules):
if (name in sandbox.dist27.MODULE_OVERRIDES):
del sys.modules[name]
sys.meta_path.insert(0, sandbox.StubModuleImportHook())
sys.path_importer_cache = {}
|
[
"def",
"activate_sandbox",
"(",
")",
":",
"from",
"google",
".",
"appengine",
".",
"tools",
".",
"devappserver2",
".",
"python",
"import",
"sandbox",
"for",
"name",
"in",
"list",
"(",
"sys",
".",
"modules",
")",
":",
"if",
"(",
"name",
"in",
"sandbox",
".",
"dist27",
".",
"MODULE_OVERRIDES",
")",
":",
"del",
"sys",
".",
"modules",
"[",
"name",
"]",
"sys",
".",
"meta_path",
".",
"insert",
"(",
"0",
",",
"sandbox",
".",
"StubModuleImportHook",
"(",
")",
")",
"sys",
".",
"path_importer_cache",
"=",
"{",
"}"
] |
enables parts of the gae sandbox that are relevant .
|
train
| false
|
50,032
|
def _get_file_list(load):
if ('env' in load):
salt.utils.warn_until('Oxygen', "Parameter 'env' has been detected in the argument list. This parameter is no longer used and has been replaced by 'saltenv' as of Salt 2016.11.0. This warning will be removed in Salt Oxygen.")
load.pop('env')
if (('saltenv' not in load) or (load['saltenv'] not in envs())):
return []
ret = set()
for repo in init():
repo['repo'].open()
ref = _get_ref(repo, load['saltenv'])
if ref:
manifest = repo['repo'].manifest(rev=ref[1])
for tup in manifest:
relpath = os.path.relpath(tup[4], repo['root'])
if (not relpath.startswith('../')):
ret.add(os.path.join(repo['mountpoint'], relpath))
repo['repo'].close()
return sorted(ret)
|
[
"def",
"_get_file_list",
"(",
"load",
")",
":",
"if",
"(",
"'env'",
"in",
"load",
")",
":",
"salt",
".",
"utils",
".",
"warn_until",
"(",
"'Oxygen'",
",",
"\"Parameter 'env' has been detected in the argument list. This parameter is no longer used and has been replaced by 'saltenv' as of Salt 2016.11.0. This warning will be removed in Salt Oxygen.\"",
")",
"load",
".",
"pop",
"(",
"'env'",
")",
"if",
"(",
"(",
"'saltenv'",
"not",
"in",
"load",
")",
"or",
"(",
"load",
"[",
"'saltenv'",
"]",
"not",
"in",
"envs",
"(",
")",
")",
")",
":",
"return",
"[",
"]",
"ret",
"=",
"set",
"(",
")",
"for",
"repo",
"in",
"init",
"(",
")",
":",
"repo",
"[",
"'repo'",
"]",
".",
"open",
"(",
")",
"ref",
"=",
"_get_ref",
"(",
"repo",
",",
"load",
"[",
"'saltenv'",
"]",
")",
"if",
"ref",
":",
"manifest",
"=",
"repo",
"[",
"'repo'",
"]",
".",
"manifest",
"(",
"rev",
"=",
"ref",
"[",
"1",
"]",
")",
"for",
"tup",
"in",
"manifest",
":",
"relpath",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"tup",
"[",
"4",
"]",
",",
"repo",
"[",
"'root'",
"]",
")",
"if",
"(",
"not",
"relpath",
".",
"startswith",
"(",
"'../'",
")",
")",
":",
"ret",
".",
"add",
"(",
"os",
".",
"path",
".",
"join",
"(",
"repo",
"[",
"'mountpoint'",
"]",
",",
"relpath",
")",
")",
"repo",
"[",
"'repo'",
"]",
".",
"close",
"(",
")",
"return",
"sorted",
"(",
"ret",
")"
] |
get a list of all files on the file server in a specified environment .
|
train
| true
|
50,033
|
def ParseBool(text):
if (text in ('true', 't', '1')):
return True
elif (text in ('false', 'f', '0')):
return False
else:
raise ValueError('Expected "true" or "false".')
|
[
"def",
"ParseBool",
"(",
"text",
")",
":",
"if",
"(",
"text",
"in",
"(",
"'true'",
",",
"'t'",
",",
"'1'",
")",
")",
":",
"return",
"True",
"elif",
"(",
"text",
"in",
"(",
"'false'",
",",
"'f'",
",",
"'0'",
")",
")",
":",
"return",
"False",
"else",
":",
"raise",
"ValueError",
"(",
"'Expected \"true\" or \"false\".'",
")"
] |
parse a boolean value .
|
train
| false
|
50,035
|
def parse_anotations():
functions = {}
function = None
for line in open(__file__, 'rt').readlines():
if (('=' * 40) in line):
break
if line.startswith('def '):
name = line.split(' ')[1].split('(')[0]
args = line.split('(')[1].split(')')[0].split(', ')
args = [arg for arg in args if arg]
out = line.partition('->')[2].strip()
function = FunctionAnnotation(name, args, out)
functions[name] = function
continue
elif (not function):
continue
line = line.rstrip()
indent = (len(line) - len(line.strip()))
if (line.strip() and (indent >= 4)):
function.lines.append(line)
return functions
|
[
"def",
"parse_anotations",
"(",
")",
":",
"functions",
"=",
"{",
"}",
"function",
"=",
"None",
"for",
"line",
"in",
"open",
"(",
"__file__",
",",
"'rt'",
")",
".",
"readlines",
"(",
")",
":",
"if",
"(",
"(",
"'='",
"*",
"40",
")",
"in",
"line",
")",
":",
"break",
"if",
"line",
".",
"startswith",
"(",
"'def '",
")",
":",
"name",
"=",
"line",
".",
"split",
"(",
"' '",
")",
"[",
"1",
"]",
".",
"split",
"(",
"'('",
")",
"[",
"0",
"]",
"args",
"=",
"line",
".",
"split",
"(",
"'('",
")",
"[",
"1",
"]",
".",
"split",
"(",
"')'",
")",
"[",
"0",
"]",
".",
"split",
"(",
"', '",
")",
"args",
"=",
"[",
"arg",
"for",
"arg",
"in",
"args",
"if",
"arg",
"]",
"out",
"=",
"line",
".",
"partition",
"(",
"'->'",
")",
"[",
"2",
"]",
".",
"strip",
"(",
")",
"function",
"=",
"FunctionAnnotation",
"(",
"name",
",",
"args",
",",
"out",
")",
"functions",
"[",
"name",
"]",
"=",
"function",
"continue",
"elif",
"(",
"not",
"function",
")",
":",
"continue",
"line",
"=",
"line",
".",
"rstrip",
"(",
")",
"indent",
"=",
"(",
"len",
"(",
"line",
")",
"-",
"len",
"(",
"line",
".",
"strip",
"(",
")",
")",
")",
"if",
"(",
"line",
".",
"strip",
"(",
")",
"and",
"(",
"indent",
">=",
"4",
")",
")",
":",
"function",
".",
"lines",
".",
"append",
"(",
"line",
")",
"return",
"functions"
] |
parse this annotations file and produce a dictionary of functionannotation objects .
|
train
| false
|
50,036
|
def test_abort_typeerror(question, qtbot, mocker, caplog):
signal_mock = mocker.patch('qutebrowser.utils.usertypes.Question.aborted')
signal_mock.emit.side_effect = TypeError
with caplog.at_level(logging.ERROR, 'misc'):
question.abort()
assert (caplog.records[0].message == 'Error while aborting question')
|
[
"def",
"test_abort_typeerror",
"(",
"question",
",",
"qtbot",
",",
"mocker",
",",
"caplog",
")",
":",
"signal_mock",
"=",
"mocker",
".",
"patch",
"(",
"'qutebrowser.utils.usertypes.Question.aborted'",
")",
"signal_mock",
".",
"emit",
".",
"side_effect",
"=",
"TypeError",
"with",
"caplog",
".",
"at_level",
"(",
"logging",
".",
"ERROR",
",",
"'misc'",
")",
":",
"question",
".",
"abort",
"(",
")",
"assert",
"(",
"caplog",
".",
"records",
"[",
"0",
"]",
".",
"message",
"==",
"'Error while aborting question'",
")"
] |
test question .
|
train
| false
|
50,037
|
def direct_head_object(node, part, account, container, obj, conn_timeout=5, response_timeout=15):
path = ('/%s/%s/%s' % (account, container, obj))
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part, 'HEAD', path)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if (not is_success(resp.status)):
raise ClientException(('Object server %s:%s direct HEAD %s gave status %s' % (node['ip'], node['port'], repr(('/%s/%s%s' % (node['device'], part, path))), resp.status)), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason)
resp_headers = {}
for (header, value) in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
|
[
"def",
"direct_head_object",
"(",
"node",
",",
"part",
",",
"account",
",",
"container",
",",
"obj",
",",
"conn_timeout",
"=",
"5",
",",
"response_timeout",
"=",
"15",
")",
":",
"path",
"=",
"(",
"'/%s/%s/%s'",
"%",
"(",
"account",
",",
"container",
",",
"obj",
")",
")",
"with",
"Timeout",
"(",
"conn_timeout",
")",
":",
"conn",
"=",
"http_connect",
"(",
"node",
"[",
"'ip'",
"]",
",",
"node",
"[",
"'port'",
"]",
",",
"node",
"[",
"'device'",
"]",
",",
"part",
",",
"'HEAD'",
",",
"path",
")",
"with",
"Timeout",
"(",
"response_timeout",
")",
":",
"resp",
"=",
"conn",
".",
"getresponse",
"(",
")",
"resp",
".",
"read",
"(",
")",
"if",
"(",
"not",
"is_success",
"(",
"resp",
".",
"status",
")",
")",
":",
"raise",
"ClientException",
"(",
"(",
"'Object server %s:%s direct HEAD %s gave status %s'",
"%",
"(",
"node",
"[",
"'ip'",
"]",
",",
"node",
"[",
"'port'",
"]",
",",
"repr",
"(",
"(",
"'/%s/%s%s'",
"%",
"(",
"node",
"[",
"'device'",
"]",
",",
"part",
",",
"path",
")",
")",
")",
",",
"resp",
".",
"status",
")",
")",
",",
"http_host",
"=",
"node",
"[",
"'ip'",
"]",
",",
"http_port",
"=",
"node",
"[",
"'port'",
"]",
",",
"http_device",
"=",
"node",
"[",
"'device'",
"]",
",",
"http_status",
"=",
"resp",
".",
"status",
",",
"http_reason",
"=",
"resp",
".",
"reason",
")",
"resp_headers",
"=",
"{",
"}",
"for",
"(",
"header",
",",
"value",
")",
"in",
"resp",
".",
"getheaders",
"(",
")",
":",
"resp_headers",
"[",
"header",
".",
"lower",
"(",
")",
"]",
"=",
"value",
"return",
"resp_headers"
] |
request object information directly from the object server .
|
train
| false
|
50,038
|
@pytest.fixture
def moderator_user(user, forum, default_groups):
user = User(username='test_mod', email='test_mod@example.org', password='test', primary_group=default_groups[2], activated=True)
user.save()
forum.moderators.append(user)
forum.save()
return user
|
[
"@",
"pytest",
".",
"fixture",
"def",
"moderator_user",
"(",
"user",
",",
"forum",
",",
"default_groups",
")",
":",
"user",
"=",
"User",
"(",
"username",
"=",
"'test_mod'",
",",
"email",
"=",
"'test_mod@example.org'",
",",
"password",
"=",
"'test'",
",",
"primary_group",
"=",
"default_groups",
"[",
"2",
"]",
",",
"activated",
"=",
"True",
")",
"user",
".",
"save",
"(",
")",
"forum",
".",
"moderators",
".",
"append",
"(",
"user",
")",
"forum",
".",
"save",
"(",
")",
"return",
"user"
] |
creates a test user with moderator permissions .
|
train
| false
|
50,039
|
def lazy(func, *resultclasses):
@total_ordering
class __proxy__(Promise, ):
'\n Encapsulate a function call and act as a proxy for methods that are\n called on the result of that function. The function is not evaluated\n until one of the methods on the result is called.\n '
__dispatch = None
def __init__(self, args, kw):
self.__args = args
self.__kw = kw
if (self.__dispatch is None):
self.__prepare_class__()
def __reduce__(self):
return (_lazy_proxy_unpickle, ((func, self.__args, self.__kw) + resultclasses))
@classmethod
def __prepare_class__(cls):
cls.__dispatch = {}
for resultclass in resultclasses:
cls.__dispatch[resultclass] = {}
for type_ in reversed(resultclass.mro()):
for (k, v) in type_.__dict__.items():
meth = cls.__promise__(resultclass, k, v)
if hasattr(cls, k):
continue
setattr(cls, k, meth)
cls._delegate_bytes = (bytes in resultclasses)
cls._delegate_text = (six.text_type in resultclasses)
assert (not (cls._delegate_bytes and cls._delegate_text)), 'Cannot call lazy() with both bytes and text return types.'
if cls._delegate_text:
if six.PY3:
cls.__str__ = cls.__text_cast
else:
cls.__unicode__ = cls.__text_cast
elif cls._delegate_bytes:
if six.PY3:
cls.__bytes__ = cls.__bytes_cast
else:
cls.__str__ = cls.__bytes_cast
@classmethod
def __promise__(cls, klass, funcname, method):
def __wrapper__(self, *args, **kw):
res = func(*self.__args, **self.__kw)
for t in type(res).mro():
if (t in self.__dispatch):
return self.__dispatch[t][funcname](res, *args, **kw)
raise TypeError('Lazy object returned unexpected type.')
if (klass not in cls.__dispatch):
cls.__dispatch[klass] = {}
cls.__dispatch[klass][funcname] = method
return __wrapper__
def __text_cast(self):
return func(*self.__args, **self.__kw)
def __bytes_cast(self):
return bytes(func(*self.__args, **self.__kw))
def __cast(self):
if self._delegate_bytes:
return self.__bytes_cast()
elif self._delegate_text:
return self.__text_cast()
else:
return func(*self.__args, **self.__kw)
def __ne__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return (self.__cast() != other)
def __eq__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return (self.__cast() == other)
def __lt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return (self.__cast() < other)
def __hash__(self):
return hash(self.__cast())
def __mod__(self, rhs):
if (self._delegate_bytes and six.PY2):
return (bytes(self) % rhs)
elif self._delegate_text:
return (six.text_type(self) % rhs)
return (self.__cast() % rhs)
def __deepcopy__(self, memo):
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
return __proxy__(args, kw)
return __wrapper__
|
[
"def",
"lazy",
"(",
"func",
",",
"*",
"resultclasses",
")",
":",
"@",
"total_ordering",
"class",
"__proxy__",
"(",
"Promise",
",",
")",
":",
"__dispatch",
"=",
"None",
"def",
"__init__",
"(",
"self",
",",
"args",
",",
"kw",
")",
":",
"self",
".",
"__args",
"=",
"args",
"self",
".",
"__kw",
"=",
"kw",
"if",
"(",
"self",
".",
"__dispatch",
"is",
"None",
")",
":",
"self",
".",
"__prepare_class__",
"(",
")",
"def",
"__reduce__",
"(",
"self",
")",
":",
"return",
"(",
"_lazy_proxy_unpickle",
",",
"(",
"(",
"func",
",",
"self",
".",
"__args",
",",
"self",
".",
"__kw",
")",
"+",
"resultclasses",
")",
")",
"@",
"classmethod",
"def",
"__prepare_class__",
"(",
"cls",
")",
":",
"cls",
".",
"__dispatch",
"=",
"{",
"}",
"for",
"resultclass",
"in",
"resultclasses",
":",
"cls",
".",
"__dispatch",
"[",
"resultclass",
"]",
"=",
"{",
"}",
"for",
"type_",
"in",
"reversed",
"(",
"resultclass",
".",
"mro",
"(",
")",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"type_",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"meth",
"=",
"cls",
".",
"__promise__",
"(",
"resultclass",
",",
"k",
",",
"v",
")",
"if",
"hasattr",
"(",
"cls",
",",
"k",
")",
":",
"continue",
"setattr",
"(",
"cls",
",",
"k",
",",
"meth",
")",
"cls",
".",
"_delegate_bytes",
"=",
"(",
"bytes",
"in",
"resultclasses",
")",
"cls",
".",
"_delegate_text",
"=",
"(",
"six",
".",
"text_type",
"in",
"resultclasses",
")",
"assert",
"(",
"not",
"(",
"cls",
".",
"_delegate_bytes",
"and",
"cls",
".",
"_delegate_text",
")",
")",
",",
"'Cannot call lazy() with both bytes and text return types.'",
"if",
"cls",
".",
"_delegate_text",
":",
"if",
"six",
".",
"PY3",
":",
"cls",
".",
"__str__",
"=",
"cls",
".",
"__text_cast",
"else",
":",
"cls",
".",
"__unicode__",
"=",
"cls",
".",
"__text_cast",
"elif",
"cls",
".",
"_delegate_bytes",
":",
"if",
"six",
".",
"PY3",
":",
"cls",
".",
"__bytes__",
"=",
"cls",
".",
"__bytes_cast",
"else",
":",
"cls",
".",
"__str__",
"=",
"cls",
".",
"__bytes_cast",
"@",
"classmethod",
"def",
"__promise__",
"(",
"cls",
",",
"klass",
",",
"funcname",
",",
"method",
")",
":",
"def",
"__wrapper__",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kw",
")",
":",
"res",
"=",
"func",
"(",
"*",
"self",
".",
"__args",
",",
"**",
"self",
".",
"__kw",
")",
"for",
"t",
"in",
"type",
"(",
"res",
")",
".",
"mro",
"(",
")",
":",
"if",
"(",
"t",
"in",
"self",
".",
"__dispatch",
")",
":",
"return",
"self",
".",
"__dispatch",
"[",
"t",
"]",
"[",
"funcname",
"]",
"(",
"res",
",",
"*",
"args",
",",
"**",
"kw",
")",
"raise",
"TypeError",
"(",
"'Lazy object returned unexpected type.'",
")",
"if",
"(",
"klass",
"not",
"in",
"cls",
".",
"__dispatch",
")",
":",
"cls",
".",
"__dispatch",
"[",
"klass",
"]",
"=",
"{",
"}",
"cls",
".",
"__dispatch",
"[",
"klass",
"]",
"[",
"funcname",
"]",
"=",
"method",
"return",
"__wrapper__",
"def",
"__text_cast",
"(",
"self",
")",
":",
"return",
"func",
"(",
"*",
"self",
".",
"__args",
",",
"**",
"self",
".",
"__kw",
")",
"def",
"__bytes_cast",
"(",
"self",
")",
":",
"return",
"bytes",
"(",
"func",
"(",
"*",
"self",
".",
"__args",
",",
"**",
"self",
".",
"__kw",
")",
")",
"def",
"__cast",
"(",
"self",
")",
":",
"if",
"self",
".",
"_delegate_bytes",
":",
"return",
"self",
".",
"__bytes_cast",
"(",
")",
"elif",
"self",
".",
"_delegate_text",
":",
"return",
"self",
".",
"__text_cast",
"(",
")",
"else",
":",
"return",
"func",
"(",
"*",
"self",
".",
"__args",
",",
"**",
"self",
".",
"__kw",
")",
"def",
"__ne__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"Promise",
")",
":",
"other",
"=",
"other",
".",
"__cast",
"(",
")",
"return",
"(",
"self",
".",
"__cast",
"(",
")",
"!=",
"other",
")",
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"Promise",
")",
":",
"other",
"=",
"other",
".",
"__cast",
"(",
")",
"return",
"(",
"self",
".",
"__cast",
"(",
")",
"==",
"other",
")",
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"Promise",
")",
":",
"other",
"=",
"other",
".",
"__cast",
"(",
")",
"return",
"(",
"self",
".",
"__cast",
"(",
")",
"<",
"other",
")",
"def",
"__hash__",
"(",
"self",
")",
":",
"return",
"hash",
"(",
"self",
".",
"__cast",
"(",
")",
")",
"def",
"__mod__",
"(",
"self",
",",
"rhs",
")",
":",
"if",
"(",
"self",
".",
"_delegate_bytes",
"and",
"six",
".",
"PY2",
")",
":",
"return",
"(",
"bytes",
"(",
"self",
")",
"%",
"rhs",
")",
"elif",
"self",
".",
"_delegate_text",
":",
"return",
"(",
"six",
".",
"text_type",
"(",
"self",
")",
"%",
"rhs",
")",
"return",
"(",
"self",
".",
"__cast",
"(",
")",
"%",
"rhs",
")",
"def",
"__deepcopy__",
"(",
"self",
",",
"memo",
")",
":",
"memo",
"[",
"id",
"(",
"self",
")",
"]",
"=",
"self",
"return",
"self",
"@",
"wraps",
"(",
"func",
")",
"def",
"__wrapper__",
"(",
"*",
"args",
",",
"**",
"kw",
")",
":",
"return",
"__proxy__",
"(",
"args",
",",
"kw",
")",
"return",
"__wrapper__"
] |
turns any callable into a lazy evaluated callable .
|
train
| false
|
50,040
|
def _execute_with_retries(conn, function, **kwargs):
r = {}
max_attempts = 18
max_retry_delay = 10
for attempt in range(max_attempts):
log.info('attempt: {0} function: {1}'.format(attempt, function))
try:
fn = getattr(conn, function)
r['result'] = fn(**kwargs)
return r
except botocore.exceptions.ClientError as e:
error_code = e.response['Error']['Code']
if (('LimitExceededException' in error_code) or ('ResourceInUseException' in error_code)):
log.debug('Retrying due to AWS exception {0}'.format(e))
time.sleep(_jittered_backoff(attempt, max_retry_delay))
else:
r['error'] = e.response['Error']
r['result'] = None
return r
r['error'] = 'Tried to execute function {0} {1} times, but was unable'.format(function, max_attempts)
return r
|
[
"def",
"_execute_with_retries",
"(",
"conn",
",",
"function",
",",
"**",
"kwargs",
")",
":",
"r",
"=",
"{",
"}",
"max_attempts",
"=",
"18",
"max_retry_delay",
"=",
"10",
"for",
"attempt",
"in",
"range",
"(",
"max_attempts",
")",
":",
"log",
".",
"info",
"(",
"'attempt: {0} function: {1}'",
".",
"format",
"(",
"attempt",
",",
"function",
")",
")",
"try",
":",
"fn",
"=",
"getattr",
"(",
"conn",
",",
"function",
")",
"r",
"[",
"'result'",
"]",
"=",
"fn",
"(",
"**",
"kwargs",
")",
"return",
"r",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"e",
":",
"error_code",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"if",
"(",
"(",
"'LimitExceededException'",
"in",
"error_code",
")",
"or",
"(",
"'ResourceInUseException'",
"in",
"error_code",
")",
")",
":",
"log",
".",
"debug",
"(",
"'Retrying due to AWS exception {0}'",
".",
"format",
"(",
"e",
")",
")",
"time",
".",
"sleep",
"(",
"_jittered_backoff",
"(",
"attempt",
",",
"max_retry_delay",
")",
")",
"else",
":",
"r",
"[",
"'error'",
"]",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"r",
"[",
"'result'",
"]",
"=",
"None",
"return",
"r",
"r",
"[",
"'error'",
"]",
"=",
"'Tried to execute function {0} {1} times, but was unable'",
".",
"format",
"(",
"function",
",",
"max_attempts",
")",
"return",
"r"
] |
retry if were rate limited by aws or blocked by another call .
|
train
| true
|
50,041
|
def is_pyflakes_available():
try:
import pyflakes.api
import pyflakes.reporter
return True
except ImportError:
return False
|
[
"def",
"is_pyflakes_available",
"(",
")",
":",
"try",
":",
"import",
"pyflakes",
".",
"api",
"import",
"pyflakes",
".",
"reporter",
"return",
"True",
"except",
"ImportError",
":",
"return",
"False"
] |
checks if pyflakes is availalbe .
|
train
| false
|
50,042
|
def get_unused_ip_addresses(ports_client, subnets_client, network_id, subnet_id, count):
ports = ports_client.list_ports(network_id=network_id)['ports']
subnet = subnets_client.show_subnet(subnet_id)
ip_net = netaddr.IPNetwork(subnet['subnet']['cidr'])
subnet_set = netaddr.IPSet(ip_net.iter_hosts())
alloc_set = netaddr.IPSet()
for port in ports:
for fixed_ip in port.get('fixed_ips'):
alloc_set.add(fixed_ip['ip_address'])
gateway_ip = subnet['subnet']['gateway_ip']
if gateway_ip:
alloc_set.add(gateway_ip)
av_set = (subnet_set - alloc_set)
addrs = []
for cidr in reversed(av_set.iter_cidrs()):
for ip in reversed(cidr):
addrs.append(str(ip))
if (len(addrs) == count):
return addrs
msg = 'Insufficient IP addresses available'
raise lib_exc.BadRequest(message=msg)
|
[
"def",
"get_unused_ip_addresses",
"(",
"ports_client",
",",
"subnets_client",
",",
"network_id",
",",
"subnet_id",
",",
"count",
")",
":",
"ports",
"=",
"ports_client",
".",
"list_ports",
"(",
"network_id",
"=",
"network_id",
")",
"[",
"'ports'",
"]",
"subnet",
"=",
"subnets_client",
".",
"show_subnet",
"(",
"subnet_id",
")",
"ip_net",
"=",
"netaddr",
".",
"IPNetwork",
"(",
"subnet",
"[",
"'subnet'",
"]",
"[",
"'cidr'",
"]",
")",
"subnet_set",
"=",
"netaddr",
".",
"IPSet",
"(",
"ip_net",
".",
"iter_hosts",
"(",
")",
")",
"alloc_set",
"=",
"netaddr",
".",
"IPSet",
"(",
")",
"for",
"port",
"in",
"ports",
":",
"for",
"fixed_ip",
"in",
"port",
".",
"get",
"(",
"'fixed_ips'",
")",
":",
"alloc_set",
".",
"add",
"(",
"fixed_ip",
"[",
"'ip_address'",
"]",
")",
"gateway_ip",
"=",
"subnet",
"[",
"'subnet'",
"]",
"[",
"'gateway_ip'",
"]",
"if",
"gateway_ip",
":",
"alloc_set",
".",
"add",
"(",
"gateway_ip",
")",
"av_set",
"=",
"(",
"subnet_set",
"-",
"alloc_set",
")",
"addrs",
"=",
"[",
"]",
"for",
"cidr",
"in",
"reversed",
"(",
"av_set",
".",
"iter_cidrs",
"(",
")",
")",
":",
"for",
"ip",
"in",
"reversed",
"(",
"cidr",
")",
":",
"addrs",
".",
"append",
"(",
"str",
"(",
"ip",
")",
")",
"if",
"(",
"len",
"(",
"addrs",
")",
"==",
"count",
")",
":",
"return",
"addrs",
"msg",
"=",
"'Insufficient IP addresses available'",
"raise",
"lib_exc",
".",
"BadRequest",
"(",
"message",
"=",
"msg",
")"
] |
return a list with the specified number of unused ip addresses this method uses the given ports_client to find the specified number of unused ip addresses on the given subnet using the supplied subnets_client .
|
train
| false
|
50,043
|
@frappe.whitelist(allow_guest=True)
def getpage():
page = frappe.form_dict.get(u'name')
doc = get(page)
if (frappe.lang != u'en'):
send_translations(frappe.get_lang_dict(u'page', page))
frappe.response.docs.append(doc)
|
[
"@",
"frappe",
".",
"whitelist",
"(",
"allow_guest",
"=",
"True",
")",
"def",
"getpage",
"(",
")",
":",
"page",
"=",
"frappe",
".",
"form_dict",
".",
"get",
"(",
"u'name'",
")",
"doc",
"=",
"get",
"(",
"page",
")",
"if",
"(",
"frappe",
".",
"lang",
"!=",
"u'en'",
")",
":",
"send_translations",
"(",
"frappe",
".",
"get_lang_dict",
"(",
"u'page'",
",",
"page",
")",
")",
"frappe",
".",
"response",
".",
"docs",
".",
"append",
"(",
"doc",
")"
] |
load the page from frappe .
|
train
| false
|
50,044
|
def alter_retention_policy(database, name, duration, replication, default=False, **client_args):
client = _client(**client_args)
client.alter_retention_policy(name, database, duration, replication, default)
return True
|
[
"def",
"alter_retention_policy",
"(",
"database",
",",
"name",
",",
"duration",
",",
"replication",
",",
"default",
"=",
"False",
",",
"**",
"client_args",
")",
":",
"client",
"=",
"_client",
"(",
"**",
"client_args",
")",
"client",
".",
"alter_retention_policy",
"(",
"name",
",",
"database",
",",
"duration",
",",
"replication",
",",
"default",
")",
"return",
"True"
] |
modify an existing retention policy .
|
train
| true
|
50,045
|
def weight_as_number(weight):
if isinstance(weight, six.string_types):
try:
weight = weight_dict[weight.lower()]
except KeyError:
weight = 400
elif (weight in range(100, 1000, 100)):
pass
else:
raise ValueError(u'weight not a valid integer')
return weight
|
[
"def",
"weight_as_number",
"(",
"weight",
")",
":",
"if",
"isinstance",
"(",
"weight",
",",
"six",
".",
"string_types",
")",
":",
"try",
":",
"weight",
"=",
"weight_dict",
"[",
"weight",
".",
"lower",
"(",
")",
"]",
"except",
"KeyError",
":",
"weight",
"=",
"400",
"elif",
"(",
"weight",
"in",
"range",
"(",
"100",
",",
"1000",
",",
"100",
")",
")",
":",
"pass",
"else",
":",
"raise",
"ValueError",
"(",
"u'weight not a valid integer'",
")",
"return",
"weight"
] |
return the weight property as a numeric value .
|
train
| false
|
50,046
|
def compute_sign(base, expo):
sb = sign(base)
if (sb == 1):
return 1
pe = (expo % 2)
if (pe == 0):
return (- sb)
else:
return sb
|
[
"def",
"compute_sign",
"(",
"base",
",",
"expo",
")",
":",
"sb",
"=",
"sign",
"(",
"base",
")",
"if",
"(",
"sb",
"==",
"1",
")",
":",
"return",
"1",
"pe",
"=",
"(",
"expo",
"%",
"2",
")",
"if",
"(",
"pe",
"==",
"0",
")",
":",
"return",
"(",
"-",
"sb",
")",
"else",
":",
"return",
"sb"
] |
base != 0 and expo >= 0 are integers; returns the sign of base**expo without evaluating the power itself! .
|
train
| false
|
50,048
|
def add_http_basic_auth(url, user=None, password=None, https_only=False):
if ((user is None) and (password is None)):
return url
else:
urltuple = urlparse(url)
if (https_only and (urltuple.scheme != 'https')):
raise ValueError('Basic Auth only supported for HTTPS')
if (password is None):
netloc = '{0}@{1}'.format(user, urltuple.netloc)
urltuple = urltuple._replace(netloc=netloc)
return urlunparse(urltuple)
else:
netloc = '{0}:{1}@{2}'.format(user, password, urltuple.netloc)
urltuple = urltuple._replace(netloc=netloc)
return urlunparse(urltuple)
|
[
"def",
"add_http_basic_auth",
"(",
"url",
",",
"user",
"=",
"None",
",",
"password",
"=",
"None",
",",
"https_only",
"=",
"False",
")",
":",
"if",
"(",
"(",
"user",
"is",
"None",
")",
"and",
"(",
"password",
"is",
"None",
")",
")",
":",
"return",
"url",
"else",
":",
"urltuple",
"=",
"urlparse",
"(",
"url",
")",
"if",
"(",
"https_only",
"and",
"(",
"urltuple",
".",
"scheme",
"!=",
"'https'",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Basic Auth only supported for HTTPS'",
")",
"if",
"(",
"password",
"is",
"None",
")",
":",
"netloc",
"=",
"'{0}@{1}'",
".",
"format",
"(",
"user",
",",
"urltuple",
".",
"netloc",
")",
"urltuple",
"=",
"urltuple",
".",
"_replace",
"(",
"netloc",
"=",
"netloc",
")",
"return",
"urlunparse",
"(",
"urltuple",
")",
"else",
":",
"netloc",
"=",
"'{0}:{1}@{2}'",
".",
"format",
"(",
"user",
",",
"password",
",",
"urltuple",
".",
"netloc",
")",
"urltuple",
"=",
"urltuple",
".",
"_replace",
"(",
"netloc",
"=",
"netloc",
")",
"return",
"urlunparse",
"(",
"urltuple",
")"
] |
return a string with http basic auth incorporated into it .
|
train
| true
|
50,049
|
def _convert_nn(val):
if (val < 20):
return to_19[val]
for (dcap, dval) in ((k, (20 + (10 * v))) for (v, k) in enumerate(tens)):
if ((dval + 10) > val):
if (val % 10):
return ((dcap + '-') + to_19[(val % 10)])
return dcap
|
[
"def",
"_convert_nn",
"(",
"val",
")",
":",
"if",
"(",
"val",
"<",
"20",
")",
":",
"return",
"to_19",
"[",
"val",
"]",
"for",
"(",
"dcap",
",",
"dval",
")",
"in",
"(",
"(",
"k",
",",
"(",
"20",
"+",
"(",
"10",
"*",
"v",
")",
")",
")",
"for",
"(",
"v",
",",
"k",
")",
"in",
"enumerate",
"(",
"tens",
")",
")",
":",
"if",
"(",
"(",
"dval",
"+",
"10",
")",
">",
"val",
")",
":",
"if",
"(",
"val",
"%",
"10",
")",
":",
"return",
"(",
"(",
"dcap",
"+",
"'-'",
")",
"+",
"to_19",
"[",
"(",
"val",
"%",
"10",
")",
"]",
")",
"return",
"dcap"
] |
convert a value < 100 to english .
|
train
| false
|
50,050
|
def homogeneity_completeness_v_measure(labels_true, labels_pred):
(labels_true, labels_pred) = check_clusterings(labels_true, labels_pred)
if (len(labels_true) == 0):
return (1.0, 1.0, 1.0)
entropy_C = entropy(labels_true)
entropy_K = entropy(labels_pred)
contingency = contingency_matrix(labels_true, labels_pred, sparse=True)
MI = mutual_info_score(None, None, contingency=contingency)
homogeneity = ((MI / entropy_C) if entropy_C else 1.0)
completeness = ((MI / entropy_K) if entropy_K else 1.0)
if ((homogeneity + completeness) == 0.0):
v_measure_score = 0.0
else:
v_measure_score = (((2.0 * homogeneity) * completeness) / (homogeneity + completeness))
return (homogeneity, completeness, v_measure_score)
|
[
"def",
"homogeneity_completeness_v_measure",
"(",
"labels_true",
",",
"labels_pred",
")",
":",
"(",
"labels_true",
",",
"labels_pred",
")",
"=",
"check_clusterings",
"(",
"labels_true",
",",
"labels_pred",
")",
"if",
"(",
"len",
"(",
"labels_true",
")",
"==",
"0",
")",
":",
"return",
"(",
"1.0",
",",
"1.0",
",",
"1.0",
")",
"entropy_C",
"=",
"entropy",
"(",
"labels_true",
")",
"entropy_K",
"=",
"entropy",
"(",
"labels_pred",
")",
"contingency",
"=",
"contingency_matrix",
"(",
"labels_true",
",",
"labels_pred",
",",
"sparse",
"=",
"True",
")",
"MI",
"=",
"mutual_info_score",
"(",
"None",
",",
"None",
",",
"contingency",
"=",
"contingency",
")",
"homogeneity",
"=",
"(",
"(",
"MI",
"/",
"entropy_C",
")",
"if",
"entropy_C",
"else",
"1.0",
")",
"completeness",
"=",
"(",
"(",
"MI",
"/",
"entropy_K",
")",
"if",
"entropy_K",
"else",
"1.0",
")",
"if",
"(",
"(",
"homogeneity",
"+",
"completeness",
")",
"==",
"0.0",
")",
":",
"v_measure_score",
"=",
"0.0",
"else",
":",
"v_measure_score",
"=",
"(",
"(",
"(",
"2.0",
"*",
"homogeneity",
")",
"*",
"completeness",
")",
"/",
"(",
"homogeneity",
"+",
"completeness",
")",
")",
"return",
"(",
"homogeneity",
",",
"completeness",
",",
"v_measure_score",
")"
] |
compute the homogeneity and completeness and v-measure scores at once .
|
train
| false
|
50,051
|
def file_changes(attrs=None, where=None):
return _osquery_cmd(table='file_changes', attrs=attrs, where=where)
|
[
"def",
"file_changes",
"(",
"attrs",
"=",
"None",
",",
"where",
"=",
"None",
")",
":",
"return",
"_osquery_cmd",
"(",
"table",
"=",
"'file_changes'",
",",
"attrs",
"=",
"attrs",
",",
"where",
"=",
"where",
")"
] |
return file_changes information from osquery cli example: .
|
train
| false
|
50,052
|
def get_course_in_cache(course_key):
return get_block_structure_manager(course_key).get_collected()
|
[
"def",
"get_course_in_cache",
"(",
"course_key",
")",
":",
"return",
"get_block_structure_manager",
"(",
"course_key",
")",
".",
"get_collected",
"(",
")"
] |
a higher order function implemented on top of the block_structure .
|
train
| false
|
50,053
|
def strip_quoted_strings(string):
return re.sub('\\"(.*)\\"', '', string)
|
[
"def",
"strip_quoted_strings",
"(",
"string",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'\\\\\"(.*)\\\\\"'",
",",
"''",
",",
"string",
")"
] |
strips out data in between double quotes .
|
train
| false
|
50,054
|
def _get_ecg_channel_index(ch_name, inst):
if (ch_name is None):
ecg_idx = pick_types(inst.info, meg=False, eeg=False, stim=False, eog=False, ecg=True, emg=False, ref_meg=False, exclude='bads')
else:
if (ch_name not in inst.ch_names):
raise ValueError(('%s not in channel list (%s)' % (ch_name, inst.ch_names)))
ecg_idx = pick_channels(inst.ch_names, include=[ch_name])
if (len(ecg_idx) == 0):
return None
if (len(ecg_idx) > 1):
warn(('More than one ECG channel found. Using only %s.' % inst.ch_names[ecg_idx[0]]))
return ecg_idx[0]
|
[
"def",
"_get_ecg_channel_index",
"(",
"ch_name",
",",
"inst",
")",
":",
"if",
"(",
"ch_name",
"is",
"None",
")",
":",
"ecg_idx",
"=",
"pick_types",
"(",
"inst",
".",
"info",
",",
"meg",
"=",
"False",
",",
"eeg",
"=",
"False",
",",
"stim",
"=",
"False",
",",
"eog",
"=",
"False",
",",
"ecg",
"=",
"True",
",",
"emg",
"=",
"False",
",",
"ref_meg",
"=",
"False",
",",
"exclude",
"=",
"'bads'",
")",
"else",
":",
"if",
"(",
"ch_name",
"not",
"in",
"inst",
".",
"ch_names",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'%s not in channel list (%s)'",
"%",
"(",
"ch_name",
",",
"inst",
".",
"ch_names",
")",
")",
")",
"ecg_idx",
"=",
"pick_channels",
"(",
"inst",
".",
"ch_names",
",",
"include",
"=",
"[",
"ch_name",
"]",
")",
"if",
"(",
"len",
"(",
"ecg_idx",
")",
"==",
"0",
")",
":",
"return",
"None",
"if",
"(",
"len",
"(",
"ecg_idx",
")",
">",
"1",
")",
":",
"warn",
"(",
"(",
"'More than one ECG channel found. Using only %s.'",
"%",
"inst",
".",
"ch_names",
"[",
"ecg_idx",
"[",
"0",
"]",
"]",
")",
")",
"return",
"ecg_idx",
"[",
"0",
"]"
] |
get ecg channel index .
|
train
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.