| id_within_dataset (int64, 1–55.5k) | snippet (string, lengths 19–14.2k) | tokens (list, lengths 6–1.63k) | nl (string, lengths 6–352) | split_within_dataset (string, 1 class) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
4,846
|
def get_user_password(sockfile):
    return ('root', '')
|
[
"def",
"get_user_password",
"(",
"sockfile",
")",
":",
"return",
"(",
"'root'",
",",
"''",
")"
] |
given the path of a socket file .
|
train
| false
|
4,848
|
def get_cache():
    return requests.Session().cache
|
[
"def",
"get_cache",
"(",
")",
":",
"return",
"requests",
".",
"Session",
"(",
")",
".",
"cache"
] |
return cache or the default cache if cache is not specified or cache is not configured .
|
train
| false
|
4,849
|
def get_dynamic_links():
    df = []
    for query in dynamic_link_queries:
        df += frappe.db.sql(query, as_dict=True)
    return df
|
[
"def",
"get_dynamic_links",
"(",
")",
":",
"df",
"=",
"[",
"]",
"for",
"query",
"in",
"dynamic_link_queries",
":",
"df",
"+=",
"frappe",
".",
"db",
".",
"sql",
"(",
"query",
",",
"as_dict",
"=",
"True",
")",
"return",
"df"
] |
return list of dynamic link fields as docfield .
|
train
| false
|
4,851
|
def get_page_draft(page):
    if page:
        if page.publisher_is_draft:
            return page
        else:
            return page.publisher_draft
    else:
        return None
|
[
"def",
"get_page_draft",
"(",
"page",
")",
":",
"if",
"page",
":",
"if",
"page",
".",
"publisher_is_draft",
":",
"return",
"page",
"else",
":",
"return",
"page",
".",
"publisher_draft",
"else",
":",
"return",
"None"
] |
returns the draft version of a page .
|
train
| false
|
4,852
|
def addPluginsParentToMenu(directoryPath, menu, parentPath, pluginFileNames):
    ToolDialog().addPluginToMenu(menu, parentPath[:parentPath.rfind('.')])
    menu.add_separator()
    addPluginsToMenu(directoryPath, menu, pluginFileNames)
|
[
"def",
"addPluginsParentToMenu",
"(",
"directoryPath",
",",
"menu",
",",
"parentPath",
",",
"pluginFileNames",
")",
":",
"ToolDialog",
"(",
")",
".",
"addPluginToMenu",
"(",
"menu",
",",
"parentPath",
"[",
":",
"parentPath",
".",
"rfind",
"(",
"'.'",
")",
"]",
")",
"menu",
".",
"add_separator",
"(",
")",
"addPluginsToMenu",
"(",
"directoryPath",
",",
"menu",
",",
"pluginFileNames",
")"
] |
add plugins and the parent to the menu .
|
train
| false
|
4,853
|
def training_updates(visible_batch, model, sampler, optimizer):
    sampler_updates = sampler.updates()
    pos_v = visible_batch
    neg_v = sampler.particles
    grads = model.ml_gradients(pos_v, neg_v)
    ups = optimizer.updates(gradients=grads)
    safe_update(ups, sampler_updates)
    return ups
|
[
"def",
"training_updates",
"(",
"visible_batch",
",",
"model",
",",
"sampler",
",",
"optimizer",
")",
":",
"sampler_updates",
"=",
"sampler",
".",
"updates",
"(",
")",
"pos_v",
"=",
"visible_batch",
"neg_v",
"=",
"sampler",
".",
"particles",
"grads",
"=",
"model",
".",
"ml_gradients",
"(",
"pos_v",
",",
"neg_v",
")",
"ups",
"=",
"optimizer",
".",
"updates",
"(",
"gradients",
"=",
"grads",
")",
"safe_update",
"(",
"ups",
",",
"sampler_updates",
")",
"return",
"ups"
] |
combine together updates from various sources for rbm training .
|
train
| false
|
4,854
|
def release_local(local):
    local.__release_local__()
|
[
"def",
"release_local",
"(",
"local",
")",
":",
"local",
".",
"__release_local__",
"(",
")"
] |
releases the contents of the local for the current context .
|
train
| false
|
4,855
|
@pytest.fixture
def webpage(qnam):
    QtWebKitWidgets = pytest.importorskip('PyQt5.QtWebKitWidgets')
    page = QtWebKitWidgets.QWebPage()
    page.networkAccessManager().deleteLater()
    page.setNetworkAccessManager(qnam)
    return page
|
[
"@",
"pytest",
".",
"fixture",
"def",
"webpage",
"(",
"qnam",
")",
":",
"QtWebKitWidgets",
"=",
"pytest",
".",
"importorskip",
"(",
"'PyQt5.QtWebKitWidgets'",
")",
"page",
"=",
"QtWebKitWidgets",
".",
"QWebPage",
"(",
")",
"page",
".",
"networkAccessManager",
"(",
")",
".",
"deleteLater",
"(",
")",
"page",
".",
"setNetworkAccessManager",
"(",
"qnam",
")",
"return",
"page"
] |
get a new qwebpage object .
|
train
| false
|
4,856
|
def _truncate_to_field(model, field_name, value):
    field = model._meta.get_field(field_name)
    if (len(value) > field.max_length):
        midpoint = (field.max_length // 2)
        len_after_midpoint = (field.max_length - midpoint)
        first = value[:midpoint]
        sep = u'...'
        last = value[((len(value) - len_after_midpoint) + len(sep)):]
        value = sep.join([first, last])
    return value
|
[
"def",
"_truncate_to_field",
"(",
"model",
",",
"field_name",
",",
"value",
")",
":",
"field",
"=",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field_name",
")",
"if",
"(",
"len",
"(",
"value",
")",
">",
"field",
".",
"max_length",
")",
":",
"midpoint",
"=",
"(",
"field",
".",
"max_length",
"//",
"2",
")",
"len_after_midpoint",
"=",
"(",
"field",
".",
"max_length",
"-",
"midpoint",
")",
"first",
"=",
"value",
"[",
":",
"midpoint",
"]",
"sep",
"=",
"u'...'",
"last",
"=",
"value",
"[",
"(",
"(",
"len",
"(",
"value",
")",
"-",
"len_after_midpoint",
")",
"+",
"len",
"(",
"sep",
")",
")",
":",
"]",
"value",
"=",
"sep",
".",
"join",
"(",
"[",
"first",
",",
"last",
"]",
")",
"return",
"value"
] |
if data is too big for the field .
|
train
| true
|
4,860
|
def pathCheck(*args, **kwargs):
    moduleName = kwargs.get('moduleName', 'it')
    for arg in args:
        if (not quietRun(('which ' + arg))):
            error(((('Cannot find required executable %s.\n' % arg) + ('Please make sure that %s is installed ' % moduleName)) + ('and available in your $PATH:\n(%s)\n' % environ['PATH'])))
            exit(1)
|
[
"def",
"pathCheck",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"moduleName",
"=",
"kwargs",
".",
"get",
"(",
"'moduleName'",
",",
"'it'",
")",
"for",
"arg",
"in",
"args",
":",
"if",
"(",
"not",
"quietRun",
"(",
"(",
"'which '",
"+",
"arg",
")",
")",
")",
":",
"error",
"(",
"(",
"(",
"(",
"'Cannot find required executable %s.\\n'",
"%",
"arg",
")",
"+",
"(",
"'Please make sure that %s is installed '",
"%",
"moduleName",
")",
")",
"+",
"(",
"'and available in your $PATH:\\n(%s)\\n'",
"%",
"environ",
"[",
"'PATH'",
"]",
")",
")",
")",
"exit",
"(",
"1",
")"
] |
make sure each program in *args can be found in $path .
|
train
| false
|
4,861
|
def boundary_slice(df, start, stop, right_boundary=True, left_boundary=True, kind='loc'):
    result = getattr(df, kind)[start:stop]
    if (not right_boundary):
        right_index = result.index.get_slice_bound(stop, 'left', kind)
        result = result.iloc[:right_index]
    if (not left_boundary):
        left_index = result.index.get_slice_bound(start, 'right', kind)
        result = result.iloc[left_index:]
    return result
|
[
"def",
"boundary_slice",
"(",
"df",
",",
"start",
",",
"stop",
",",
"right_boundary",
"=",
"True",
",",
"left_boundary",
"=",
"True",
",",
"kind",
"=",
"'loc'",
")",
":",
"result",
"=",
"getattr",
"(",
"df",
",",
"kind",
")",
"[",
"start",
":",
"stop",
"]",
"if",
"(",
"not",
"right_boundary",
")",
":",
"right_index",
"=",
"result",
".",
"index",
".",
"get_slice_bound",
"(",
"stop",
",",
"'left'",
",",
"kind",
")",
"result",
"=",
"result",
".",
"iloc",
"[",
":",
"right_index",
"]",
"if",
"(",
"not",
"left_boundary",
")",
":",
"left_index",
"=",
"result",
".",
"index",
".",
"get_slice_bound",
"(",
"start",
",",
"'right'",
",",
"kind",
")",
"result",
"=",
"result",
".",
"iloc",
"[",
"left_index",
":",
"]",
"return",
"result"
] |
index slice start/stop .
|
train
| false
|
4,862
|
def get_mig(gce, name, zone):
    try:
        return gce.ex_get_instancegroupmanager(name=name, zone=zone)
    except ResourceNotFoundError:
        return None
|
[
"def",
"get_mig",
"(",
"gce",
",",
"name",
",",
"zone",
")",
":",
"try",
":",
"return",
"gce",
".",
"ex_get_instancegroupmanager",
"(",
"name",
"=",
"name",
",",
"zone",
"=",
"zone",
")",
"except",
"ResourceNotFoundError",
":",
"return",
"None"
] |
get a managed instance group from gce .
|
train
| false
|
4,863
|
def assert_allclose(x, y, atol=1e-05, rtol=0.0001, verbose=True):
    x = cuda.to_cpu(utils.force_array(x))
    y = cuda.to_cpu(utils.force_array(y))
    try:
        numpy.testing.assert_allclose(x, y, atol=atol, rtol=rtol, verbose=verbose)
    except Exception:
        print ('error:', numpy.abs((x - y)).max())
        raise
|
[
"def",
"assert_allclose",
"(",
"x",
",",
"y",
",",
"atol",
"=",
"1e-05",
",",
"rtol",
"=",
"0.0001",
",",
"verbose",
"=",
"True",
")",
":",
"x",
"=",
"cuda",
".",
"to_cpu",
"(",
"utils",
".",
"force_array",
"(",
"x",
")",
")",
"y",
"=",
"cuda",
".",
"to_cpu",
"(",
"utils",
".",
"force_array",
"(",
"y",
")",
")",
"try",
":",
"numpy",
".",
"testing",
".",
"assert_allclose",
"(",
"x",
",",
"y",
",",
"atol",
"=",
"atol",
",",
"rtol",
"=",
"rtol",
",",
"verbose",
"=",
"verbose",
")",
"except",
"Exception",
":",
"print",
"(",
"'error:'",
",",
"numpy",
".",
"abs",
"(",
"(",
"x",
"-",
"y",
")",
")",
".",
"max",
"(",
")",
")",
"raise"
] |
asserts if some corresponding element of x and y differs too much .
|
train
| false
|
4,865
|
def foreign_keys(model):
    return [column.name for column in foreign_key_columns(model)]
|
[
"def",
"foreign_keys",
"(",
"model",
")",
":",
"return",
"[",
"column",
".",
"name",
"for",
"column",
"in",
"foreign_key_columns",
"(",
"model",
")",
"]"
] |
returns a list of the names of columns that contain foreign keys for relationships in the specified model class .
|
train
| false
|
4,868
|
def _deduplicate_loggers(loggers):
    return ('{}:{}'.format(logger, level) for (logger, level) in dict((it.split(':') for it in loggers)).iteritems())
|
[
"def",
"_deduplicate_loggers",
"(",
"loggers",
")",
":",
"return",
"(",
"'{}:{}'",
".",
"format",
"(",
"logger",
",",
"level",
")",
"for",
"(",
"logger",
",",
"level",
")",
"in",
"dict",
"(",
"(",
"it",
".",
"split",
"(",
"':'",
")",
"for",
"it",
"in",
"loggers",
")",
")",
".",
"iteritems",
"(",
")",
")"
] |
avoid saving multiple logging levels for the same loggers to a save file .
|
train
| false
|
4,870
|
def module_report():
    ret = {'functions': [], 'function_attrs': [], 'function_subs': [], 'modules': [], 'module_attrs': [], 'missing_attrs': [], 'missing_subs': []}
    for ref in __salt__:
        if ('.' in ref):
            ret['functions'].append(ref)
        else:
            ret['modules'].append(ref)
            if hasattr(__salt__, ref):
                ret['module_attrs'].append(ref)
            for func in __salt__[ref]:
                full = '{0}.{1}'.format(ref, func)
                if hasattr(getattr(__salt__, ref), func):
                    ret['function_attrs'].append(full)
                if (func in __salt__[ref]):
                    ret['function_subs'].append(full)
    for func in ret['functions']:
        if (func not in ret['function_attrs']):
            ret['missing_attrs'].append(func)
        if (func not in ret['function_subs']):
            ret['missing_subs'].append(func)
    return ret
|
[
"def",
"module_report",
"(",
")",
":",
"ret",
"=",
"{",
"'functions'",
":",
"[",
"]",
",",
"'function_attrs'",
":",
"[",
"]",
",",
"'function_subs'",
":",
"[",
"]",
",",
"'modules'",
":",
"[",
"]",
",",
"'module_attrs'",
":",
"[",
"]",
",",
"'missing_attrs'",
":",
"[",
"]",
",",
"'missing_subs'",
":",
"[",
"]",
"}",
"for",
"ref",
"in",
"__salt__",
":",
"if",
"(",
"'.'",
"in",
"ref",
")",
":",
"ret",
"[",
"'functions'",
"]",
".",
"append",
"(",
"ref",
")",
"else",
":",
"ret",
"[",
"'modules'",
"]",
".",
"append",
"(",
"ref",
")",
"if",
"hasattr",
"(",
"__salt__",
",",
"ref",
")",
":",
"ret",
"[",
"'module_attrs'",
"]",
".",
"append",
"(",
"ref",
")",
"for",
"func",
"in",
"__salt__",
"[",
"ref",
"]",
":",
"full",
"=",
"'{0}.{1}'",
".",
"format",
"(",
"ref",
",",
"func",
")",
"if",
"hasattr",
"(",
"getattr",
"(",
"__salt__",
",",
"ref",
")",
",",
"func",
")",
":",
"ret",
"[",
"'function_attrs'",
"]",
".",
"append",
"(",
"full",
")",
"if",
"(",
"func",
"in",
"__salt__",
"[",
"ref",
"]",
")",
":",
"ret",
"[",
"'function_subs'",
"]",
".",
"append",
"(",
"full",
")",
"for",
"func",
"in",
"ret",
"[",
"'functions'",
"]",
":",
"if",
"(",
"func",
"not",
"in",
"ret",
"[",
"'function_attrs'",
"]",
")",
":",
"ret",
"[",
"'missing_attrs'",
"]",
".",
"append",
"(",
"func",
")",
"if",
"(",
"func",
"not",
"in",
"ret",
"[",
"'function_subs'",
"]",
")",
":",
"ret",
"[",
"'missing_subs'",
"]",
".",
"append",
"(",
"func",
")",
"return",
"ret"
] |
return a dict containing all of the execution modules with a report on the overall availability via different references cli example: .
|
train
| false
|
4,871
|
def guess_prefix(directory=None):
    def check_candidate(path, directory=None):
        'Auxilliary function that checks whether a particular\n    path is a good candidate.\n\n    '
        candidate = os.path.join(path, 'share', 'ivre')
        if (directory is not None):
            candidate = os.path.join(candidate, directory)
        try:
            if stat.S_ISDIR(os.stat(candidate).st_mode):
                return candidate
        except OSError:
            pass
    if __file__.startswith('/'):
        path = '/'
        for elt in __file__.split('/')[1:]:
            if (elt in ['lib', 'lib32', 'lib64']):
                candidate = check_candidate(path, directory=directory)
                if (candidate is not None):
                    return candidate
            path = os.path.join(path, elt)
    for path in ['/usr', '/usr/local', '/opt', '/opt/ivre']:
        candidate = check_candidate(path, directory=directory)
        if (candidate is not None):
            return candidate
|
[
"def",
"guess_prefix",
"(",
"directory",
"=",
"None",
")",
":",
"def",
"check_candidate",
"(",
"path",
",",
"directory",
"=",
"None",
")",
":",
"candidate",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'share'",
",",
"'ivre'",
")",
"if",
"(",
"directory",
"is",
"not",
"None",
")",
":",
"candidate",
"=",
"os",
".",
"path",
".",
"join",
"(",
"candidate",
",",
"directory",
")",
"try",
":",
"if",
"stat",
".",
"S_ISDIR",
"(",
"os",
".",
"stat",
"(",
"candidate",
")",
".",
"st_mode",
")",
":",
"return",
"candidate",
"except",
"OSError",
":",
"pass",
"if",
"__file__",
".",
"startswith",
"(",
"'/'",
")",
":",
"path",
"=",
"'/'",
"for",
"elt",
"in",
"__file__",
".",
"split",
"(",
"'/'",
")",
"[",
"1",
":",
"]",
":",
"if",
"(",
"elt",
"in",
"[",
"'lib'",
",",
"'lib32'",
",",
"'lib64'",
"]",
")",
":",
"candidate",
"=",
"check_candidate",
"(",
"path",
",",
"directory",
"=",
"directory",
")",
"if",
"(",
"candidate",
"is",
"not",
"None",
")",
":",
"return",
"candidate",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"elt",
")",
"for",
"path",
"in",
"[",
"'/usr'",
",",
"'/usr/local'",
",",
"'/opt'",
",",
"'/opt/ivre'",
"]",
":",
"candidate",
"=",
"check_candidate",
"(",
"path",
",",
"directory",
"=",
"directory",
")",
"if",
"(",
"candidate",
"is",
"not",
"None",
")",
":",
"return",
"candidate"
] |
attempts to find the base directory where ivre components are installed .
|
train
| false
|
4,872
|
def float_nan(n):
    if (n != n):
        return None
    else:
        return float(n)
|
[
"def",
"float_nan",
"(",
"n",
")",
":",
"if",
"(",
"n",
"!=",
"n",
")",
":",
"return",
"None",
"else",
":",
"return",
"float",
"(",
"n",
")"
] |
return none instead of nan to pass jquery 1 .
|
train
| false
|
4,873
|
def remove_bgp_config(net_connect, cmd='no router bgp', as_number=''):
    bgp_cmd = '{} {}'.format(cmd, str(as_number))
    cmd_list = [bgp_cmd]
    output = net_connect.send_config_set(cmd_list)
    if (net_connect.device_type == 'cisco_xr_ssh'):
        output += net_connect.commit()
    print output
|
[
"def",
"remove_bgp_config",
"(",
"net_connect",
",",
"cmd",
"=",
"'no router bgp'",
",",
"as_number",
"=",
"''",
")",
":",
"bgp_cmd",
"=",
"'{} {}'",
".",
"format",
"(",
"cmd",
",",
"str",
"(",
"as_number",
")",
")",
"cmd_list",
"=",
"[",
"bgp_cmd",
"]",
"output",
"=",
"net_connect",
".",
"send_config_set",
"(",
"cmd_list",
")",
"if",
"(",
"net_connect",
".",
"device_type",
"==",
"'cisco_xr_ssh'",
")",
":",
"output",
"+=",
"net_connect",
".",
"commit",
"(",
")",
"print",
"output"
] |
remove bgp from the config .
|
train
| false
|
4,874
|
def convert_remote_torrent_to_json(torrent):
    torrent_name = torrent['name']
    if ((torrent_name is None) or (len(torrent_name.strip()) == 0)):
        torrent_name = 'Unnamed torrent'
    relevance_score = relevance_score_remote_torrent(torrent_name)
    return {'id': torrent['torrent_id'], 'infohash': torrent['infohash'].encode('hex'), 'name': torrent_name, 'size': torrent['length'], 'category': torrent['category'], 'num_seeders': torrent['num_seeders'], 'num_leechers': torrent['num_leechers'], 'last_tracker_check': 0, 'relevance_score': relevance_score}
|
[
"def",
"convert_remote_torrent_to_json",
"(",
"torrent",
")",
":",
"torrent_name",
"=",
"torrent",
"[",
"'name'",
"]",
"if",
"(",
"(",
"torrent_name",
"is",
"None",
")",
"or",
"(",
"len",
"(",
"torrent_name",
".",
"strip",
"(",
")",
")",
"==",
"0",
")",
")",
":",
"torrent_name",
"=",
"'Unnamed torrent'",
"relevance_score",
"=",
"relevance_score_remote_torrent",
"(",
"torrent_name",
")",
"return",
"{",
"'id'",
":",
"torrent",
"[",
"'torrent_id'",
"]",
",",
"'infohash'",
":",
"torrent",
"[",
"'infohash'",
"]",
".",
"encode",
"(",
"'hex'",
")",
",",
"'name'",
":",
"torrent_name",
",",
"'size'",
":",
"torrent",
"[",
"'length'",
"]",
",",
"'category'",
":",
"torrent",
"[",
"'category'",
"]",
",",
"'num_seeders'",
":",
"torrent",
"[",
"'num_seeders'",
"]",
",",
"'num_leechers'",
":",
"torrent",
"[",
"'num_leechers'",
"]",
",",
"'last_tracker_check'",
":",
"0",
",",
"'relevance_score'",
":",
"relevance_score",
"}"
] |
this method converts a torrent that has been received by remote peers in the network to a json dictionary .
|
train
| false
|
4,875
|
def AES_enc(cipher, data):
    if HAVE_AES:
        return cipher.encrypt(data)
    else:
        encrypter = PYAES.Encrypter(cipher)
        enc = encrypter.feed(data)
        enc += encrypter.feed()
        return enc
|
[
"def",
"AES_enc",
"(",
"cipher",
",",
"data",
")",
":",
"if",
"HAVE_AES",
":",
"return",
"cipher",
".",
"encrypt",
"(",
"data",
")",
"else",
":",
"encrypter",
"=",
"PYAES",
".",
"Encrypter",
"(",
"cipher",
")",
"enc",
"=",
"encrypter",
".",
"feed",
"(",
"data",
")",
"enc",
"+=",
"encrypter",
".",
"feed",
"(",
")",
"return",
"enc"
] |
encrypt data with the cipher .
|
train
| false
|
4,876
|
@declared
def get_objects(ref):
    if (not isinstance(ref, basestring)):
        return ref
    name = ref
    if ('*' not in name):
        return get_object(name)
    hname = ''
    sdesc = ''
    if ('/' not in name):
        hname = name
    else:
        elts = name.split('/', 1)
        hname = elts[0]
        sdesc = elts[1]
    logger.debug('[trigger get_objects] Look for %s %s', hname, sdesc)
    res = []
    hosts = []
    services = []
    if ('*' not in hname):
        h = objs['hosts'].find_by_name(hname)
        if h:
            hosts.append(h)
    else:
        hname = hname.replace('*', '.*')
        p = re.compile(hname)
        for h in objs['hosts']:
            logger.debug('[trigger] Compare %s with %s', hname, h.get_name())
            if p.search(h.get_name()):
                hosts.append(h)
    if (not sdesc):
        return hosts
    for h in hosts:
        if ('*' not in sdesc):
            s = h.find_service_by_name(sdesc)
            if s:
                services.append(s)
        else:
            sdesc = sdesc.replace('*', '.*')
            p = re.compile(sdesc)
            for s in h.services:
                logger.debug('[trigger] Compare %s with %s', s.service_description, sdesc)
                if p.search(s.service_description):
                    services.append(s)
    logger.debug('Found the following services: %s', services)
    return services
|
[
"@",
"declared",
"def",
"get_objects",
"(",
"ref",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"ref",
",",
"basestring",
")",
")",
":",
"return",
"ref",
"name",
"=",
"ref",
"if",
"(",
"'*'",
"not",
"in",
"name",
")",
":",
"return",
"get_object",
"(",
"name",
")",
"hname",
"=",
"''",
"sdesc",
"=",
"''",
"if",
"(",
"'/'",
"not",
"in",
"name",
")",
":",
"hname",
"=",
"name",
"else",
":",
"elts",
"=",
"name",
".",
"split",
"(",
"'/'",
",",
"1",
")",
"hname",
"=",
"elts",
"[",
"0",
"]",
"sdesc",
"=",
"elts",
"[",
"1",
"]",
"logger",
".",
"debug",
"(",
"'[trigger get_objects] Look for %s %s'",
",",
"hname",
",",
"sdesc",
")",
"res",
"=",
"[",
"]",
"hosts",
"=",
"[",
"]",
"services",
"=",
"[",
"]",
"if",
"(",
"'*'",
"not",
"in",
"hname",
")",
":",
"h",
"=",
"objs",
"[",
"'hosts'",
"]",
".",
"find_by_name",
"(",
"hname",
")",
"if",
"h",
":",
"hosts",
".",
"append",
"(",
"h",
")",
"else",
":",
"hname",
"=",
"hname",
".",
"replace",
"(",
"'*'",
",",
"'.*'",
")",
"p",
"=",
"re",
".",
"compile",
"(",
"hname",
")",
"for",
"h",
"in",
"objs",
"[",
"'hosts'",
"]",
":",
"logger",
".",
"debug",
"(",
"'[trigger] Compare %s with %s'",
",",
"hname",
",",
"h",
".",
"get_name",
"(",
")",
")",
"if",
"p",
".",
"search",
"(",
"h",
".",
"get_name",
"(",
")",
")",
":",
"hosts",
".",
"append",
"(",
"h",
")",
"if",
"(",
"not",
"sdesc",
")",
":",
"return",
"hosts",
"for",
"h",
"in",
"hosts",
":",
"if",
"(",
"'*'",
"not",
"in",
"sdesc",
")",
":",
"s",
"=",
"h",
".",
"find_service_by_name",
"(",
"sdesc",
")",
"if",
"s",
":",
"services",
".",
"append",
"(",
"s",
")",
"else",
":",
"sdesc",
"=",
"sdesc",
".",
"replace",
"(",
"'*'",
",",
"'.*'",
")",
"p",
"=",
"re",
".",
"compile",
"(",
"sdesc",
")",
"for",
"s",
"in",
"h",
".",
"services",
":",
"logger",
".",
"debug",
"(",
"'[trigger] Compare %s with %s'",
",",
"s",
".",
"service_description",
",",
"sdesc",
")",
"if",
"p",
".",
"search",
"(",
"s",
".",
"service_description",
")",
":",
"services",
".",
"append",
"(",
"s",
")",
"logger",
".",
"debug",
"(",
"'Found the following services: %s'",
",",
"services",
")",
"return",
"services"
] |
return a dict containing the necessary objects for deployment .
|
train
| false
|
4,877
|
def building():
    ctable = s3db.gis_config
    config = db((ctable.name == 'Queens')).select(ctable.id, limitby=(0, 1)).first()
    if config:
        gis.set_config(config.id)
    def prep(r):
        if r.interactive:
            if (r.method == 'map'):
                s3db.configure('assess_building', marker_fn=building_marker_fn)
        elif (r.representation == 'geojson'):
            mtable = s3db.gis_marker
            s3db.configure('assess_building', marker_fn=building_marker_fn)
        return True
    s3.prep = prep
    return s3_rest_controller(rheader=s3db.assess_building_rheader)
|
[
"def",
"building",
"(",
")",
":",
"ctable",
"=",
"s3db",
".",
"gis_config",
"config",
"=",
"db",
"(",
"(",
"ctable",
".",
"name",
"==",
"'Queens'",
")",
")",
".",
"select",
"(",
"ctable",
".",
"id",
",",
"limitby",
"=",
"(",
"0",
",",
"1",
")",
")",
".",
"first",
"(",
")",
"if",
"config",
":",
"gis",
".",
"set_config",
"(",
"config",
".",
"id",
")",
"def",
"prep",
"(",
"r",
")",
":",
"if",
"r",
".",
"interactive",
":",
"if",
"(",
"r",
".",
"method",
"==",
"'map'",
")",
":",
"s3db",
".",
"configure",
"(",
"'assess_building'",
",",
"marker_fn",
"=",
"building_marker_fn",
")",
"elif",
"(",
"r",
".",
"representation",
"==",
"'geojson'",
")",
":",
"mtable",
"=",
"s3db",
".",
"gis_marker",
"s3db",
".",
"configure",
"(",
"'assess_building'",
",",
"marker_fn",
"=",
"building_marker_fn",
")",
"return",
"True",
"s3",
".",
"prep",
"=",
"prep",
"return",
"s3_rest_controller",
"(",
"rheader",
"=",
"s3db",
".",
"assess_building_rheader",
")"
] |
restful crud controller .
|
train
| false
|
4,878
|
def test_preprocessor_expression():
    obj = macroexpand(tokenize('(test (test "one" "two"))')[0], HyASTCompiler(__name__))
    assert (type(obj) == HyList)
    assert (type(obj[0]) == HyExpression)
    assert (obj[0] == HyExpression([HySymbol('test'), HyString('one'), HyString('two')]))
    obj = HyList([HyString('one'), HyString('two')])
    obj = tokenize('(shill ["one" "two"])')[0][1]
    assert (obj == macroexpand(obj, HyASTCompiler('')))
|
[
"def",
"test_preprocessor_expression",
"(",
")",
":",
"obj",
"=",
"macroexpand",
"(",
"tokenize",
"(",
"'(test (test \"one\" \"two\"))'",
")",
"[",
"0",
"]",
",",
"HyASTCompiler",
"(",
"__name__",
")",
")",
"assert",
"(",
"type",
"(",
"obj",
")",
"==",
"HyList",
")",
"assert",
"(",
"type",
"(",
"obj",
"[",
"0",
"]",
")",
"==",
"HyExpression",
")",
"assert",
"(",
"obj",
"[",
"0",
"]",
"==",
"HyExpression",
"(",
"[",
"HySymbol",
"(",
"'test'",
")",
",",
"HyString",
"(",
"'one'",
")",
",",
"HyString",
"(",
"'two'",
")",
"]",
")",
")",
"obj",
"=",
"HyList",
"(",
"[",
"HyString",
"(",
"'one'",
")",
",",
"HyString",
"(",
"'two'",
")",
"]",
")",
"obj",
"=",
"tokenize",
"(",
"'(shill [\"one\" \"two\"])'",
")",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"(",
"obj",
"==",
"macroexpand",
"(",
"obj",
",",
"HyASTCompiler",
"(",
"''",
")",
")",
")"
] |
test that macro expansion doesnt recurse .
|
train
| false
|
4,879
|
@bundles.register('quandl')
def quandl_bundle(environ, asset_db_writer, minute_bar_writer, daily_bar_writer, adjustment_writer, calendar, start_session, end_session, cache, show_progress, output_dir):
    api_key = environ.get('QUANDL_API_KEY')
    metadata = fetch_symbol_metadata_frame(api_key, cache=cache, show_progress=show_progress)
    symbol_map = metadata.symbol
    splits = []
    dividends = []
    asset_db_writer.write(metadata)
    daily_bar_writer.write(gen_symbol_data(api_key, cache, symbol_map, calendar, start_session, end_session, splits, dividends, environ.get('QUANDL_DOWNLOAD_ATTEMPTS', 5)), show_progress=show_progress)
    adjustment_writer.write(splits=pd.concat(splits, ignore_index=True), dividends=pd.concat(dividends, ignore_index=True))
|
[
"@",
"bundles",
".",
"register",
"(",
"'quandl'",
")",
"def",
"quandl_bundle",
"(",
"environ",
",",
"asset_db_writer",
",",
"minute_bar_writer",
",",
"daily_bar_writer",
",",
"adjustment_writer",
",",
"calendar",
",",
"start_session",
",",
"end_session",
",",
"cache",
",",
"show_progress",
",",
"output_dir",
")",
":",
"api_key",
"=",
"environ",
".",
"get",
"(",
"'QUANDL_API_KEY'",
")",
"metadata",
"=",
"fetch_symbol_metadata_frame",
"(",
"api_key",
",",
"cache",
"=",
"cache",
",",
"show_progress",
"=",
"show_progress",
")",
"symbol_map",
"=",
"metadata",
".",
"symbol",
"splits",
"=",
"[",
"]",
"dividends",
"=",
"[",
"]",
"asset_db_writer",
".",
"write",
"(",
"metadata",
")",
"daily_bar_writer",
".",
"write",
"(",
"gen_symbol_data",
"(",
"api_key",
",",
"cache",
",",
"symbol_map",
",",
"calendar",
",",
"start_session",
",",
"end_session",
",",
"splits",
",",
"dividends",
",",
"environ",
".",
"get",
"(",
"'QUANDL_DOWNLOAD_ATTEMPTS'",
",",
"5",
")",
")",
",",
"show_progress",
"=",
"show_progress",
")",
"adjustment_writer",
".",
"write",
"(",
"splits",
"=",
"pd",
".",
"concat",
"(",
"splits",
",",
"ignore_index",
"=",
"True",
")",
",",
"dividends",
"=",
"pd",
".",
"concat",
"(",
"dividends",
",",
"ignore_index",
"=",
"True",
")",
")"
] |
build a zipline data bundle from the quandl wiki dataset .
|
train
| false
|
4,881
|
def _api_config_undefined(output, kwargs):
    return report(output, _MSG_NOT_IMPLEMENTED)
|
[
"def",
"_api_config_undefined",
"(",
"output",
",",
"kwargs",
")",
":",
"return",
"report",
"(",
"output",
",",
"_MSG_NOT_IMPLEMENTED",
")"
] |
api: accepts output .
|
train
| false
|
4,884
|
@pytest.mark.parametrize(u'model', [((Shift(0) + Shift(0)) | Shift(0)), ((Shift(0) - Shift(0)) | Shift(0)), ((Shift(0) * Shift(0)) | Shift(0)), ((Shift(0) / Shift(0)) | Shift(0)), ((Shift(0) ** Shift(0)) | Shift(0)), (Gaussian1D(1, 2, 3) | Gaussian1D(4, 5, 6))])
def test_compound_unsupported_inverse(model):
    with pytest.raises(NotImplementedError):
        model.inverse
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"u'model'",
",",
"[",
"(",
"(",
"Shift",
"(",
"0",
")",
"+",
"Shift",
"(",
"0",
")",
")",
"|",
"Shift",
"(",
"0",
")",
")",
",",
"(",
"(",
"Shift",
"(",
"0",
")",
"-",
"Shift",
"(",
"0",
")",
")",
"|",
"Shift",
"(",
"0",
")",
")",
",",
"(",
"(",
"Shift",
"(",
"0",
")",
"*",
"Shift",
"(",
"0",
")",
")",
"|",
"Shift",
"(",
"0",
")",
")",
",",
"(",
"(",
"Shift",
"(",
"0",
")",
"/",
"Shift",
"(",
"0",
")",
")",
"|",
"Shift",
"(",
"0",
")",
")",
",",
"(",
"(",
"Shift",
"(",
"0",
")",
"**",
"Shift",
"(",
"0",
")",
")",
"|",
"Shift",
"(",
"0",
")",
")",
",",
"(",
"Gaussian1D",
"(",
"1",
",",
"2",
",",
"3",
")",
"|",
"Gaussian1D",
"(",
"4",
",",
"5",
",",
"6",
")",
")",
"]",
")",
"def",
"test_compound_unsupported_inverse",
"(",
"model",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"NotImplementedError",
")",
":",
"model",
".",
"inverse"
] |
ensure inverses arent supported in cases where it shouldnt be .
|
train
| false
|
4,886
|
def extend_data(data, length, offset):
    if (length >= offset):
        new_data = (data[(- offset):] * (alignValue(length, offset) // offset))
        return (data + new_data[:length])
    else:
        return (data + data[(- offset):((- offset) + length)])
|
[
"def",
"extend_data",
"(",
"data",
",",
"length",
",",
"offset",
")",
":",
"if",
"(",
"length",
">=",
"offset",
")",
":",
"new_data",
"=",
"(",
"data",
"[",
"(",
"-",
"offset",
")",
":",
"]",
"*",
"(",
"alignValue",
"(",
"length",
",",
"offset",
")",
"//",
"offset",
")",
")",
"return",
"(",
"data",
"+",
"new_data",
"[",
":",
"length",
"]",
")",
"else",
":",
"return",
"(",
"data",
"+",
"data",
"[",
"(",
"-",
"offset",
")",
":",
"(",
"(",
"-",
"offset",
")",
"+",
"length",
")",
"]",
")"
] |
extend data using a length and an offset .
|
train
| false
|
4,887
|
@curry
def leaves_of_type(types, expr):
    if (not isinstance(expr, types)):
        return set([expr])
    else:
        return set.union(*map(leaves_of_type(types), expr._inputs))
|
[
"@",
"curry",
"def",
"leaves_of_type",
"(",
"types",
",",
"expr",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"expr",
",",
"types",
")",
")",
":",
"return",
"set",
"(",
"[",
"expr",
"]",
")",
"else",
":",
"return",
"set",
".",
"union",
"(",
"*",
"map",
"(",
"leaves_of_type",
"(",
"types",
")",
",",
"expr",
".",
"_inputs",
")",
")"
] |
leaves of an expression skipping all operations of type types .
|
train
| false
|
4,888
|
def _get_al_mohy_higham_2012_experiment_1():
    A = np.array([[0.32346, 30000.0, 30000.0, 30000.0], [0, 0.30089, 30000.0, 30000.0], [0, 0, 0.3221, 30000.0], [0, 0, 0, 0.30744]], dtype=float)
    return A
|
[
"def",
"_get_al_mohy_higham_2012_experiment_1",
"(",
")",
":",
"A",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"0.32346",
",",
"30000.0",
",",
"30000.0",
",",
"30000.0",
"]",
",",
"[",
"0",
",",
"0.30089",
",",
"30000.0",
",",
"30000.0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0.3221",
",",
"30000.0",
"]",
",",
"[",
"0",
",",
"0",
",",
"0",
",",
"0.30744",
"]",
"]",
",",
"dtype",
"=",
"float",
")",
"return",
"A"
] |
return the test matrix from experiment (1) of [1]_ .
|
train
| false
|
4,889
|
def _set_binops_check_strict(self, obj):
    return isinstance(obj, (_set_binop_bases + (self.__class__,)))
|
[
"def",
"_set_binops_check_strict",
"(",
"self",
",",
"obj",
")",
":",
"return",
"isinstance",
"(",
"obj",
",",
"(",
"_set_binop_bases",
"+",
"(",
"self",
".",
"__class__",
",",
")",
")",
")"
] |
allow only set .
|
train
| false
|
4,891
|
def update_org_user(userid, orgname=None, profile='grafana', **kwargs):
    if isinstance(profile, string_types):
        profile = __salt__['config.option'](profile)
    if orgname:
        switch_org(orgname, profile)
    response = requests.patch('{0}/api/org/users/{1}'.format(profile['grafana_url'], userid), json=kwargs, auth=_get_auth(profile), headers=_get_headers(profile), timeout=profile.get('grafana_timeout', 3))
    if (response.status_code >= 400):
        response.raise_for_status()
    return response.json()
|
[
"def",
"update_org_user",
"(",
"userid",
",",
"orgname",
"=",
"None",
",",
"profile",
"=",
"'grafana'",
",",
"**",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"profile",
",",
"string_types",
")",
":",
"profile",
"=",
"__salt__",
"[",
"'config.option'",
"]",
"(",
"profile",
")",
"if",
"orgname",
":",
"switch_org",
"(",
"orgname",
",",
"profile",
")",
"response",
"=",
"requests",
".",
"patch",
"(",
"'{0}/api/org/users/{1}'",
".",
"format",
"(",
"profile",
"[",
"'grafana_url'",
"]",
",",
"userid",
")",
",",
"json",
"=",
"kwargs",
",",
"auth",
"=",
"_get_auth",
"(",
"profile",
")",
",",
"headers",
"=",
"_get_headers",
"(",
"profile",
")",
",",
"timeout",
"=",
"profile",
".",
"get",
"(",
"'grafana_timeout'",
",",
"3",
")",
")",
"if",
"(",
"response",
".",
"status_code",
">=",
"400",
")",
":",
"response",
".",
"raise_for_status",
"(",
")",
"return",
"response",
".",
"json",
"(",
")"
] |
update user role in the organization .
|
train
| true
|
4,892
|
def replaceBads(s):
    bads = [' ', '(', ')']
    x = s
    for bad in bads:
        x = x.replace(bad, '_')
    return x
|
[
"def",
"replaceBads",
"(",
"s",
")",
":",
"bads",
"=",
"[",
"' '",
",",
"'('",
",",
"')'",
"]",
"x",
"=",
"s",
"for",
"bad",
"in",
"bads",
":",
"x",
"=",
"x",
".",
"replace",
"(",
"bad",
",",
"'_'",
")",
"return",
"x"
] |
replaces bad characters with good characters! .
|
train
| false
|
4,893
|
def delete_group(group_name, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if (not conn):
        return False
    _group = get_group(group_name, region, key, keyid, profile)
    if (not _group):
        return True
    try:
        conn.delete_group(group_name)
        msg = 'Successfully deleted group {0}.'
        log.info(msg.format(group_name))
        return True
    except boto.exception.BotoServerError as e:
        log.debug(e)
        msg = 'Failed to delete group {0}.'
        log.error(msg.format(group_name))
        return False
|
[
"def",
"delete_group",
"(",
"group_name",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"if",
"(",
"not",
"conn",
")",
":",
"return",
"False",
"_group",
"=",
"get_group",
"(",
"group_name",
",",
"region",
",",
"key",
",",
"keyid",
",",
"profile",
")",
"if",
"(",
"not",
"_group",
")",
":",
"return",
"True",
"try",
":",
"conn",
".",
"delete_group",
"(",
"group_name",
")",
"msg",
"=",
"'Successfully deleted group {0}.'",
"log",
".",
"info",
"(",
"msg",
".",
"format",
"(",
"group_name",
")",
")",
"return",
"True",
"except",
"boto",
".",
"exception",
".",
"BotoServerError",
"as",
"e",
":",
"log",
".",
"debug",
"(",
"e",
")",
"msg",
"=",
"'Failed to delete group {0}.'",
"log",
".",
"error",
"(",
"msg",
".",
"format",
"(",
"group_name",
")",
")",
"return",
"False"
] |
delete a group policy .
|
train
| true
|
4,894
|
def is_clique(graph):
    return (graph.density == 1.0)
|
[
"def",
"is_clique",
"(",
"graph",
")",
":",
"return",
"(",
"graph",
".",
"density",
"==",
"1.0",
")"
] |
returns true if and only if nodes is an independent set in g .
|
train
| false
|
4,896
|
def categorical_order(values, order=None):
    if (order is None):
        if hasattr(values, 'categories'):
            order = values.categories
        else:
            try:
                order = values.cat.categories
            except (TypeError, AttributeError):
                try:
                    order = values.unique()
                except AttributeError:
                    order = pd.unique(values)
                try:
                    np.asarray(values).astype(np.float)
                    order = np.sort(order)
                except (ValueError, TypeError):
                    order = order
        order = filter(pd.notnull, order)
    return list(order)
|
[
"def",
"categorical_order",
"(",
"values",
",",
"order",
"=",
"None",
")",
":",
"if",
"(",
"order",
"is",
"None",
")",
":",
"if",
"hasattr",
"(",
"values",
",",
"'categories'",
")",
":",
"order",
"=",
"values",
".",
"categories",
"else",
":",
"try",
":",
"order",
"=",
"values",
".",
"cat",
".",
"categories",
"except",
"(",
"TypeError",
",",
"AttributeError",
")",
":",
"try",
":",
"order",
"=",
"values",
".",
"unique",
"(",
")",
"except",
"AttributeError",
":",
"order",
"=",
"pd",
".",
"unique",
"(",
"values",
")",
"try",
":",
"np",
".",
"asarray",
"(",
"values",
")",
".",
"astype",
"(",
"np",
".",
"float",
")",
"order",
"=",
"np",
".",
"sort",
"(",
"order",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"order",
"=",
"order",
"order",
"=",
"filter",
"(",
"pd",
".",
"notnull",
",",
"order",
")",
"return",
"list",
"(",
"order",
")"
] |
return a list of unique data values .
|
train
| false
|
4,898
|
def to_join_key(obj):
    if (obj is None):
        return JoinKey(None, None, None)
    if isinstance(obj, compat.string_type):
        obj = obj.split('.')
    if isinstance(obj, (tuple, list)):
        if (len(obj) == 1):
            column = obj[0]
            table = None
            schema = None
        elif (len(obj) == 2):
            (table, column) = obj
            schema = None
        elif (len(obj) == 3):
            (schema, table, column) = obj
        else:
            raise ArgumentError('Join key can have 1 to 3 items has {}: {}'.format(len(obj), obj))
    elif hasattr(obj, 'get'):
        schema = obj.get('schema')
        table = obj.get('table')
        column = obj.get('column')
    else:
        schema = obj.schema
        table = obj.table
        column = obj.column
    if isinstance(column, list):
        column = tuple(column)
    return JoinKey(schema, table, column)
|
[
"def",
"to_join_key",
"(",
"obj",
")",
":",
"if",
"(",
"obj",
"is",
"None",
")",
":",
"return",
"JoinKey",
"(",
"None",
",",
"None",
",",
"None",
")",
"if",
"isinstance",
"(",
"obj",
",",
"compat",
".",
"string_type",
")",
":",
"obj",
"=",
"obj",
".",
"split",
"(",
"'.'",
")",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"if",
"(",
"len",
"(",
"obj",
")",
"==",
"1",
")",
":",
"column",
"=",
"obj",
"[",
"0",
"]",
"table",
"=",
"None",
"schema",
"=",
"None",
"elif",
"(",
"len",
"(",
"obj",
")",
"==",
"2",
")",
":",
"(",
"table",
",",
"column",
")",
"=",
"obj",
"schema",
"=",
"None",
"elif",
"(",
"len",
"(",
"obj",
")",
"==",
"3",
")",
":",
"(",
"schema",
",",
"table",
",",
"column",
")",
"=",
"obj",
"else",
":",
"raise",
"ArgumentError",
"(",
"'Join key can have 1 to 3 items has {}: {}'",
".",
"format",
"(",
"len",
"(",
"obj",
")",
",",
"obj",
")",
")",
"elif",
"hasattr",
"(",
"obj",
",",
"'get'",
")",
":",
"schema",
"=",
"obj",
".",
"get",
"(",
"'schema'",
")",
"table",
"=",
"obj",
".",
"get",
"(",
"'table'",
")",
"column",
"=",
"obj",
".",
"get",
"(",
"'column'",
")",
"else",
":",
"schema",
"=",
"obj",
".",
"schema",
"table",
"=",
"obj",
".",
"table",
"column",
"=",
"obj",
".",
"column",
"if",
"isinstance",
"(",
"column",
",",
"list",
")",
":",
"column",
"=",
"tuple",
"(",
"column",
")",
"return",
"JoinKey",
"(",
"schema",
",",
"table",
",",
"column",
")"
] |
utility function that will create joinkey tuple from an anonymous tuple .
|
train
| false
|
4,900
|
def test_getitem_representation():
    sc = SkyCoord(([1, 1] * u.deg), ([2, 2] * u.deg))
    sc.representation = u'cartesian'
    assert (sc[0].representation is CartesianRepresentation)
|
[
"def",
"test_getitem_representation",
"(",
")",
":",
"sc",
"=",
"SkyCoord",
"(",
"(",
"[",
"1",
",",
"1",
"]",
"*",
"u",
".",
"deg",
")",
",",
"(",
"[",
"2",
",",
"2",
"]",
"*",
"u",
".",
"deg",
")",
")",
"sc",
".",
"representation",
"=",
"u'cartesian'",
"assert",
"(",
"sc",
"[",
"0",
"]",
".",
"representation",
"is",
"CartesianRepresentation",
")"
] |
make sure current representation survives __getitem__ even if different from data representation .
|
train
| false
|
4,901
|
@log_call
def metadef_property_get_by_id(context, namespace_name, property_id):
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    for property in DATA['metadef_properties']:
        if ((property['namespace_id'] == namespace['id']) and (property['id'] == property_id)):
            return property
    else:
        msg = (_('Metadata definition property not found for id=%s') % property_id)
        LOG.warn(msg)
        raise exception.MetadefPropertyNotFound(msg)
|
[
"@",
"log_call",
"def",
"metadef_property_get_by_id",
"(",
"context",
",",
"namespace_name",
",",
"property_id",
")",
":",
"namespace",
"=",
"metadef_namespace_get",
"(",
"context",
",",
"namespace_name",
")",
"_check_namespace_visibility",
"(",
"context",
",",
"namespace",
",",
"namespace_name",
")",
"for",
"property",
"in",
"DATA",
"[",
"'metadef_properties'",
"]",
":",
"if",
"(",
"(",
"property",
"[",
"'namespace_id'",
"]",
"==",
"namespace",
"[",
"'id'",
"]",
")",
"and",
"(",
"property",
"[",
"'id'",
"]",
"==",
"property_id",
")",
")",
":",
"return",
"property",
"else",
":",
"msg",
"=",
"(",
"_",
"(",
"'Metadata definition property not found for id=%s'",
")",
"%",
"property_id",
")",
"LOG",
".",
"warn",
"(",
"msg",
")",
"raise",
"exception",
".",
"MetadefPropertyNotFound",
"(",
"msg",
")"
] |
get a metadef property .
|
train
| false
|
4,902
|
def download_mission():
    print ' Download mission from vehicle'
    missionlist = []
    cmds = vehicle.commands
    cmds.download()
    cmds.wait_ready()
    for cmd in cmds:
        missionlist.append(cmd)
    return missionlist
|
[
"def",
"download_mission",
"(",
")",
":",
"print",
"' Download mission from vehicle'",
"missionlist",
"=",
"[",
"]",
"cmds",
"=",
"vehicle",
".",
"commands",
"cmds",
".",
"download",
"(",
")",
"cmds",
".",
"wait_ready",
"(",
")",
"for",
"cmd",
"in",
"cmds",
":",
"missionlist",
".",
"append",
"(",
"cmd",
")",
"return",
"missionlist"
] |
download the current mission from the vehicle .
|
train
| true
|
4,904
|
def guessit(string, options=None):
    return default_api.guessit(string, options)
|
[
"def",
"guessit",
"(",
"string",
",",
"options",
"=",
"None",
")",
":",
"return",
"default_api",
".",
"guessit",
"(",
"string",
",",
"options",
")"
] |
retrieves all matches from string as a dict .
|
train
| false
|
4,905
|
def parse_django_adminopt_node(env, sig, signode):
    try:
        from sphinx.domains.std import option_desc_re
    except ImportError:
        from sphinx.directives.desc import option_desc_re
    count = 0
    firstname = ''
    for m in option_desc_re.finditer(sig):
        (optname, args) = m.groups()
        if count:
            signode += addnodes.desc_addname(', ', ', ')
        signode += addnodes.desc_name(optname, optname)
        signode += addnodes.desc_addname(args, args)
        if (not count):
            firstname = optname
        count += 1
    if (not count):
        for m in simple_option_desc_re.finditer(sig):
            (optname, args) = m.groups()
            if count:
                signode += addnodes.desc_addname(', ', ', ')
            signode += addnodes.desc_name(optname, optname)
            signode += addnodes.desc_addname(args, args)
            if (not count):
                firstname = optname
            count += 1
    if (not firstname):
        raise ValueError
    return firstname
|
[
"def",
"parse_django_adminopt_node",
"(",
"env",
",",
"sig",
",",
"signode",
")",
":",
"try",
":",
"from",
"sphinx",
".",
"domains",
".",
"std",
"import",
"option_desc_re",
"except",
"ImportError",
":",
"from",
"sphinx",
".",
"directives",
".",
"desc",
"import",
"option_desc_re",
"count",
"=",
"0",
"firstname",
"=",
"''",
"for",
"m",
"in",
"option_desc_re",
".",
"finditer",
"(",
"sig",
")",
":",
"(",
"optname",
",",
"args",
")",
"=",
"m",
".",
"groups",
"(",
")",
"if",
"count",
":",
"signode",
"+=",
"addnodes",
".",
"desc_addname",
"(",
"', '",
",",
"', '",
")",
"signode",
"+=",
"addnodes",
".",
"desc_name",
"(",
"optname",
",",
"optname",
")",
"signode",
"+=",
"addnodes",
".",
"desc_addname",
"(",
"args",
",",
"args",
")",
"if",
"(",
"not",
"count",
")",
":",
"firstname",
"=",
"optname",
"count",
"+=",
"1",
"if",
"(",
"not",
"count",
")",
":",
"for",
"m",
"in",
"simple_option_desc_re",
".",
"finditer",
"(",
"sig",
")",
":",
"(",
"optname",
",",
"args",
")",
"=",
"m",
".",
"groups",
"(",
")",
"if",
"count",
":",
"signode",
"+=",
"addnodes",
".",
"desc_addname",
"(",
"', '",
",",
"', '",
")",
"signode",
"+=",
"addnodes",
".",
"desc_name",
"(",
"optname",
",",
"optname",
")",
"signode",
"+=",
"addnodes",
".",
"desc_addname",
"(",
"args",
",",
"args",
")",
"if",
"(",
"not",
"count",
")",
":",
"firstname",
"=",
"optname",
"count",
"+=",
"1",
"if",
"(",
"not",
"firstname",
")",
":",
"raise",
"ValueError",
"return",
"firstname"
] |
a copy of sphinx .
|
train
| true
|
4,906
|
def _afterpoint(string):
    if _isnumber(string):
        if _isint(string):
            return (-1)
        else:
            pos = string.rfind(u'.')
            pos = (string.lower().rfind(u'e') if (pos < 0) else pos)
            if (pos >= 0):
                return ((len(string) - pos) - 1)
            else:
                return (-1)
    else:
        return (-1)
|
[
"def",
"_afterpoint",
"(",
"string",
")",
":",
"if",
"_isnumber",
"(",
"string",
")",
":",
"if",
"_isint",
"(",
"string",
")",
":",
"return",
"(",
"-",
"1",
")",
"else",
":",
"pos",
"=",
"string",
".",
"rfind",
"(",
"u'.'",
")",
"pos",
"=",
"(",
"string",
".",
"lower",
"(",
")",
".",
"rfind",
"(",
"u'e'",
")",
"if",
"(",
"pos",
"<",
"0",
")",
"else",
"pos",
")",
"if",
"(",
"pos",
">=",
"0",
")",
":",
"return",
"(",
"(",
"len",
"(",
"string",
")",
"-",
"pos",
")",
"-",
"1",
")",
"else",
":",
"return",
"(",
"-",
"1",
")",
"else",
":",
"return",
"(",
"-",
"1",
")"
] |
symbols after a decimal point .
|
train
| true
|
4,907
|
def get_change_str(changes):
    res = [(u'%s %d' % (key, changes[key])) for key in changes if (changes[key] > 0)]
    if res:
        return ', '.join(res)
    return 'no changed'
|
[
"def",
"get_change_str",
"(",
"changes",
")",
":",
"res",
"=",
"[",
"(",
"u'%s %d'",
"%",
"(",
"key",
",",
"changes",
"[",
"key",
"]",
")",
")",
"for",
"key",
"in",
"changes",
"if",
"(",
"changes",
"[",
"key",
"]",
">",
"0",
")",
"]",
"if",
"res",
":",
"return",
"', '",
".",
"join",
"(",
"res",
")",
"return",
"'no changed'"
] |
returns a formatted string for the non-zero items of a changes dictionary .
|
train
| false
|
4,908
|
def get_review_request_fields():
    for field in field_registry:
        (yield field)
|
[
"def",
"get_review_request_fields",
"(",
")",
":",
"for",
"field",
"in",
"field_registry",
":",
"(",
"yield",
"field",
")"
] |
yield all registered field classes .
|
train
| false
|
4,909
|
def job_status(name=None):
    if (not name):
        raise SaltInvocationError('Required parameter `name` is missing.')
    server = _connect()
    if (not job_exists(name)):
        raise SaltInvocationError('Job `{0}` does not exists.'.format(name))
    return server.get_job_info('empty')['buildable']
|
[
"def",
"job_status",
"(",
"name",
"=",
"None",
")",
":",
"if",
"(",
"not",
"name",
")",
":",
"raise",
"SaltInvocationError",
"(",
"'Required parameter `name` is missing.'",
")",
"server",
"=",
"_connect",
"(",
")",
"if",
"(",
"not",
"job_exists",
"(",
"name",
")",
")",
":",
"raise",
"SaltInvocationError",
"(",
"'Job `{0}` does not exists.'",
".",
"format",
"(",
"name",
")",
")",
"return",
"server",
".",
"get_job_info",
"(",
"'empty'",
")",
"[",
"'buildable'",
"]"
] |
returns a json objecting representing the status of a job .
|
train
| true
|
4,911
|
def getexistingdirectory(parent=None, caption='', basedir='', options=QFileDialog.ShowDirsOnly):
    if (sys.platform == 'win32'):
        (_temp1, _temp2) = (sys.stdout, sys.stderr)
        (sys.stdout, sys.stderr) = (None, None)
    try:
        result = QFileDialog.getExistingDirectory(parent, caption, basedir, options)
    finally:
        if (sys.platform == 'win32'):
            (sys.stdout, sys.stderr) = (_temp1, _temp2)
    if (not is_text_string(result)):
        result = to_text_string(result)
    return result
|
[
"def",
"getexistingdirectory",
"(",
"parent",
"=",
"None",
",",
"caption",
"=",
"''",
",",
"basedir",
"=",
"''",
",",
"options",
"=",
"QFileDialog",
".",
"ShowDirsOnly",
")",
":",
"if",
"(",
"sys",
".",
"platform",
"==",
"'win32'",
")",
":",
"(",
"_temp1",
",",
"_temp2",
")",
"=",
"(",
"sys",
".",
"stdout",
",",
"sys",
".",
"stderr",
")",
"(",
"sys",
".",
"stdout",
",",
"sys",
".",
"stderr",
")",
"=",
"(",
"None",
",",
"None",
")",
"try",
":",
"result",
"=",
"QFileDialog",
".",
"getExistingDirectory",
"(",
"parent",
",",
"caption",
",",
"basedir",
",",
"options",
")",
"finally",
":",
"if",
"(",
"sys",
".",
"platform",
"==",
"'win32'",
")",
":",
"(",
"sys",
".",
"stdout",
",",
"sys",
".",
"stderr",
")",
"=",
"(",
"_temp1",
",",
"_temp2",
")",
"if",
"(",
"not",
"is_text_string",
"(",
"result",
")",
")",
":",
"result",
"=",
"to_text_string",
"(",
"result",
")",
"return",
"result"
] |
wrapper around qtgui .
|
train
| true
|
4,912
|
def make_callable_template(key, typer, recvr=None):
    def generic(self):
        return typer
    name = ('%s_CallableTemplate' % (key,))
    bases = (CallableTemplate,)
    class_dict = dict(key=key, generic=generic, recvr=recvr)
    return type(name, bases, class_dict)
|
[
"def",
"make_callable_template",
"(",
"key",
",",
"typer",
",",
"recvr",
"=",
"None",
")",
":",
"def",
"generic",
"(",
"self",
")",
":",
"return",
"typer",
"name",
"=",
"(",
"'%s_CallableTemplate'",
"%",
"(",
"key",
",",
")",
")",
"bases",
"=",
"(",
"CallableTemplate",
",",
")",
"class_dict",
"=",
"dict",
"(",
"key",
"=",
"key",
",",
"generic",
"=",
"generic",
",",
"recvr",
"=",
"recvr",
")",
"return",
"type",
"(",
"name",
",",
"bases",
",",
"class_dict",
")"
] |
create a callable template with the given key and typer function .
|
train
| false
|
4,914
|
@cache_permission
def can_use_mt(user, translation):
    if (not appsettings.MACHINE_TRANSLATION_ENABLED):
        return False
    if (not has_group_perm(user, 'trans.use_mt', translation)):
        return False
    if check_owner(user, translation.subproject.project, 'trans.use_mt'):
        return True
    return (can_translate(user, translation) or can_suggest(user, translation))
|
[
"@",
"cache_permission",
"def",
"can_use_mt",
"(",
"user",
",",
"translation",
")",
":",
"if",
"(",
"not",
"appsettings",
".",
"MACHINE_TRANSLATION_ENABLED",
")",
":",
"return",
"False",
"if",
"(",
"not",
"has_group_perm",
"(",
"user",
",",
"'trans.use_mt'",
",",
"translation",
")",
")",
":",
"return",
"False",
"if",
"check_owner",
"(",
"user",
",",
"translation",
".",
"subproject",
".",
"project",
",",
"'trans.use_mt'",
")",
":",
"return",
"True",
"return",
"(",
"can_translate",
"(",
"user",
",",
"translation",
")",
"or",
"can_suggest",
"(",
"user",
",",
"translation",
")",
")"
] |
checks whether user can use machine translation .
|
train
| false
|
4,916
|
def user_structure(user, site):
    full_name = user.get_full_name().split()
    first_name = full_name[0]
    try:
        last_name = full_name[1]
    except IndexError:
        last_name = ''
    return {'userid': user.pk, 'email': user.email, 'nickname': user.get_username(), 'lastname': last_name, 'firstname': first_name, 'url': ('%s://%s%s' % (PROTOCOL, site.domain, user.get_absolute_url()))}
|
[
"def",
"user_structure",
"(",
"user",
",",
"site",
")",
":",
"full_name",
"=",
"user",
".",
"get_full_name",
"(",
")",
".",
"split",
"(",
")",
"first_name",
"=",
"full_name",
"[",
"0",
"]",
"try",
":",
"last_name",
"=",
"full_name",
"[",
"1",
"]",
"except",
"IndexError",
":",
"last_name",
"=",
"''",
"return",
"{",
"'userid'",
":",
"user",
".",
"pk",
",",
"'email'",
":",
"user",
".",
"email",
",",
"'nickname'",
":",
"user",
".",
"get_username",
"(",
")",
",",
"'lastname'",
":",
"last_name",
",",
"'firstname'",
":",
"first_name",
",",
"'url'",
":",
"(",
"'%s://%s%s'",
"%",
"(",
"PROTOCOL",
",",
"site",
".",
"domain",
",",
"user",
".",
"get_absolute_url",
"(",
")",
")",
")",
"}"
] |
an user structure .
|
train
| true
|
4,918
|
def snapshot(name, suffix=None):
    return _virt_call(name, 'snapshot', 'saved', 'Snapshot has been taken', suffix=suffix)
|
[
"def",
"snapshot",
"(",
"name",
",",
"suffix",
"=",
"None",
")",
":",
"return",
"_virt_call",
"(",
"name",
",",
"'snapshot'",
",",
"'saved'",
",",
"'Snapshot has been taken'",
",",
"suffix",
"=",
"suffix",
")"
] |
create a snapshot .
|
train
| false
|
4,920
|
def all_living_collectors():
    for col in all_collectors():
        if (col.proc is not None):
            (yield col)
|
[
"def",
"all_living_collectors",
"(",
")",
":",
"for",
"col",
"in",
"all_collectors",
"(",
")",
":",
"if",
"(",
"col",
".",
"proc",
"is",
"not",
"None",
")",
":",
"(",
"yield",
"col",
")"
] |
generator to return all defined collectors that have an active process .
|
train
| false
|
4,921
|
def smart_urlquote_wrapper(matched_url):
    try:
        return smart_urlquote(matched_url)
    except ValueError:
        return None
|
[
"def",
"smart_urlquote_wrapper",
"(",
"matched_url",
")",
":",
"try",
":",
"return",
"smart_urlquote",
"(",
"matched_url",
")",
"except",
"ValueError",
":",
"return",
"None"
] |
simple wrapper for smart_urlquote .
|
train
| false
|
4,922
|
@hug.directive(apply_globally=True)
def my_directive_global(default=None, **kwargs):
    return default
|
[
"@",
"hug",
".",
"directive",
"(",
"apply_globally",
"=",
"True",
")",
"def",
"my_directive_global",
"(",
"default",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"return",
"default"
] |
for testing .
|
train
| false
|
4,923
|
def negotiateProtocol(serverProtocols, clientProtocols, clientOptions=None):
    (caCertificate, serverCertificate) = certificatesForAuthorityAndServer()
    trustRoot = sslverify.OpenSSLCertificateAuthorities([caCertificate.original])
    (sProto, cProto, pump) = loopbackTLSConnectionInMemory(trustRoot=trustRoot, privateKey=serverCertificate.privateKey.original, serverCertificate=serverCertificate.original, clientProtocols=clientProtocols, serverProtocols=serverProtocols, clientOptions=clientOptions)
    pump.flush()
    return (cProto.negotiatedProtocol, cProto.wrappedProtocol.lostReason)
|
[
"def",
"negotiateProtocol",
"(",
"serverProtocols",
",",
"clientProtocols",
",",
"clientOptions",
"=",
"None",
")",
":",
"(",
"caCertificate",
",",
"serverCertificate",
")",
"=",
"certificatesForAuthorityAndServer",
"(",
")",
"trustRoot",
"=",
"sslverify",
".",
"OpenSSLCertificateAuthorities",
"(",
"[",
"caCertificate",
".",
"original",
"]",
")",
"(",
"sProto",
",",
"cProto",
",",
"pump",
")",
"=",
"loopbackTLSConnectionInMemory",
"(",
"trustRoot",
"=",
"trustRoot",
",",
"privateKey",
"=",
"serverCertificate",
".",
"privateKey",
".",
"original",
",",
"serverCertificate",
"=",
"serverCertificate",
".",
"original",
",",
"clientProtocols",
"=",
"clientProtocols",
",",
"serverProtocols",
"=",
"serverProtocols",
",",
"clientOptions",
"=",
"clientOptions",
")",
"pump",
".",
"flush",
"(",
")",
"return",
"(",
"cProto",
".",
"negotiatedProtocol",
",",
"cProto",
".",
"wrappedProtocol",
".",
"lostReason",
")"
] |
create the tls connection and negotiate a next protocol .
|
train
| false
|
4,924
|
def test_show_legend():
    line = Line()
    line.add('_', [1, 2, 3])
    q = line.render_pyquery()
    assert (len(q('.legend')) == 1)
    line.show_legend = False
    q = line.render_pyquery()
    assert (len(q('.legend')) == 0)
|
[
"def",
"test_show_legend",
"(",
")",
":",
"line",
"=",
"Line",
"(",
")",
"line",
".",
"add",
"(",
"'_'",
",",
"[",
"1",
",",
"2",
",",
"3",
"]",
")",
"q",
"=",
"line",
".",
"render_pyquery",
"(",
")",
"assert",
"(",
"len",
"(",
"q",
"(",
"'.legend'",
")",
")",
"==",
"1",
")",
"line",
".",
"show_legend",
"=",
"False",
"q",
"=",
"line",
".",
"render_pyquery",
"(",
")",
"assert",
"(",
"len",
"(",
"q",
"(",
"'.legend'",
")",
")",
"==",
"0",
")"
] |
test show legend option .
|
train
| false
|
4,925
|
def create_collection(application):
    data = {'type': amo.COLLECTION_NORMAL, 'application': application, 'name': ('Collection %s' % abs(hash(datetime.now()))), 'addon_count': random.randint(200, 2000), 'subscribers': random.randint(1000, 5000), 'monthly_subscribers': random.randint(100, 500), 'weekly_subscribers': random.randint(10, 50), 'upvotes': random.randint(100, 500), 'downvotes': random.randint(100, 500), 'listed': True}
    c = Collection(**data)
    c.slug = slugify(data['name'])
    c.rating = ((c.upvotes - c.downvotes) * math.log((c.upvotes + c.downvotes)))
    c.created = c.modified = datetime(2014, 10, 27, random.randint(0, 23), random.randint(0, 59))
    c.save()
    return c
|
[
"def",
"create_collection",
"(",
"application",
")",
":",
"data",
"=",
"{",
"'type'",
":",
"amo",
".",
"COLLECTION_NORMAL",
",",
"'application'",
":",
"application",
",",
"'name'",
":",
"(",
"'Collection %s'",
"%",
"abs",
"(",
"hash",
"(",
"datetime",
".",
"now",
"(",
")",
")",
")",
")",
",",
"'addon_count'",
":",
"random",
".",
"randint",
"(",
"200",
",",
"2000",
")",
",",
"'subscribers'",
":",
"random",
".",
"randint",
"(",
"1000",
",",
"5000",
")",
",",
"'monthly_subscribers'",
":",
"random",
".",
"randint",
"(",
"100",
",",
"500",
")",
",",
"'weekly_subscribers'",
":",
"random",
".",
"randint",
"(",
"10",
",",
"50",
")",
",",
"'upvotes'",
":",
"random",
".",
"randint",
"(",
"100",
",",
"500",
")",
",",
"'downvotes'",
":",
"random",
".",
"randint",
"(",
"100",
",",
"500",
")",
",",
"'listed'",
":",
"True",
"}",
"c",
"=",
"Collection",
"(",
"**",
"data",
")",
"c",
".",
"slug",
"=",
"slugify",
"(",
"data",
"[",
"'name'",
"]",
")",
"c",
".",
"rating",
"=",
"(",
"(",
"c",
".",
"upvotes",
"-",
"c",
".",
"downvotes",
")",
"*",
"math",
".",
"log",
"(",
"(",
"c",
".",
"upvotes",
"+",
"c",
".",
"downvotes",
")",
")",
")",
"c",
".",
"created",
"=",
"c",
".",
"modified",
"=",
"datetime",
"(",
"2014",
",",
"10",
",",
"27",
",",
"random",
".",
"randint",
"(",
"0",
",",
"23",
")",
",",
"random",
".",
"randint",
"(",
"0",
",",
"59",
")",
")",
"c",
".",
"save",
"(",
")",
"return",
"c"
] |
creates a new :class:collection with the given label and alias and returns it .
|
train
| false
|
4,926
|
def GetMovingImages(ListOfImagesDictionaries, registrationImageTypes, interpolationMapping):
    if (len(registrationImageTypes) != 1):
        print('ERROR: Multivariate imageing not supported yet!')
        return []
    moving_images = [mdict[registrationImageTypes[0]] for mdict in ListOfImagesDictionaries]
    moving_interpolation_type = interpolationMapping[registrationImageTypes[0]]
    return (moving_images, moving_interpolation_type)
|
[
"def",
"GetMovingImages",
"(",
"ListOfImagesDictionaries",
",",
"registrationImageTypes",
",",
"interpolationMapping",
")",
":",
"if",
"(",
"len",
"(",
"registrationImageTypes",
")",
"!=",
"1",
")",
":",
"print",
"(",
"'ERROR: Multivariate imageing not supported yet!'",
")",
"return",
"[",
"]",
"moving_images",
"=",
"[",
"mdict",
"[",
"registrationImageTypes",
"[",
"0",
"]",
"]",
"for",
"mdict",
"in",
"ListOfImagesDictionaries",
"]",
"moving_interpolation_type",
"=",
"interpolationMapping",
"[",
"registrationImageTypes",
"[",
"0",
"]",
"]",
"return",
"(",
"moving_images",
",",
"moving_interpolation_type",
")"
] |
this currently only works when registrationimagetypes has length of exactly 1 .
|
train
| false
|
4,928
|
def _change_access(course, user, level, action, send_email=True):
    try:
        role = ROLES[level](course.id)
    except KeyError:
        raise ValueError("unrecognized level '{}'".format(level))
    if (action == 'allow'):
        if (level == 'ccx_coach'):
            email_params = get_email_params(course, True)
            enroll_email(course_id=course.id, student_email=user.email, auto_enroll=True, email_students=send_email, email_params=email_params)
        role.add_users(user)
    elif (action == 'revoke'):
        role.remove_users(user)
    else:
        raise ValueError("unrecognized action '{}'".format(action))
|
[
"def",
"_change_access",
"(",
"course",
",",
"user",
",",
"level",
",",
"action",
",",
"send_email",
"=",
"True",
")",
":",
"try",
":",
"role",
"=",
"ROLES",
"[",
"level",
"]",
"(",
"course",
".",
"id",
")",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"\"unrecognized level '{}'\"",
".",
"format",
"(",
"level",
")",
")",
"if",
"(",
"action",
"==",
"'allow'",
")",
":",
"if",
"(",
"level",
"==",
"'ccx_coach'",
")",
":",
"email_params",
"=",
"get_email_params",
"(",
"course",
",",
"True",
")",
"enroll_email",
"(",
"course_id",
"=",
"course",
".",
"id",
",",
"student_email",
"=",
"user",
".",
"email",
",",
"auto_enroll",
"=",
"True",
",",
"email_students",
"=",
"send_email",
",",
"email_params",
"=",
"email_params",
")",
"role",
".",
"add_users",
"(",
"user",
")",
"elif",
"(",
"action",
"==",
"'revoke'",
")",
":",
"role",
".",
"remove_users",
"(",
"user",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"unrecognized action '{}'\"",
".",
"format",
"(",
"action",
")",
")"
] |
change access of user .
|
train
| false
|
4,929
|
@XFAIL
def test_conditionset_equality():
assert (solveset(Eq(tan(x), y), x) == ConditionSet(x, Eq(tan(x), y), S.Complexes))
|
[
"@",
"XFAIL",
"def",
"test_conditionset_equality",
"(",
")",
":",
"assert",
"(",
"solveset",
"(",
"Eq",
"(",
"tan",
"(",
"x",
")",
",",
"y",
")",
",",
"x",
")",
"==",
"ConditionSet",
"(",
"x",
",",
"Eq",
"(",
"tan",
"(",
"x",
")",
",",
"y",
")",
",",
"S",
".",
"Complexes",
")",
")"
] |
checking equality of different representations of conditionset .
|
train
| false
|
4,930
|
def _read_comp_coeff(fid, d):
d['ncomp'] = _read_int2(fid)
d['comp'] = list()
for k in range(d['ncomp']):
comp = dict()
d['comp'].append(comp)
comp['sensor_name'] = _read_string(fid, 32)
comp['coeff_type'] = _read_int(fid)
_read_int(fid)
comp['ncoeff'] = _read_int2(fid)
comp['coeffs'] = np.zeros(comp['ncoeff'])
comp['sensors'] = [_read_string(fid, CTF.CTFV_SENSOR_LABEL) for p in range(comp['ncoeff'])]
unused = (CTF.CTFV_MAX_BALANCING - comp['ncoeff'])
comp['sensors'] += ([''] * unused)
fid.seek((unused * CTF.CTFV_SENSOR_LABEL), 1)
comp['coeffs'][:comp['ncoeff']] = _read_double(fid, comp['ncoeff'])
fid.seek((unused * 8), 1)
comp['scanno'] = d['ch_names'].index(comp['sensor_name'])
|
[
"def",
"_read_comp_coeff",
"(",
"fid",
",",
"d",
")",
":",
"d",
"[",
"'ncomp'",
"]",
"=",
"_read_int2",
"(",
"fid",
")",
"d",
"[",
"'comp'",
"]",
"=",
"list",
"(",
")",
"for",
"k",
"in",
"range",
"(",
"d",
"[",
"'ncomp'",
"]",
")",
":",
"comp",
"=",
"dict",
"(",
")",
"d",
"[",
"'comp'",
"]",
".",
"append",
"(",
"comp",
")",
"comp",
"[",
"'sensor_name'",
"]",
"=",
"_read_string",
"(",
"fid",
",",
"32",
")",
"comp",
"[",
"'coeff_type'",
"]",
"=",
"_read_int",
"(",
"fid",
")",
"_read_int",
"(",
"fid",
")",
"comp",
"[",
"'ncoeff'",
"]",
"=",
"_read_int2",
"(",
"fid",
")",
"comp",
"[",
"'coeffs'",
"]",
"=",
"np",
".",
"zeros",
"(",
"comp",
"[",
"'ncoeff'",
"]",
")",
"comp",
"[",
"'sensors'",
"]",
"=",
"[",
"_read_string",
"(",
"fid",
",",
"CTF",
".",
"CTFV_SENSOR_LABEL",
")",
"for",
"p",
"in",
"range",
"(",
"comp",
"[",
"'ncoeff'",
"]",
")",
"]",
"unused",
"=",
"(",
"CTF",
".",
"CTFV_MAX_BALANCING",
"-",
"comp",
"[",
"'ncoeff'",
"]",
")",
"comp",
"[",
"'sensors'",
"]",
"+=",
"(",
"[",
"''",
"]",
"*",
"unused",
")",
"fid",
".",
"seek",
"(",
"(",
"unused",
"*",
"CTF",
".",
"CTFV_SENSOR_LABEL",
")",
",",
"1",
")",
"comp",
"[",
"'coeffs'",
"]",
"[",
":",
"comp",
"[",
"'ncoeff'",
"]",
"]",
"=",
"_read_double",
"(",
"fid",
",",
"comp",
"[",
"'ncoeff'",
"]",
")",
"fid",
".",
"seek",
"(",
"(",
"unused",
"*",
"8",
")",
",",
"1",
")",
"comp",
"[",
"'scanno'",
"]",
"=",
"d",
"[",
"'ch_names'",
"]",
".",
"index",
"(",
"comp",
"[",
"'sensor_name'",
"]",
")"
] |
read compensation coefficients .
|
train
| false
|
4,931
|
def generate_request_for_request_token(consumer_key, signature_type, scopes, rsa_key=None, consumer_secret=None, auth_server_url=REQUEST_TOKEN_URL, next='oob', version='1.0'):
request = atom.http_core.HttpRequest(auth_server_url, 'POST')
if scopes:
request.uri.query['scope'] = ' '.join(scopes)
timestamp = str(int(time.time()))
nonce = ''.join([str(random.randint(0, 9)) for i in xrange(15)])
signature = None
if (signature_type == HMAC_SHA1):
signature = generate_hmac_signature(request, consumer_key, consumer_secret, timestamp, nonce, version, next=next)
elif (signature_type == RSA_SHA1):
signature = generate_rsa_signature(request, consumer_key, rsa_key, timestamp, nonce, version, next=next)
else:
return None
request.headers['Authorization'] = generate_auth_header(consumer_key, timestamp, nonce, signature_type, signature, version, next)
request.headers['Content-Length'] = '0'
return request
|
[
"def",
"generate_request_for_request_token",
"(",
"consumer_key",
",",
"signature_type",
",",
"scopes",
",",
"rsa_key",
"=",
"None",
",",
"consumer_secret",
"=",
"None",
",",
"auth_server_url",
"=",
"REQUEST_TOKEN_URL",
",",
"next",
"=",
"'oob'",
",",
"version",
"=",
"'1.0'",
")",
":",
"request",
"=",
"atom",
".",
"http_core",
".",
"HttpRequest",
"(",
"auth_server_url",
",",
"'POST'",
")",
"if",
"scopes",
":",
"request",
".",
"uri",
".",
"query",
"[",
"'scope'",
"]",
"=",
"' '",
".",
"join",
"(",
"scopes",
")",
"timestamp",
"=",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
"nonce",
"=",
"''",
".",
"join",
"(",
"[",
"str",
"(",
"random",
".",
"randint",
"(",
"0",
",",
"9",
")",
")",
"for",
"i",
"in",
"xrange",
"(",
"15",
")",
"]",
")",
"signature",
"=",
"None",
"if",
"(",
"signature_type",
"==",
"HMAC_SHA1",
")",
":",
"signature",
"=",
"generate_hmac_signature",
"(",
"request",
",",
"consumer_key",
",",
"consumer_secret",
",",
"timestamp",
",",
"nonce",
",",
"version",
",",
"next",
"=",
"next",
")",
"elif",
"(",
"signature_type",
"==",
"RSA_SHA1",
")",
":",
"signature",
"=",
"generate_rsa_signature",
"(",
"request",
",",
"consumer_key",
",",
"rsa_key",
",",
"timestamp",
",",
"nonce",
",",
"version",
",",
"next",
"=",
"next",
")",
"else",
":",
"return",
"None",
"request",
".",
"headers",
"[",
"'Authorization'",
"]",
"=",
"generate_auth_header",
"(",
"consumer_key",
",",
"timestamp",
",",
"nonce",
",",
"signature_type",
",",
"signature",
",",
"version",
",",
"next",
")",
"request",
".",
"headers",
"[",
"'Content-Length'",
"]",
"=",
"'0'",
"return",
"request"
] |
creates request to be sent to auth server to get an oauth request token .
|
train
| false
|
4,932
|
def test_hcae_basic():
with open(os.path.join(yaml_dir_path, 'hcae.yaml')) as f:
yaml_string = f.read()
train = yaml_parse.load(yaml_string)
train.main_loop()
|
[
"def",
"test_hcae_basic",
"(",
")",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"yaml_dir_path",
",",
"'hcae.yaml'",
")",
")",
"as",
"f",
":",
"yaml_string",
"=",
"f",
".",
"read",
"(",
")",
"train",
"=",
"yaml_parse",
".",
"load",
"(",
"yaml_string",
")",
"train",
".",
"main_loop",
"(",
")"
] |
tests that we can load a higher order contractive autoencoder and train it for a few epochs on a dummy dataset-- tiny model and dataset .
|
train
| false
|
4,933
|
def discretize(vectors, copy=True, max_svd_restarts=30, n_iter_max=20, random_state=None):
from scipy.sparse import csc_matrix
from scipy.linalg import LinAlgError
random_state = check_random_state(random_state)
vectors = as_float_array(vectors, copy=copy)
eps = np.finfo(float).eps
(n_samples, n_components) = vectors.shape
norm_ones = np.sqrt(n_samples)
for i in range(vectors.shape[1]):
vectors[:, i] = ((vectors[:, i] / norm(vectors[:, i])) * norm_ones)
if (vectors[(0, i)] != 0):
vectors[:, i] = (((-1) * vectors[:, i]) * np.sign(vectors[(0, i)]))
vectors = (vectors / np.sqrt((vectors ** 2).sum(axis=1))[:, np.newaxis])
svd_restarts = 0
has_converged = False
while ((svd_restarts < max_svd_restarts) and (not has_converged)):
rotation = np.zeros((n_components, n_components))
rotation[:, 0] = vectors[random_state.randint(n_samples), :].T
c = np.zeros(n_samples)
for j in range(1, n_components):
c += np.abs(np.dot(vectors, rotation[:, (j - 1)]))
rotation[:, j] = vectors[c.argmin(), :].T
last_objective_value = 0.0
n_iter = 0
while (not has_converged):
n_iter += 1
t_discrete = np.dot(vectors, rotation)
labels = t_discrete.argmax(axis=1)
vectors_discrete = csc_matrix((np.ones(len(labels)), (np.arange(0, n_samples), labels)), shape=(n_samples, n_components))
t_svd = (vectors_discrete.T * vectors)
try:
(U, S, Vh) = np.linalg.svd(t_svd)
svd_restarts += 1
except LinAlgError:
print 'SVD did not converge, randomizing and trying again'
break
ncut_value = (2.0 * (n_samples - S.sum()))
if ((abs((ncut_value - last_objective_value)) < eps) or (n_iter > n_iter_max)):
has_converged = True
else:
last_objective_value = ncut_value
rotation = np.dot(Vh.T, U.T)
if (not has_converged):
raise LinAlgError('SVD did not converge')
return labels
|
[
"def",
"discretize",
"(",
"vectors",
",",
"copy",
"=",
"True",
",",
"max_svd_restarts",
"=",
"30",
",",
"n_iter_max",
"=",
"20",
",",
"random_state",
"=",
"None",
")",
":",
"from",
"scipy",
".",
"sparse",
"import",
"csc_matrix",
"from",
"scipy",
".",
"linalg",
"import",
"LinAlgError",
"random_state",
"=",
"check_random_state",
"(",
"random_state",
")",
"vectors",
"=",
"as_float_array",
"(",
"vectors",
",",
"copy",
"=",
"copy",
")",
"eps",
"=",
"np",
".",
"finfo",
"(",
"float",
")",
".",
"eps",
"(",
"n_samples",
",",
"n_components",
")",
"=",
"vectors",
".",
"shape",
"norm_ones",
"=",
"np",
".",
"sqrt",
"(",
"n_samples",
")",
"for",
"i",
"in",
"range",
"(",
"vectors",
".",
"shape",
"[",
"1",
"]",
")",
":",
"vectors",
"[",
":",
",",
"i",
"]",
"=",
"(",
"(",
"vectors",
"[",
":",
",",
"i",
"]",
"/",
"norm",
"(",
"vectors",
"[",
":",
",",
"i",
"]",
")",
")",
"*",
"norm_ones",
")",
"if",
"(",
"vectors",
"[",
"(",
"0",
",",
"i",
")",
"]",
"!=",
"0",
")",
":",
"vectors",
"[",
":",
",",
"i",
"]",
"=",
"(",
"(",
"(",
"-",
"1",
")",
"*",
"vectors",
"[",
":",
",",
"i",
"]",
")",
"*",
"np",
".",
"sign",
"(",
"vectors",
"[",
"(",
"0",
",",
"i",
")",
"]",
")",
")",
"vectors",
"=",
"(",
"vectors",
"/",
"np",
".",
"sqrt",
"(",
"(",
"vectors",
"**",
"2",
")",
".",
"sum",
"(",
"axis",
"=",
"1",
")",
")",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
")",
"svd_restarts",
"=",
"0",
"has_converged",
"=",
"False",
"while",
"(",
"(",
"svd_restarts",
"<",
"max_svd_restarts",
")",
"and",
"(",
"not",
"has_converged",
")",
")",
":",
"rotation",
"=",
"np",
".",
"zeros",
"(",
"(",
"n_components",
",",
"n_components",
")",
")",
"rotation",
"[",
":",
",",
"0",
"]",
"=",
"vectors",
"[",
"random_state",
".",
"randint",
"(",
"n_samples",
")",
",",
":",
"]",
".",
"T",
"c",
"=",
"np",
".",
"zeros",
"(",
"n_samples",
")",
"for",
"j",
"in",
"range",
"(",
"1",
",",
"n_components",
")",
":",
"c",
"+=",
"np",
".",
"abs",
"(",
"np",
".",
"dot",
"(",
"vectors",
",",
"rotation",
"[",
":",
",",
"(",
"j",
"-",
"1",
")",
"]",
")",
")",
"rotation",
"[",
":",
",",
"j",
"]",
"=",
"vectors",
"[",
"c",
".",
"argmin",
"(",
")",
",",
":",
"]",
".",
"T",
"last_objective_value",
"=",
"0.0",
"n_iter",
"=",
"0",
"while",
"(",
"not",
"has_converged",
")",
":",
"n_iter",
"+=",
"1",
"t_discrete",
"=",
"np",
".",
"dot",
"(",
"vectors",
",",
"rotation",
")",
"labels",
"=",
"t_discrete",
".",
"argmax",
"(",
"axis",
"=",
"1",
")",
"vectors_discrete",
"=",
"csc_matrix",
"(",
"(",
"np",
".",
"ones",
"(",
"len",
"(",
"labels",
")",
")",
",",
"(",
"np",
".",
"arange",
"(",
"0",
",",
"n_samples",
")",
",",
"labels",
")",
")",
",",
"shape",
"=",
"(",
"n_samples",
",",
"n_components",
")",
")",
"t_svd",
"=",
"(",
"vectors_discrete",
".",
"T",
"*",
"vectors",
")",
"try",
":",
"(",
"U",
",",
"S",
",",
"Vh",
")",
"=",
"np",
".",
"linalg",
".",
"svd",
"(",
"t_svd",
")",
"svd_restarts",
"+=",
"1",
"except",
"LinAlgError",
":",
"print",
"'SVD did not converge, randomizing and trying again'",
"break",
"ncut_value",
"=",
"(",
"2.0",
"*",
"(",
"n_samples",
"-",
"S",
".",
"sum",
"(",
")",
")",
")",
"if",
"(",
"(",
"abs",
"(",
"(",
"ncut_value",
"-",
"last_objective_value",
")",
")",
"<",
"eps",
")",
"or",
"(",
"n_iter",
">",
"n_iter_max",
")",
")",
":",
"has_converged",
"=",
"True",
"else",
":",
"last_objective_value",
"=",
"ncut_value",
"rotation",
"=",
"np",
".",
"dot",
"(",
"Vh",
".",
"T",
",",
"U",
".",
"T",
")",
"if",
"(",
"not",
"has_converged",
")",
":",
"raise",
"LinAlgError",
"(",
"'SVD did not converge'",
")",
"return",
"labels"
] |
search for a partition matrix (clustering) which is closest to the eigenvector embedding .
|
train
| false
|
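A minimal usage sketch for discretize above (illustrative; assumes the snippet's Python 2 context and that the embedding has already been computed, e.g. by a spectral embedding step):

    import numpy as np
    rng = np.random.RandomState(0)
    # embedding: rows are samples, columns are leading eigenvectors (n_samples x n_clusters)
    embedding = rng.rand(100, 3)
    labels = discretize(embedding, random_state=0)
    assert labels.shape == (100,)        # one integer cluster label per sample
    assert set(labels) <= {0, 1, 2}      # labels index the n_components columns
 |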
4,934
|
def read_torrent_name(torrent_file, default_name=None):
try:
with open(torrent_file, 'rb') as fp:
torrent_info = bdecode(fp.read())
except IOError as e:
logger.error('Unable to open torrent file: %s', torrent_file)
return
if torrent_info:
try:
return torrent_info['info']['name']
except KeyError:
if default_name:
logger.warning("Couldn't get name from torrent file: %s. Defaulting to '%s'", e, default_name)
else:
logger.warning("Couldn't get name from torrent file: %s. No default given", e)
return default_name
|
[
"def",
"read_torrent_name",
"(",
"torrent_file",
",",
"default_name",
"=",
"None",
")",
":",
"try",
":",
"with",
"open",
"(",
"torrent_file",
",",
"'rb'",
")",
"as",
"fp",
":",
"torrent_info",
"=",
"bdecode",
"(",
"fp",
".",
"read",
"(",
")",
")",
"except",
"IOError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"'Unable to open torrent file: %s'",
",",
"torrent_file",
")",
"return",
"if",
"torrent_info",
":",
"try",
":",
"return",
"torrent_info",
"[",
"'info'",
"]",
"[",
"'name'",
"]",
"except",
"KeyError",
":",
"if",
"default_name",
":",
"logger",
".",
"warning",
"(",
"\"Couldn't get name from torrent file: %s. Defaulting to '%s'\"",
",",
"e",
",",
"default_name",
")",
"else",
":",
"logger",
".",
"warning",
"(",
"\"Couldn't get name from torrent file: %s. No default given\"",
",",
"e",
")",
"return",
"default_name"
] |
read the torrent file and return the torrent name .
|
train
| false
|
4,935
|
@pytest.mark.django_db
def test_add_store_fs_by_path(po_directory, english):
project = ProjectDBFactory(source_language=english)
language = LanguageDBFactory()
tp = TranslationProjectFactory(project=project, language=language)
store = StoreDBFactory(translation_project=tp, parent=tp.directory, name='example_store.po')
conf = config.get(tp.project.__class__, instance=tp.project)
conf.set_config('pootle_fs.fs_type', 'localfs')
conf.set_config('pootle_fs.fs_url', 'foo')
fs_path = '/some/fs/example_store.po'
pootle_path = store.pootle_path
store_fs = StoreFS.objects.create(pootle_path=pootle_path, path=fs_path)
assert (store_fs.project == project)
assert (store_fs.store == store)
assert (store_fs.pootle_path == pootle_path)
assert (store_fs.path == fs_path)
assert (store_fs.last_sync_hash is None)
assert (store_fs.last_sync_mtime is None)
assert (store_fs.last_sync_revision is None)
|
[
"@",
"pytest",
".",
"mark",
".",
"django_db",
"def",
"test_add_store_fs_by_path",
"(",
"po_directory",
",",
"english",
")",
":",
"project",
"=",
"ProjectDBFactory",
"(",
"source_language",
"=",
"english",
")",
"language",
"=",
"LanguageDBFactory",
"(",
")",
"tp",
"=",
"TranslationProjectFactory",
"(",
"project",
"=",
"project",
",",
"language",
"=",
"language",
")",
"store",
"=",
"StoreDBFactory",
"(",
"translation_project",
"=",
"tp",
",",
"parent",
"=",
"tp",
".",
"directory",
",",
"name",
"=",
"'example_store.po'",
")",
"conf",
"=",
"config",
".",
"get",
"(",
"tp",
".",
"project",
".",
"__class__",
",",
"instance",
"=",
"tp",
".",
"project",
")",
"conf",
".",
"set_config",
"(",
"'pootle_fs.fs_type'",
",",
"'localfs'",
")",
"conf",
".",
"set_config",
"(",
"'pootle_fs.fs_url'",
",",
"'foo'",
")",
"fs_path",
"=",
"'/some/fs/example_store.po'",
"pootle_path",
"=",
"store",
".",
"pootle_path",
"store_fs",
"=",
"StoreFS",
".",
"objects",
".",
"create",
"(",
"pootle_path",
"=",
"pootle_path",
",",
"path",
"=",
"fs_path",
")",
"assert",
"(",
"store_fs",
".",
"project",
"==",
"project",
")",
"assert",
"(",
"store_fs",
".",
"store",
"==",
"store",
")",
"assert",
"(",
"store_fs",
".",
"pootle_path",
"==",
"pootle_path",
")",
"assert",
"(",
"store_fs",
".",
"path",
"==",
"fs_path",
")",
"assert",
"(",
"store_fs",
".",
"last_sync_hash",
"is",
"None",
")",
"assert",
"(",
"store_fs",
".",
"last_sync_mtime",
"is",
"None",
")",
"assert",
"(",
"store_fs",
".",
"last_sync_revision",
"is",
"None",
")"
] |
add a store_fs for pootle_path .
|
train
| false
|
4,938
|
def lazyprop(func):
attr_name = ('_lazy_' + func.__name__)
@property
def _lazyprop(self):
'A lazily evaluated propery.\n '
if (not hasattr(self, attr_name)):
setattr(self, attr_name, func(self))
return getattr(self, attr_name)
return _lazyprop
|
[
"def",
"lazyprop",
"(",
"func",
")",
":",
"attr_name",
"=",
"(",
"'_lazy_'",
"+",
"func",
".",
"__name__",
")",
"@",
"property",
"def",
"_lazyprop",
"(",
"self",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"self",
",",
"attr_name",
")",
")",
":",
"setattr",
"(",
"self",
",",
"attr_name",
",",
"func",
"(",
"self",
")",
")",
"return",
"getattr",
"(",
"self",
",",
"attr_name",
")",
"return",
"_lazyprop"
] |
wraps a property so it is lazily evaluated .
|
train
| false
|
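A minimal usage sketch for lazyprop (illustrative):

    class Circle(object):
        def __init__(self, radius):
            self.radius = radius

        @lazyprop
        def area(self):
            return 3.14159 * self.radius ** 2

    c = Circle(2.0)
    c.area   # computed on first access and cached on the instance as _lazy_area
    c.area   # second access returns the cached value without recomputing
 |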
4,940
|
def xhtml_escape(value):
return _XHTML_ESCAPE_RE.sub((lambda match: _XHTML_ESCAPE_DICT[match.group(0)]), to_basestring(value))
|
[
"def",
"xhtml_escape",
"(",
"value",
")",
":",
"return",
"_XHTML_ESCAPE_RE",
".",
"sub",
"(",
"(",
"lambda",
"match",
":",
"_XHTML_ESCAPE_DICT",
"[",
"match",
".",
"group",
"(",
"0",
")",
"]",
")",
",",
"to_basestring",
"(",
"value",
")",
")"
] |
escapes a string so it is valid within html or xml .
|
train
| true
|
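A usage sketch for xhtml_escape (the escape map is not shown in this record; this assumes the usual tornado map covering &, <, >, double and single quotes):

    >>> xhtml_escape('<a href="x">Tom & Jerry</a>')
    '&lt;a href=&quot;x&quot;&gt;Tom &amp; Jerry&lt;/a&gt;'
 |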
4,941
|
def endpts_to_intervals(endpts):
length = len(endpts)
if (not (isinstance(endpts, tuple) or isinstance(endpts, list))):
raise exceptions.PlotlyError('The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.')
for item in endpts:
if isinstance(item, str):
raise exceptions.PlotlyError('The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.')
for k in range((length - 1)):
if (endpts[k] >= endpts[(k + 1)]):
raise exceptions.PlotlyError('The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.')
else:
intervals = []
intervals.append([float('-inf'), endpts[0]])
for k in range((length - 1)):
interval = []
interval.append(endpts[k])
interval.append(endpts[(k + 1)])
intervals.append(interval)
intervals.append([endpts[(length - 1)], float('inf')])
return intervals
|
[
"def",
"endpts_to_intervals",
"(",
"endpts",
")",
":",
"length",
"=",
"len",
"(",
"endpts",
")",
"if",
"(",
"not",
"(",
"isinstance",
"(",
"endpts",
",",
"tuple",
")",
"or",
"isinstance",
"(",
"endpts",
",",
"list",
")",
")",
")",
":",
"raise",
"exceptions",
".",
"PlotlyError",
"(",
"'The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.'",
")",
"for",
"item",
"in",
"endpts",
":",
"if",
"isinstance",
"(",
"item",
",",
"str",
")",
":",
"raise",
"exceptions",
".",
"PlotlyError",
"(",
"'The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.'",
")",
"for",
"k",
"in",
"range",
"(",
"(",
"length",
"-",
"1",
")",
")",
":",
"if",
"(",
"endpts",
"[",
"k",
"]",
">=",
"endpts",
"[",
"(",
"k",
"+",
"1",
")",
"]",
")",
":",
"raise",
"exceptions",
".",
"PlotlyError",
"(",
"'The intervals_endpts argument must be a list or tuple of a sequence of increasing numbers.'",
")",
"else",
":",
"intervals",
"=",
"[",
"]",
"intervals",
".",
"append",
"(",
"[",
"float",
"(",
"'-inf'",
")",
",",
"endpts",
"[",
"0",
"]",
"]",
")",
"for",
"k",
"in",
"range",
"(",
"(",
"length",
"-",
"1",
")",
")",
":",
"interval",
"=",
"[",
"]",
"interval",
".",
"append",
"(",
"endpts",
"[",
"k",
"]",
")",
"interval",
".",
"append",
"(",
"endpts",
"[",
"(",
"k",
"+",
"1",
")",
"]",
")",
"intervals",
".",
"append",
"(",
"interval",
")",
"intervals",
".",
"append",
"(",
"[",
"endpts",
"[",
"(",
"length",
"-",
"1",
")",
"]",
",",
"float",
"(",
"'inf'",
")",
"]",
")",
"return",
"intervals"
] |
returns a list of intervals for categorical colormaps ; accepts a list or tuple of sequentially increasing numbers and returns a list representation of the mathematical intervals with these numbers as endpoints .
|
train
| false
|
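A usage sketch for endpts_to_intervals (illustrative):

    >>> endpts_to_intervals([2, 5, 10])
    [[-inf, 2], [2, 5], [5, 10], [10, inf]]
 |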
4,942
|
def vimeo(link):
pattern = 'https?:\\/\\/vimeo\\.com\\/([\\d]+)'
match = re.match(pattern, link)
if (not match):
return None
return ('https://player.vimeo.com/video/%s' % match.group(1))
|
[
"def",
"vimeo",
"(",
"link",
")",
":",
"pattern",
"=",
"'https?:\\\\/\\\\/vimeo\\\\.com\\\\/([\\\\d]+)'",
"match",
"=",
"re",
".",
"match",
"(",
"pattern",
",",
"link",
")",
"if",
"(",
"not",
"match",
")",
":",
"return",
"None",
"return",
"(",
"'https://player.vimeo.com/video/%s'",
"%",
"match",
".",
"group",
"(",
"1",
")",
")"
] |
find vimeo player url .
|
train
| false
|
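A usage sketch for vimeo (illustrative video id):

    >>> vimeo('https://vimeo.com/76979871')
    'https://player.vimeo.com/video/76979871'
    >>> vimeo('https://example.com/76979871') is None
    True
 |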
4,943
|
def _escape_value(value):
return value.replace('\\', '\\\\').replace('\n', '\\n').replace('\t', '\\t').replace('"', '\\"')
|
[
"def",
"_escape_value",
"(",
"value",
")",
":",
"return",
"value",
".",
"replace",
"(",
"'\\\\'",
",",
"'\\\\\\\\'",
")",
".",
"replace",
"(",
"'\\n'",
",",
"'\\\\n'",
")",
".",
"replace",
"(",
"'\\t'",
",",
"'\\\\t'",
")",
".",
"replace",
"(",
"'\"'",
",",
"'\\\\\"'",
")"
] |
escape a value .
|
train
| false
|
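A usage sketch for _escape_value with the tab replacement shown above (illustrative):

    >>> _escape_value('say "hi"\tnow\n')
    'say \\"hi\\"\\tnow\\n'
 |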
4,944
|
def Die(name, sides=6):
return rv(name, DieDistribution, sides)
|
[
"def",
"Die",
"(",
"name",
",",
"sides",
"=",
"6",
")",
":",
"return",
"rv",
"(",
"name",
",",
"DieDistribution",
",",
"sides",
")"
] |
create a finite random variable representing a fair die .
|
train
| false
|
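A usage sketch for Die, as exposed in sympy.stats (illustrative):

    >>> from sympy.stats import Die, P, E
    >>> D = Die('D', 6)
    >>> P(D > 4)
    1/3
    >>> E(D)
    7/2
 |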
4,947
|
@pytest.mark.parametrize('arg', ['', 'arg'])
def test_setup_teardown_function_level_with_optional_argument(testdir, monkeypatch, arg):
import sys
trace_setups_teardowns = []
monkeypatch.setattr(sys, 'trace_setups_teardowns', trace_setups_teardowns, raising=False)
p = testdir.makepyfile("\n import pytest\n import sys\n\n trace = sys.trace_setups_teardowns.append\n\n def setup_module({arg}): trace('setup_module')\n def teardown_module({arg}): trace('teardown_module')\n\n def setup_function({arg}): trace('setup_function')\n def teardown_function({arg}): trace('teardown_function')\n\n def test_function_1(): pass\n def test_function_2(): pass\n\n class Test:\n def setup_method(self, {arg}): trace('setup_method')\n def teardown_method(self, {arg}): trace('teardown_method')\n\n def test_method_1(self): pass\n def test_method_2(self): pass\n ".format(arg=arg))
result = testdir.inline_run(p)
result.assertoutcome(passed=4)
expected = ['setup_module', 'setup_function', 'teardown_function', 'setup_function', 'teardown_function', 'setup_method', 'teardown_method', 'setup_method', 'teardown_method', 'teardown_module']
assert (trace_setups_teardowns == expected)
|
[
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'arg'",
",",
"[",
"''",
",",
"'arg'",
"]",
")",
"def",
"test_setup_teardown_function_level_with_optional_argument",
"(",
"testdir",
",",
"monkeypatch",
",",
"arg",
")",
":",
"import",
"sys",
"trace_setups_teardowns",
"=",
"[",
"]",
"monkeypatch",
".",
"setattr",
"(",
"sys",
",",
"'trace_setups_teardowns'",
",",
"trace_setups_teardowns",
",",
"raising",
"=",
"False",
")",
"p",
"=",
"testdir",
".",
"makepyfile",
"(",
"\"\\n import pytest\\n import sys\\n\\n trace = sys.trace_setups_teardowns.append\\n\\n def setup_module({arg}): trace('setup_module')\\n def teardown_module({arg}): trace('teardown_module')\\n\\n def setup_function({arg}): trace('setup_function')\\n def teardown_function({arg}): trace('teardown_function')\\n\\n def test_function_1(): pass\\n def test_function_2(): pass\\n\\n class Test:\\n def setup_method(self, {arg}): trace('setup_method')\\n def teardown_method(self, {arg}): trace('teardown_method')\\n\\n def test_method_1(self): pass\\n def test_method_2(self): pass\\n \"",
".",
"format",
"(",
"arg",
"=",
"arg",
")",
")",
"result",
"=",
"testdir",
".",
"inline_run",
"(",
"p",
")",
"result",
".",
"assertoutcome",
"(",
"passed",
"=",
"4",
")",
"expected",
"=",
"[",
"'setup_module'",
",",
"'setup_function'",
",",
"'teardown_function'",
",",
"'setup_function'",
",",
"'teardown_function'",
",",
"'setup_method'",
",",
"'teardown_method'",
",",
"'setup_method'",
",",
"'teardown_method'",
",",
"'teardown_module'",
"]",
"assert",
"(",
"trace_setups_teardowns",
"==",
"expected",
")"
] |
parameter to setup/teardown xunit-style functions is now optional .
|
train
| false
|
4,948
|
def tower_loss(scope):
(images, labels) = cifar10.distorted_inputs()
logits = cifar10.inference(images)
_ = cifar10.loss(logits, labels)
losses = tf.get_collection('losses', scope)
total_loss = tf.add_n(losses, name='total_loss')
for l in (losses + [total_loss]):
loss_name = re.sub(('%s_[0-9]*/' % cifar10.TOWER_NAME), '', l.op.name)
tf.scalar_summary(loss_name, l)
return total_loss
|
[
"def",
"tower_loss",
"(",
"scope",
")",
":",
"(",
"images",
",",
"labels",
")",
"=",
"cifar10",
".",
"distorted_inputs",
"(",
")",
"logits",
"=",
"cifar10",
".",
"inference",
"(",
"images",
")",
"_",
"=",
"cifar10",
".",
"loss",
"(",
"logits",
",",
"labels",
")",
"losses",
"=",
"tf",
".",
"get_collection",
"(",
"'losses'",
",",
"scope",
")",
"total_loss",
"=",
"tf",
".",
"add_n",
"(",
"losses",
",",
"name",
"=",
"'total_loss'",
")",
"for",
"l",
"in",
"(",
"losses",
"+",
"[",
"total_loss",
"]",
")",
":",
"loss_name",
"=",
"re",
".",
"sub",
"(",
"(",
"'%s_[0-9]*/'",
"%",
"cifar10",
".",
"TOWER_NAME",
")",
",",
"''",
",",
"l",
".",
"op",
".",
"name",
")",
"tf",
".",
"scalar_summary",
"(",
"loss_name",
",",
"l",
")",
"return",
"total_loss"
] |
calculate the total loss on a single tower running the cifar model .
|
train
| false
|
4,949
|
def get_input(prompt):
if (sys.version_info.major >= 3):
return input(prompt)
else:
return raw_input(prompt).decode(u'utf8')
|
[
"def",
"get_input",
"(",
"prompt",
")",
":",
"if",
"(",
"sys",
".",
"version_info",
".",
"major",
">=",
"3",
")",
":",
"return",
"input",
"(",
"prompt",
")",
"else",
":",
"return",
"raw_input",
"(",
"prompt",
")",
".",
"decode",
"(",
"u'utf8'",
")"
] |
fetch user input and unify input prompts .
|
train
| false
|
4,950
|
def contains_one_of(*fields):
message = 'Must contain any one of the following fields: {0}'.format(', '.join(fields))
def check_contains(endpoint_fields):
for field in fields:
if (field in endpoint_fields):
return
errors = {}
for field in fields:
errors[field] = 'one of these must have a value'
return errors
check_contains.__doc__ = message
return check_contains
|
[
"def",
"contains_one_of",
"(",
"*",
"fields",
")",
":",
"message",
"=",
"'Must contain any one of the following fields: {0}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"fields",
")",
")",
"def",
"check_contains",
"(",
"endpoint_fields",
")",
":",
"for",
"field",
"in",
"fields",
":",
"if",
"(",
"field",
"in",
"endpoint_fields",
")",
":",
"return",
"errors",
"=",
"{",
"}",
"for",
"field",
"in",
"fields",
":",
"errors",
"[",
"field",
"]",
"=",
"'one of these must have a value'",
"return",
"errors",
"check_contains",
".",
"__doc__",
"=",
"message",
"return",
"check_contains"
] |
enables ensuring that one of multiple optional fields is set .
|
train
| true
|
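A usage sketch for contains_one_of (illustrative field names):

    check = contains_one_of('email', 'phone')
    check({'email': 'a@b.example'})   # returns None: at least one field is present
    check({'name': 'x'})              # returns {'email': 'one of these must have a value',
                                      #          'phone': 'one of these must have a value'}
 |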
4,951
|
def chisquare(f_obs, f_exp=None, ddof=0, axis=0):
return power_divergence(f_obs, f_exp=f_exp, ddof=ddof, axis=axis, lambda_='pearson')
|
[
"def",
"chisquare",
"(",
"f_obs",
",",
"f_exp",
"=",
"None",
",",
"ddof",
"=",
"0",
",",
"axis",
"=",
"0",
")",
":",
"return",
"power_divergence",
"(",
"f_obs",
",",
"f_exp",
"=",
"f_exp",
",",
"ddof",
"=",
"ddof",
",",
"axis",
"=",
"axis",
",",
"lambda_",
"=",
"'pearson'",
")"
] |
chisquare goodness-of-fit test; the null hypothesis is that the categorical data has the given expected frequencies .
|
train
| false
|
4,953
|
def _normexpr(expr):
textbuf = []
for part in expr.parts:
if isinstance(part, six.string_types):
textbuf.append(part)
else:
if textbuf:
text = u''.join(textbuf)
if text:
(yield text)
textbuf = []
(yield part)
if textbuf:
text = u''.join(textbuf)
if text:
(yield text)
|
[
"def",
"_normexpr",
"(",
"expr",
")",
":",
"textbuf",
"=",
"[",
"]",
"for",
"part",
"in",
"expr",
".",
"parts",
":",
"if",
"isinstance",
"(",
"part",
",",
"six",
".",
"string_types",
")",
":",
"textbuf",
".",
"append",
"(",
"part",
")",
"else",
":",
"if",
"textbuf",
":",
"text",
"=",
"u''",
".",
"join",
"(",
"textbuf",
")",
"if",
"text",
":",
"(",
"yield",
"text",
")",
"textbuf",
"=",
"[",
"]",
"(",
"yield",
"part",
")",
"if",
"textbuf",
":",
"text",
"=",
"u''",
".",
"join",
"(",
"textbuf",
")",
"if",
"text",
":",
"(",
"yield",
"text",
")"
] |
normalize an expression object's parts .
|
train
| false
|
4,955
|
def date_to_month_number(date):
return year_month_to_month_number(date.year, date.month)
|
[
"def",
"date_to_month_number",
"(",
"date",
")",
":",
"return",
"year_month_to_month_number",
"(",
"date",
".",
"year",
",",
"date",
".",
"month",
")"
] |
this function converts a date to a month number .
|
train
| false
|
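A usage sketch for date_to_month_number; year_month_to_month_number is not shown in this record, so the numbering convention below (year*12 + month - 1) is an assumption for illustration:

    from datetime import date

    # hypothetical helper matching one common convention
    def year_month_to_month_number(year, month):
        return year * 12 + (month - 1)

    date_to_month_number(date(2020, 3, 15))   # -> 24242 under this convention
 |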
4,956
|
def _get_unpassable_types(arg):
if isinstance(arg, (basestring, int, long)):
return set()
elif isinstance(arg, (list, tuple, set, frozenset, dict)):
if isinstance(arg, dict):
parts = itertools.chain(arg.iterkeys(), arg.itervalues())
else:
parts = iter(arg)
types = set()
for part in parts:
types |= _get_unpassable_types(part)
return types
else:
return set([type(arg)])
|
[
"def",
"_get_unpassable_types",
"(",
"arg",
")",
":",
"if",
"isinstance",
"(",
"arg",
",",
"(",
"basestring",
",",
"int",
",",
"long",
")",
")",
":",
"return",
"set",
"(",
")",
"elif",
"isinstance",
"(",
"arg",
",",
"(",
"list",
",",
"tuple",
",",
"set",
",",
"frozenset",
",",
"dict",
")",
")",
":",
"if",
"isinstance",
"(",
"arg",
",",
"dict",
")",
":",
"parts",
"=",
"itertools",
".",
"chain",
"(",
"arg",
".",
"iterkeys",
"(",
")",
",",
"arg",
".",
"itervalues",
"(",
")",
")",
"else",
":",
"parts",
"=",
"iter",
"(",
"arg",
")",
"types",
"=",
"set",
"(",
")",
"for",
"part",
"in",
"parts",
":",
"types",
"|=",
"_get_unpassable_types",
"(",
"part",
")",
"return",
"types",
"else",
":",
"return",
"set",
"(",
"[",
"type",
"(",
"arg",
")",
"]",
")"
] |
given an argument , return the set of types it contains that cannot be passed .
|
train
| false
|
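A usage sketch for _get_unpassable_types (Python 2, since the snippet uses basestring/long; illustrative):

    _get_unpassable_types(['a', 1, (2, 3)])           # -> set(): all parts are passable
    _get_unpassable_types({'k': 2.5, 'j': object()})  # -> set([float, object]): the offending types
 |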
4,957
|
def set_cover_position(hass, position, entity_id=None):
data = ({ATTR_ENTITY_ID: entity_id} if entity_id else {})
data[ATTR_POSITION] = position
hass.services.call(DOMAIN, SERVICE_SET_COVER_POSITION, data)
|
[
"def",
"set_cover_position",
"(",
"hass",
",",
"position",
",",
"entity_id",
"=",
"None",
")",
":",
"data",
"=",
"(",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
"}",
"if",
"entity_id",
"else",
"{",
"}",
")",
"data",
"[",
"ATTR_POSITION",
"]",
"=",
"position",
"hass",
".",
"services",
".",
"call",
"(",
"DOMAIN",
",",
"SERVICE_SET_COVER_POSITION",
",",
"data",
")"
] |
move all covers , or the specified cover , to a specific position .
|
train
| false
|
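A usage sketch for set_cover_position (Home Assistant style; the entity id is illustrative):

    # Move one cover halfway open; omitting entity_id targets all covers.
    set_cover_position(hass, 50, entity_id='cover.garage_door')
    # -> calls the cover.set_cover_position service with
    #    {'entity_id': 'cover.garage_door', 'position': 50}
 |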
4,958
|
def scenegraphState(view, name):
state = ('====== Scenegraph state for %s ======\n' % name)
state += ('view size: %dx%d\n' % (view.width(), view.height()))
state += ('view transform:\n' + indent(transformStr(view.transform()), ' '))
for item in view.scene().items():
if (item.parentItem() is None):
state += (itemState(item) + '\n')
return state
|
[
"def",
"scenegraphState",
"(",
"view",
",",
"name",
")",
":",
"state",
"=",
"(",
"'====== Scenegraph state for %s ======\\n'",
"%",
"name",
")",
"state",
"+=",
"(",
"'view size: %dx%d\\n'",
"%",
"(",
"view",
".",
"width",
"(",
")",
",",
"view",
".",
"height",
"(",
")",
")",
")",
"state",
"+=",
"(",
"'view transform:\\n'",
"+",
"indent",
"(",
"transformStr",
"(",
"view",
".",
"transform",
"(",
")",
")",
",",
"' '",
")",
")",
"for",
"item",
"in",
"view",
".",
"scene",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"(",
"item",
".",
"parentItem",
"(",
")",
"is",
"None",
")",
":",
"state",
"+=",
"(",
"itemState",
"(",
"item",
")",
"+",
"'\\n'",
")",
"return",
"state"
] |
return information about the scenegraph for debugging test failures .
|
train
| false
|
4,960
|
def encode64(content, encoding='utf-8'):
return b64encode(content.encode(encoding)).decode(encoding)
|
[
"def",
"encode64",
"(",
"content",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"return",
"b64encode",
"(",
"content",
".",
"encode",
"(",
"encoding",
")",
")",
".",
"decode",
"(",
"encoding",
")"
] |
encode some content in base64 .
|
train
| false
|
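A usage sketch for encode64 (illustrative):

    >>> encode64('hello')
    'aGVsbG8='
 |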
4,963
|
def evaluate_filters(doc, filters):
if isinstance(filters, dict):
for (key, value) in filters.iteritems():
f = get_filter(None, {key: value})
if (not compare(doc.get(f.fieldname), f.operator, f.value)):
return False
elif isinstance(filters, (list, tuple)):
for d in filters:
f = get_filter(None, d)
if (not compare(doc.get(f.fieldname), f.operator, f.value)):
return False
return True
|
[
"def",
"evaluate_filters",
"(",
"doc",
",",
"filters",
")",
":",
"if",
"isinstance",
"(",
"filters",
",",
"dict",
")",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"filters",
".",
"iteritems",
"(",
")",
":",
"f",
"=",
"get_filter",
"(",
"None",
",",
"{",
"key",
":",
"value",
"}",
")",
"if",
"(",
"not",
"compare",
"(",
"doc",
".",
"get",
"(",
"f",
".",
"fieldname",
")",
",",
"f",
".",
"operator",
",",
"f",
".",
"value",
")",
")",
":",
"return",
"False",
"elif",
"isinstance",
"(",
"filters",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"d",
"in",
"filters",
":",
"f",
"=",
"get_filter",
"(",
"None",
",",
"d",
")",
"if",
"(",
"not",
"compare",
"(",
"doc",
".",
"get",
"(",
"f",
".",
"fieldname",
")",
",",
"f",
".",
"operator",
",",
"f",
".",
"value",
")",
")",
":",
"return",
"False",
"return",
"True"
] |
returns true if doc matches filters .
|
train
| false
|
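A usage sketch for evaluate_filters; get_filter and compare are frappe internals not shown in this record, so the filter syntax below is an assumption matching frappe's usual query conventions:

    doc = frappe._dict({'status': 'Open', 'priority': 'High'})
    evaluate_filters(doc, {'status': 'Open'})                          # True
    # assuming get_filter parses the [operator, value] list form:
    evaluate_filters(doc, [{'priority': ['in', ['High', 'Urgent']]}])  # True
 |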
4,964
|
def install_shutdown_handlers(function, override_sigint=True):
reactor._handleSignals()
signal.signal(signal.SIGTERM, function)
if ((signal.getsignal(signal.SIGINT) == signal.default_int_handler) or override_sigint):
signal.signal(signal.SIGINT, function)
if hasattr(signal, 'SIGBREAK'):
signal.signal(signal.SIGBREAK, function)
|
[
"def",
"install_shutdown_handlers",
"(",
"function",
",",
"override_sigint",
"=",
"True",
")",
":",
"reactor",
".",
"_handleSignals",
"(",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"function",
")",
"if",
"(",
"(",
"signal",
".",
"getsignal",
"(",
"signal",
".",
"SIGINT",
")",
"==",
"signal",
".",
"default_int_handler",
")",
"or",
"override_sigint",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"function",
")",
"if",
"hasattr",
"(",
"signal",
",",
"'SIGBREAK'",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGBREAK",
",",
"function",
")"
] |
install the given function as a signal handler for all common shutdown signals .
|
train
| false
|
4,965
|
def update_forum_role(course_id, user, rolename, action):
role = Role.objects.get(course_id=course_id, name=rolename)
if (action == 'allow'):
role.users.add(user)
elif (action == 'revoke'):
role.users.remove(user)
else:
raise ValueError("unrecognized action '{}'".format(action))
|
[
"def",
"update_forum_role",
"(",
"course_id",
",",
"user",
",",
"rolename",
",",
"action",
")",
":",
"role",
"=",
"Role",
".",
"objects",
".",
"get",
"(",
"course_id",
"=",
"course_id",
",",
"name",
"=",
"rolename",
")",
"if",
"(",
"action",
"==",
"'allow'",
")",
":",
"role",
".",
"users",
".",
"add",
"(",
"user",
")",
"elif",
"(",
"action",
"==",
"'revoke'",
")",
":",
"role",
".",
"users",
".",
"remove",
"(",
"user",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"unrecognized action '{}'\"",
".",
"format",
"(",
"action",
")",
")"
] |
change forum access of user .
|
train
| false
|
4,966
|
@pytest.fixture()
def isolated_jedi_cache(monkeypatch, tmpdir):
monkeypatch.setattr(settings, 'cache_directory', str(tmpdir))
|
[
"@",
"pytest",
".",
"fixture",
"(",
")",
"def",
"isolated_jedi_cache",
"(",
"monkeypatch",
",",
"tmpdir",
")",
":",
"monkeypatch",
".",
"setattr",
"(",
"settings",
",",
"'cache_directory'",
",",
"str",
"(",
"tmpdir",
")",
")"
] |
set jedi's cache directory to an isolated temporary directory .
|
train
| false
|
4,967
|
def _autocomple_finish(el_id, browser):
giveup = 0.0
sleeptime = 0.2
el = browser.find_element_by_id(el_id)
while (giveup < 60):
try:
if (el.find_elements_by_tag_name('option')[0].text != ''):
return
except:
print ('StaleElementReferenceException %s' % giveup)
el = browser.find_element_by_id(el_id)
time.sleep(sleeptime)
giveup += sleeptime
|
[
"def",
"_autocomple_finish",
"(",
"el_id",
",",
"browser",
")",
":",
"giveup",
"=",
"0.0",
"sleeptime",
"=",
"0.2",
"el",
"=",
"browser",
".",
"find_element_by_id",
"(",
"el_id",
")",
"while",
"(",
"giveup",
"<",
"60",
")",
":",
"try",
":",
"if",
"(",
"el",
".",
"find_elements_by_tag_name",
"(",
"'option'",
")",
"[",
"0",
"]",
".",
"text",
"!=",
"''",
")",
":",
"return",
"except",
":",
"print",
"(",
"'StaleElementReferenceException %s'",
"%",
"giveup",
")",
"el",
"=",
"browser",
".",
"find_element_by_id",
"(",
"el_id",
")",
"time",
".",
"sleep",
"(",
"sleeptime",
")",
"giveup",
"+=",
"sleeptime"
] |
helper function that polls until the autocomplete select element is populated .
|
train
| false
|
4,968
|
def write_double(fid, kind, data):
data_size = 8
data = np.array(data, dtype='>f8').T
_write(fid, data, kind, data_size, FIFF.FIFFT_DOUBLE, '>f8')
|
[
"def",
"write_double",
"(",
"fid",
",",
"kind",
",",
"data",
")",
":",
"data_size",
"=",
"8",
"data",
"=",
"np",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"'>f8'",
")",
".",
"T",
"_write",
"(",
"fid",
",",
"data",
",",
"kind",
",",
"data_size",
",",
"FIFF",
".",
"FIFFT_DOUBLE",
",",
"'>f8'",
")"
] |
write a double-precision floating point tag to a fif file .
|
train
| false
|
4,969
|
def xmlInvariance(n, forwardpasses=1):
tmpfile = tempfile.NamedTemporaryFile(dir='.')
f = tmpfile.name
tmpfile.close()
NetworkWriter.writeToFile(n, f)
tmpnet = NetworkReader.readFrom(f)
NetworkWriter.writeToFile(tmpnet, f)
endnet = NetworkReader.readFrom(f)
os.unlink(f)
netCompare(tmpnet, endnet, forwardpasses, True)
|
[
"def",
"xmlInvariance",
"(",
"n",
",",
"forwardpasses",
"=",
"1",
")",
":",
"tmpfile",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"dir",
"=",
"'.'",
")",
"f",
"=",
"tmpfile",
".",
"name",
"tmpfile",
".",
"close",
"(",
")",
"NetworkWriter",
".",
"writeToFile",
"(",
"n",
",",
"f",
")",
"tmpnet",
"=",
"NetworkReader",
".",
"readFrom",
"(",
"f",
")",
"NetworkWriter",
".",
"writeToFile",
"(",
"tmpnet",
",",
"f",
")",
"endnet",
"=",
"NetworkReader",
".",
"readFrom",
"(",
"f",
")",
"os",
".",
"unlink",
"(",
"f",
")",
"netCompare",
"(",
"tmpnet",
",",
"endnet",
",",
"forwardpasses",
",",
"True",
")"
] |
try writing a network to an xml file .
|
train
| false
|
4,971
|
def _register(event, handler):
k = handler.__name__
setattr(cp.tools, k, cp.Tool(event, handler))
cp.config.update({('tools.%s.on' % k): True})
|
[
"def",
"_register",
"(",
"event",
",",
"handler",
")",
":",
"k",
"=",
"handler",
".",
"__name__",
"setattr",
"(",
"cp",
".",
"tools",
",",
"k",
",",
"cp",
".",
"Tool",
"(",
"event",
",",
"handler",
")",
")",
"cp",
".",
"config",
".",
"update",
"(",
"{",
"(",
"'tools.%s.on'",
"%",
"k",
")",
":",
"True",
"}",
")"
] |
registers the given event handler .
|
train
| false
|
4,972
|
def _normalize_dir(string):
return re.sub('\\\\$', '', string.lower())
|
[
"def",
"_normalize_dir",
"(",
"string",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'\\\\\\\\$'",
",",
"''",
",",
"string",
".",
"lower",
"(",
")",
")"
] |
normalize the directory to make comparison possible .
|
train
| false
|
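A usage sketch for _normalize_dir (illustrative):

    >>> _normalize_dir('C:\\Users\\Me\\')
    'c:\\users\\me'
 |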
4,973
|
def iter_devices_partitions(devices_dir, item_type):
devices = listdir(devices_dir)
shuffle(devices)
devices_partitions = []
for device in devices:
partitions = listdir(os.path.join(devices_dir, device, item_type))
shuffle(partitions)
devices_partitions.append((device, iter(partitions)))
yielded = True
while yielded:
yielded = False
for (device, partitions) in devices_partitions:
try:
(yield (device, partitions.next()))
yielded = True
except StopIteration:
pass
|
[
"def",
"iter_devices_partitions",
"(",
"devices_dir",
",",
"item_type",
")",
":",
"devices",
"=",
"listdir",
"(",
"devices_dir",
")",
"shuffle",
"(",
"devices",
")",
"devices_partitions",
"=",
"[",
"]",
"for",
"device",
"in",
"devices",
":",
"partitions",
"=",
"listdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"devices_dir",
",",
"device",
",",
"item_type",
")",
")",
"shuffle",
"(",
"partitions",
")",
"devices_partitions",
".",
"append",
"(",
"(",
"device",
",",
"iter",
"(",
"partitions",
")",
")",
")",
"yielded",
"=",
"True",
"while",
"yielded",
":",
"yielded",
"=",
"False",
"for",
"(",
"device",
",",
"partitions",
")",
"in",
"devices_partitions",
":",
"try",
":",
"(",
"yield",
"(",
"device",
",",
"partitions",
".",
"next",
"(",
")",
")",
")",
"yielded",
"=",
"True",
"except",
"StopIteration",
":",
"pass"
] |
iterate over partitions across all devices .
|
train
| false
|
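A usage sketch for iter_devices_partitions (illustrative directory layout; ordering is shuffled per device):

    # Given /srv/node/sda/objects/{1,2} and /srv/node/sdb/objects/{3},
    # the generator yields one partition per device per round, e.g.:
    list(iter_devices_partitions('/srv/node', 'objects'))
    # -> [('sda', '1'), ('sdb', '3'), ('sda', '2')]   (one possible ordering)
 |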
4,976
|
@utils.arg('image', metavar='<image>', help=_('Name or ID of image.'))
@deprecated_image
def do_image_show(cs, args):
image = _find_image(cs, args.image)
_print_image(image)
|
[
"@",
"utils",
".",
"arg",
"(",
"'image'",
",",
"metavar",
"=",
"'<image>'",
",",
"help",
"=",
"_",
"(",
"'Name or ID of image.'",
")",
")",
"@",
"deprecated_image",
"def",
"do_image_show",
"(",
"cs",
",",
"args",
")",
":",
"image",
"=",
"_find_image",
"(",
"cs",
",",
"args",
".",
"image",
")",
"_print_image",
"(",
"image",
")"
] |
show details about the given image .
|
train
| false
|
4,977
|
def trash(fid):
url = build_url(RESOURCE, id=fid, route='trash')
return request('post', url)
|
[
"def",
"trash",
"(",
"fid",
")",
":",
"url",
"=",
"build_url",
"(",
"RESOURCE",
",",
"id",
"=",
"fid",
",",
"route",
"=",
"'trash'",
")",
"return",
"request",
"(",
"'post'",
",",
"url",
")"
] |
soft-delete a general file from plotly .
|
train
| false
|
4,978
|
def compute_distance_matrix_from_metadata(column_data):
data_row = array(column_data)
data_col = reshape(data_row, (1, len(data_row)))
dist_mtx = abs((data_row - data_col.T))
return dist_mtx
|
[
"def",
"compute_distance_matrix_from_metadata",
"(",
"column_data",
")",
":",
"data_row",
"=",
"array",
"(",
"column_data",
")",
"data_col",
"=",
"reshape",
"(",
"data_row",
",",
"(",
"1",
",",
"len",
"(",
"data_row",
")",
")",
")",
"dist_mtx",
"=",
"abs",
"(",
"(",
"data_row",
"-",
"data_col",
".",
"T",
")",
")",
"return",
"dist_mtx"
] |
calculates a distance matrix from a single column of a mapping file ; inputs: column_data .
|
train
| false
|
4,980
|
def reset():
_runtime.reset()
|
[
"def",
"reset",
"(",
")",
":",
"_runtime",
".",
"reset",
"(",
")"
] |
reset the runtime state .
|
train
| false
|
4,982
|
def ParseTraceLocationLine(msg):
parsed = re.match(kCodeLocationLine, msg)
if (not parsed):
return None
try:
return (parsed.group(1), parsed.group(2), parsed.group(3))
except IndexError as e:
logging.warning(('RE matched "%s", but extracted wrong number of items: %r' % (msg, e)))
return None
|
[
"def",
"ParseTraceLocationLine",
"(",
"msg",
")",
":",
"parsed",
"=",
"re",
".",
"match",
"(",
"kCodeLocationLine",
",",
"msg",
")",
"if",
"(",
"not",
"parsed",
")",
":",
"return",
"None",
"try",
":",
"return",
"(",
"parsed",
".",
"group",
"(",
"1",
")",
",",
"parsed",
".",
"group",
"(",
"2",
")",
",",
"parsed",
".",
"group",
"(",
"3",
")",
")",
"except",
"IndexError",
"as",
"e",
":",
"logging",
".",
"warning",
"(",
"(",
"'RE matched \"%s\", but extracted wrong number of items: %r'",
"%",
"(",
"msg",
",",
"e",
")",
")",
")",
"return",
"None"
] |
parse the location line of a stack trace .
|
train
| false
|
4,983
|
def select_lang(cache, function):
if (function.argument_types() not in ([u'STRING'], [u'IDENT'])):
raise ExpressionError((u'Expected a single string or ident for :lang(), got %r' % function.arguments))
lang = function.arguments[0].value
if lang:
lang = ascii_lower(lang)
lp = (lang + u'-')
for (tlang, elem_set) in cache.lang_map.iteritems():
if ((tlang == lang) or ((tlang is not None) and tlang.startswith(lp))):
for elem in elem_set:
(yield elem)
|
[
"def",
"select_lang",
"(",
"cache",
",",
"function",
")",
":",
"if",
"(",
"function",
".",
"argument_types",
"(",
")",
"not",
"in",
"(",
"[",
"u'STRING'",
"]",
",",
"[",
"u'IDENT'",
"]",
")",
")",
":",
"raise",
"ExpressionError",
"(",
"(",
"u'Expected a single string or ident for :lang(), got %r'",
"%",
"function",
".",
"arguments",
")",
")",
"lang",
"=",
"function",
".",
"arguments",
"[",
"0",
"]",
".",
"value",
"if",
"lang",
":",
"lang",
"=",
"ascii_lower",
"(",
"lang",
")",
"lp",
"=",
"(",
"lang",
"+",
"u'-'",
")",
"for",
"(",
"tlang",
",",
"elem_set",
")",
"in",
"cache",
".",
"lang_map",
".",
"iteritems",
"(",
")",
":",
"if",
"(",
"(",
"tlang",
"==",
"lang",
")",
"or",
"(",
"(",
"tlang",
"is",
"not",
"None",
")",
"and",
"tlang",
".",
"startswith",
"(",
"lp",
")",
")",
")",
":",
"for",
"elem",
"in",
"elem_set",
":",
"(",
"yield",
"elem",
")"
] |
implement :lang() .
|
train
| false
|
4,984
|
def describe_api_resources(restApiId, region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
resources = sorted(_multi_call(conn.get_resources, 'items', restApiId=restApiId), key=(lambda k: k['path']))
return {'resources': resources}
except ClientError as e:
return {'error': salt.utils.boto3.get_error(e)}
|
[
"def",
"describe_api_resources",
"(",
"restApiId",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"resources",
"=",
"sorted",
"(",
"_multi_call",
"(",
"conn",
".",
"get_resources",
",",
"'items'",
",",
"restApiId",
"=",
"restApiId",
")",
",",
"key",
"=",
"(",
"lambda",
"k",
":",
"k",
"[",
"'path'",
"]",
")",
")",
"return",
"{",
"'resources'",
":",
"resources",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'error'",
":",
"salt",
".",
"utils",
".",
"boto3",
".",
"get_error",
"(",
"e",
")",
"}"
] |
given a rest api id , describe the api's resources .
|
train
| false
|