id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
28,642 | def random_rot(dim):
H = eye(dim)
D = ones((dim,))
for n in range(1, dim):
x = normal(size=(((dim - n) + 1),))
D[(n - 1)] = sign(x[0])
x[0] -= (D[(n - 1)] * sqrt((x * x).sum()))
Hx = (eye(((dim - n) + 1)) - ((2.0 * outer(x, x)) / (x * x).sum()))
mat = eye(dim)
mat[(n - 1):, (n - 1):] = Hx
H = dot(H, mat)
D[(-1)] = (- D.prod())
H = (D * H.T).T
return H
| [
"def",
"random_rot",
"(",
"dim",
")",
":",
"H",
"=",
"eye",
"(",
"dim",
")",
"D",
"=",
"ones",
"(",
"(",
"dim",
",",
")",
")",
"for",
"n",
"in",
"range",
"(",
"1",
",",
"dim",
")",
":",
"x",
"=",
"normal",
"(",
"size",
"=",
"(",
"(",
"(",... | return a random rotation matrix . | train | false |
28,644 | def libvlc_video_set_crop_geometry(p_mi, psz_geometry):
f = (_Cfunctions.get('libvlc_video_set_crop_geometry', None) or _Cfunction('libvlc_video_set_crop_geometry', ((1,), (1,)), None, None, MediaPlayer, ctypes.c_char_p))
return f(p_mi, psz_geometry)
| [
"def",
"libvlc_video_set_crop_geometry",
"(",
"p_mi",
",",
"psz_geometry",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_video_set_crop_geometry'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_video_set_crop_geometry'",
",",
"(",
"(",
"1... | set new crop filter geometry . | train | true |
28,648 | def get_denoiser_data_dir():
dir = (get_qiime_project_dir() + '/qiime/support_files/denoiser/Data/')
return dir
| [
"def",
"get_denoiser_data_dir",
"(",
")",
":",
"dir",
"=",
"(",
"get_qiime_project_dir",
"(",
")",
"+",
"'/qiime/support_files/denoiser/Data/'",
")",
"return",
"dir"
] | return the directory of the denoiser error profiles . | train | false |
28,650 | def cloud_providers_config(path, env_var='SALT_CLOUD_PROVIDERS_CONFIG', defaults=None):
if (defaults is None):
defaults = PROVIDER_CONFIG_DEFAULTS
overrides = salt.config.load_config(path, env_var, os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers'))
default_include = overrides.get('default_include', defaults['default_include'])
include = overrides.get('include', [])
overrides.update(salt.config.include_config(default_include, path, verbose=False))
overrides.update(salt.config.include_config(include, path, verbose=True))
return apply_cloud_providers_config(overrides, defaults)
| [
"def",
"cloud_providers_config",
"(",
"path",
",",
"env_var",
"=",
"'SALT_CLOUD_PROVIDERS_CONFIG'",
",",
"defaults",
"=",
"None",
")",
":",
"if",
"(",
"defaults",
"is",
"None",
")",
":",
"defaults",
"=",
"PROVIDER_CONFIG_DEFAULTS",
"overrides",
"=",
"salt",
".",... | read in the salt cloud providers configuration file . | train | true |
28,652 | def create_admin_user(username, password):
u = User()
u.username = username
u.email = '{0}@dev.mail.example.com'.format(username)
u.is_superuser = True
u.is_staff = True
u.set_password(password)
try:
u.save()
print('Created user {0} with password {1}.'.format(username, password))
except Exception as e:
pass
| [
"def",
"create_admin_user",
"(",
"username",
",",
"password",
")",
":",
"u",
"=",
"User",
"(",
")",
"u",
".",
"username",
"=",
"username",
"u",
".",
"email",
"=",
"'{0}@dev.mail.example.com'",
".",
"format",
"(",
"username",
")",
"u",
".",
"is_superuser",
... | create a user for testing the admin . | train | false |
28,653 | def get_redis_error_classes():
from redis import exceptions
if hasattr(exceptions, u'InvalidData'):
DataError = exceptions.InvalidData
else:
DataError = exceptions.DataError
return error_classes_t((virtual.Transport.connection_errors + (InconsistencyError, socket.error, IOError, OSError, exceptions.ConnectionError, exceptions.AuthenticationError, exceptions.TimeoutError)), (virtual.Transport.channel_errors + (DataError, exceptions.InvalidResponse, exceptions.ResponseError)))
| [
"def",
"get_redis_error_classes",
"(",
")",
":",
"from",
"redis",
"import",
"exceptions",
"if",
"hasattr",
"(",
"exceptions",
",",
"u'InvalidData'",
")",
":",
"DataError",
"=",
"exceptions",
".",
"InvalidData",
"else",
":",
"DataError",
"=",
"exceptions",
".",
... | return tuple of redis error classes . | train | false |
28,655 | def __render_script(path, vm_=None, opts=None, minion=''):
log.info('Rendering deploy script: {0}'.format(path))
try:
with salt.utils.fopen(path, 'r') as fp_:
template = Template(fp_.read())
return str(template.render(opts=opts, vm=vm_, minion=minion))
except AttributeError:
with salt.utils.fopen(path, 'r') as fp_:
return fp_.read()
| [
"def",
"__render_script",
"(",
"path",
",",
"vm_",
"=",
"None",
",",
"opts",
"=",
"None",
",",
"minion",
"=",
"''",
")",
":",
"log",
".",
"info",
"(",
"'Rendering deploy script: {0}'",
".",
"format",
"(",
"path",
")",
")",
"try",
":",
"with",
"salt",
... | return the rendered script . | train | false |
28,656 | @decorators.which('ssh-keyscan')
def recv_known_host(hostname, enc=None, port=None, hash_known_hosts=True, timeout=5):
need_dash_t = ('CentOS-5',)
cmd = ['ssh-keyscan']
if port:
cmd.extend(['-p', port])
if enc:
cmd.extend(['-t', enc])
if ((not enc) and (__grains__.get('osfinger') in need_dash_t)):
cmd.extend(['-t', 'rsa'])
if hash_known_hosts:
cmd.append('-H')
cmd.extend(['-T', str(timeout)])
cmd.append(hostname)
lines = None
attempts = 5
while ((not lines) and (attempts > 0)):
attempts = (attempts - 1)
lines = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
known_hosts = list(_parse_openssh_output(lines))
return (known_hosts[0] if known_hosts else None)
| [
"@",
"decorators",
".",
"which",
"(",
"'ssh-keyscan'",
")",
"def",
"recv_known_host",
"(",
"hostname",
",",
"enc",
"=",
"None",
",",
"port",
"=",
"None",
",",
"hash_known_hosts",
"=",
"True",
",",
"timeout",
"=",
"5",
")",
":",
"need_dash_t",
"=",
"(",
... | retrieve information about host public key from remote server hostname the name of the remote host enc defines what type of key is being used . | train | false |
28,657 | def PackPlacemark(placemark):
return ','.join([base64hex.B64HexEncode(x.encode('utf-8'), padding=False) for x in placemark])
| [
"def",
"PackPlacemark",
"(",
"placemark",
")",
":",
"return",
"','",
".",
"join",
"(",
"[",
"base64hex",
".",
"B64HexEncode",
"(",
"x",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"padding",
"=",
"False",
")",
"for",
"x",
"in",
"placemark",
"]",
")"
] | converts placemark named tuple into a packed . | train | false |
28,658 | def _marked(func, mark):
try:
func_mark = getattr(func, mark.name)
except AttributeError:
return False
return ((mark.args == func_mark.args) and (mark.kwargs == func_mark.kwargs))
| [
"def",
"_marked",
"(",
"func",
",",
"mark",
")",
":",
"try",
":",
"func_mark",
"=",
"getattr",
"(",
"func",
",",
"mark",
".",
"name",
")",
"except",
"AttributeError",
":",
"return",
"False",
"return",
"(",
"(",
"mark",
".",
"args",
"==",
"func_mark",
... | returns true if :func: is already marked with :mark: . | train | false |
28,659 | def mso(sample_len=1000, n_samples=1):
signals = []
for _ in range(n_samples):
phase = np.random.rand()
x = np.atleast_2d(np.arange(sample_len)).T
signals.append((np.sin(((0.2 * x) + phase)) + np.sin(((0.311 * x) + phase))))
return signals
| [
"def",
"mso",
"(",
"sample_len",
"=",
"1000",
",",
"n_samples",
"=",
"1",
")",
":",
"signals",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"n_samples",
")",
":",
"phase",
"=",
"np",
".",
"random",
".",
"rand",
"(",
")",
"x",
"=",
"np",
".",
... | mso -> input generate the multiple sinewave oscillator time-series . | train | false |
28,660 | def _on_process_docstring(app, what, name, obj, options, lines):
if ((what == 'module') and (name == 'falcon')):
lines[:] = []
return
lines[:] = [_process_line(line) for line in lines]
| [
"def",
"_on_process_docstring",
"(",
"app",
",",
"what",
",",
"name",
",",
"obj",
",",
"options",
",",
"lines",
")",
":",
"if",
"(",
"(",
"what",
"==",
"'module'",
")",
"and",
"(",
"name",
"==",
"'falcon'",
")",
")",
":",
"lines",
"[",
":",
"]",
... | process the docstring for a given python object . | train | false |
28,661 | @register.filter(is_safe=True)
def random(value):
return random_module.choice(value)
| [
"@",
"register",
".",
"filter",
"(",
"is_safe",
"=",
"True",
")",
"def",
"random",
"(",
"value",
")",
":",
"return",
"random_module",
".",
"choice",
"(",
"value",
")"
] | random() -> str get a random user agent string . | train | false |
28,663 | def libvlc_audio_set_volume_callback(mp, set_volume):
f = (_Cfunctions.get('libvlc_audio_set_volume_callback', None) or _Cfunction('libvlc_audio_set_volume_callback', ((1,), (1,)), None, None, MediaPlayer, AudioSetVolumeCb))
return f(mp, set_volume)
| [
"def",
"libvlc_audio_set_volume_callback",
"(",
"mp",
",",
"set_volume",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_audio_set_volume_callback'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_audio_set_volume_callback'",
",",
"(",
"(",
... | set callbacks and private data for decoded audio . | train | true |
28,664 | def process_body():
try:
import xmlrpclib
return xmlrpclib.loads(cherrypy.request.body.read())
except Exception:
return (('ERROR PARAMS',), 'ERRORMETHOD')
| [
"def",
"process_body",
"(",
")",
":",
"try",
":",
"import",
"xmlrpclib",
"return",
"xmlrpclib",
".",
"loads",
"(",
"cherrypy",
".",
"request",
".",
"body",
".",
"read",
"(",
")",
")",
"except",
"Exception",
":",
"return",
"(",
"(",
"'ERROR PARAMS'",
",",... | return from request body . | train | false |
28,667 | def is_task_object(a):
return (isinstance(a, Task) and a.use_task_objects)
| [
"def",
"is_task_object",
"(",
"a",
")",
":",
"return",
"(",
"isinstance",
"(",
"a",
",",
"Task",
")",
"and",
"a",
".",
"use_task_objects",
")"
] | determine if the provided value is a task object . | train | false |
28,670 | def extract_primer(seq, possible_primers, min_idx=None, max_idx=None):
primer_idx = None
primer = None
for possible_primer in possible_primers:
if (possible_primer in seq):
primer_idx = seq.index(possible_primer)
primer = possible_primer
if (((min_idx is not None) and (primer_idx < min_idx)) or ((max_idx is not None) and (primer_idx > max_idx))):
primer_idx = None
primer = None
else:
break
if (primer_idx is None):
raise PrimerMismatchError('Sequence does not contain any primers from the mapping file, Please verify your mapping file.')
before_primer = seq[:primer_idx]
after_primer = seq.replace((before_primer + primer), '', 1)
return (before_primer, primer, after_primer)
| [
"def",
"extract_primer",
"(",
"seq",
",",
"possible_primers",
",",
"min_idx",
"=",
"None",
",",
"max_idx",
"=",
"None",
")",
":",
"primer_idx",
"=",
"None",
"primer",
"=",
"None",
"for",
"possible_primer",
"in",
"possible_primers",
":",
"if",
"(",
"possible_... | given possible primers . | train | false |
28,674 | def ifconfigTest(net):
hosts = net.hosts
for host in hosts:
info(host.cmd('ifconfig'))
| [
"def",
"ifconfigTest",
"(",
"net",
")",
":",
"hosts",
"=",
"net",
".",
"hosts",
"for",
"host",
"in",
"hosts",
":",
"info",
"(",
"host",
".",
"cmd",
"(",
"'ifconfig'",
")",
")"
] | run ifconfig on all hosts in net . | train | false |
28,675 | def requires_module(function, name, call=None):
call = (('import %s' % name) if (call is None) else call)
try:
from nose.plugins.skip import SkipTest
except ImportError:
SkipTest = AssertionError
@wraps(function)
def dec(*args, **kwargs):
try:
exec call in globals(), locals()
except Exception as exc:
raise SkipTest(('Test %s skipped, requires %s. Got exception (%s)' % (function.__name__, name, exc)))
return function(*args, **kwargs)
return dec
| [
"def",
"requires_module",
"(",
"function",
",",
"name",
",",
"call",
"=",
"None",
")",
":",
"call",
"=",
"(",
"(",
"'import %s'",
"%",
"name",
")",
"if",
"(",
"call",
"is",
"None",
")",
"else",
"call",
")",
"try",
":",
"from",
"nose",
".",
"plugins... | decorator to skip test if package is not available . | train | false |
28,676 | def test_drop_channels_mixin():
(raw, events) = _get_data()[:2]
epochs = Epochs(raw, events, event_id, tmin, tmax, preload=True)
drop_ch = epochs.ch_names[:3]
ch_names = epochs.ch_names[3:]
ch_names_orig = epochs.ch_names
dummy = epochs.copy().drop_channels(drop_ch)
assert_equal(ch_names, dummy.ch_names)
assert_equal(ch_names_orig, epochs.ch_names)
assert_equal(len(ch_names_orig), epochs.get_data().shape[1])
epochs.drop_channels(drop_ch)
assert_equal(ch_names, epochs.ch_names)
assert_equal(len(ch_names), epochs.get_data().shape[1])
| [
"def",
"test_drop_channels_mixin",
"(",
")",
":",
"(",
"raw",
",",
"events",
")",
"=",
"_get_data",
"(",
")",
"[",
":",
"2",
"]",
"epochs",
"=",
"Epochs",
"(",
"raw",
",",
"events",
",",
"event_id",
",",
"tmin",
",",
"tmax",
",",
"preload",
"=",
"T... | test channels-dropping functionality . | train | false |
28,677 | def art_for_album(album, paths, maxwidth=None, local_only=False):
out = None
cover_names = config['fetchart']['cover_names'].as_str_seq()
cover_names = map(util.bytestring_path, cover_names)
cautious = config['fetchart']['cautious'].get(bool)
if paths:
for path in paths:
out = art_in_path(path, cover_names, cautious)
if out:
break
remote_priority = config['fetchart']['remote_priority'].get(bool)
if ((not local_only) and (remote_priority or (not out))):
for url in _source_urls(album, config['fetchart']['sources'].as_str_seq()):
if maxwidth:
url = ArtResizer.shared.proxy_url(maxwidth, url)
candidate = _fetch_image(url)
if candidate:
out = candidate
break
if (maxwidth and out):
out = ArtResizer.shared.resize(maxwidth, out)
return out
| [
"def",
"art_for_album",
"(",
"album",
",",
"paths",
",",
"maxwidth",
"=",
"None",
",",
"local_only",
"=",
"False",
")",
":",
"out",
"=",
"None",
"cover_names",
"=",
"config",
"[",
"'fetchart'",
"]",
"[",
"'cover_names'",
"]",
".",
"as_str_seq",
"(",
")",... | given an album object . | train | false |
28,678 | def get_rpc_client(**kwargs):
target = oslo_messaging.Target(**kwargs)
serializer = RequestContextSerializer(JsonPayloadSerializer())
return oslo_messaging.RPCClient(TRANSPORT, target, serializer=serializer)
| [
"def",
"get_rpc_client",
"(",
"**",
"kwargs",
")",
":",
"target",
"=",
"oslo_messaging",
".",
"Target",
"(",
"**",
"kwargs",
")",
"serializer",
"=",
"RequestContextSerializer",
"(",
"JsonPayloadSerializer",
"(",
")",
")",
"return",
"oslo_messaging",
".",
"RPCCli... | return a configured oslo_messaging rpcclient . | train | false |
28,679 | def apply_(obj, data):
for (key, value) in data.items():
if _safe_value(obj, key, value):
obj[key] = value
else:
obj.set_parse(key, six.text_type(value))
| [
"def",
"apply_",
"(",
"obj",
",",
"data",
")",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"data",
".",
"items",
"(",
")",
":",
"if",
"_safe_value",
"(",
"obj",
",",
"key",
",",
"value",
")",
":",
"obj",
"[",
"key",
"]",
"=",
"value",
"e... | set the fields of a dbcore . | train | false |
28,684 | def _update_entry(entry, status, directives):
for (directive, state) in six.iteritems(directives):
if (directive == 'delete_others'):
status['delete_others'] = state
continue
for (attr, vals) in six.iteritems(state):
status['mentioned_attributes'].add(attr)
vals = _toset(vals)
if (directive == 'default'):
if (len(vals) and ((attr not in entry) or (not len(entry[attr])))):
entry[attr] = vals
elif (directive == 'add'):
vals.update(entry.get(attr, ()))
if len(vals):
entry[attr] = vals
elif (directive == 'delete'):
existing_vals = entry.pop(attr, set())
if len(vals):
existing_vals -= vals
if len(existing_vals):
entry[attr] = existing_vals
elif (directive == 'replace'):
entry.pop(attr, None)
if len(vals):
entry[attr] = vals
else:
raise ValueError(('unknown directive: ' + directive))
| [
"def",
"_update_entry",
"(",
"entry",
",",
"status",
",",
"directives",
")",
":",
"for",
"(",
"directive",
",",
"state",
")",
"in",
"six",
".",
"iteritems",
"(",
"directives",
")",
":",
"if",
"(",
"directive",
"==",
"'delete_others'",
")",
":",
"status",... | update an entrys attributes using the provided directives . | train | true |
28,685 | def set_monitor_timeout(timeout, power='ac', scheme=None):
return _set_powercfg_value(scheme, 'SUB_VIDEO', 'VIDEOIDLE', power, timeout)
| [
"def",
"set_monitor_timeout",
"(",
"timeout",
",",
"power",
"=",
"'ac'",
",",
"scheme",
"=",
"None",
")",
":",
"return",
"_set_powercfg_value",
"(",
"scheme",
",",
"'SUB_VIDEO'",
",",
"'VIDEOIDLE'",
",",
"power",
",",
"timeout",
")"
] | set the monitor timeout in minutes for the given power scheme cli example: . | train | false |
28,686 | def gf_neg(f, p, K):
return [((- coeff) % p) for coeff in f]
| [
"def",
"gf_neg",
"(",
"f",
",",
"p",
",",
"K",
")",
":",
"return",
"[",
"(",
"(",
"-",
"coeff",
")",
"%",
"p",
")",
"for",
"coeff",
"in",
"f",
"]"
] | negate a polynomial in gf(p)[x] . | train | false |
28,687 | @slow_test
def test_browse_raw():
check_usage(mne_browse_raw)
| [
"@",
"slow_test",
"def",
"test_browse_raw",
"(",
")",
":",
"check_usage",
"(",
"mne_browse_raw",
")"
] | test mne browse_raw . | train | false |
28,688 | def normalize_timestamps(sid):
sub_paths = [store.path(sid, submission.filename) for submission in g.source.submissions]
if (len(sub_paths) > 1):
args = ['touch']
args.extend(sub_paths[:(-1)])
rc = subprocess.call(args)
if (rc != 0):
app.logger.warning(("Couldn't normalize submission timestamps (touch exited with %d)" % rc))
| [
"def",
"normalize_timestamps",
"(",
"sid",
")",
":",
"sub_paths",
"=",
"[",
"store",
".",
"path",
"(",
"sid",
",",
"submission",
".",
"filename",
")",
"for",
"submission",
"in",
"g",
".",
"source",
".",
"submissions",
"]",
"if",
"(",
"len",
"(",
"sub_p... | update the timestamps on all of the sources submissions to match that of the latest submission . | train | false |
28,689 | def set_statusbar(event):
index = main_window.call(event.widget, 'index', 'active')
if (index == 0):
statustext.set('More information about this program')
elif (index == 2):
statustext.set('Terminate the program')
else:
statustext.set('This is the statusbar')
return
| [
"def",
"set_statusbar",
"(",
"event",
")",
":",
"index",
"=",
"main_window",
".",
"call",
"(",
"event",
".",
"widget",
",",
"'index'",
",",
"'active'",
")",
"if",
"(",
"index",
"==",
"0",
")",
":",
"statustext",
".",
"set",
"(",
"'More information about ... | show statusbar comments from menu selection . | train | false |
28,692 | def fromChunk(data):
(prefix, rest) = data.split('\r\n', 1)
length = int(prefix, 16)
if (length < 0):
raise ValueError(('Chunk length must be >= 0, not %d' % (length,)))
if (rest[length:(length + 2)] != '\r\n'):
raise ValueError('chunk must end with CRLF')
return (rest[:length], rest[(length + 2):])
| [
"def",
"fromChunk",
"(",
"data",
")",
":",
"(",
"prefix",
",",
"rest",
")",
"=",
"data",
".",
"split",
"(",
"'\\r\\n'",
",",
"1",
")",
"length",
"=",
"int",
"(",
"prefix",
",",
"16",
")",
"if",
"(",
"length",
"<",
"0",
")",
":",
"raise",
"Value... | convert chunk to string . | train | false |
28,693 | def bootstrap_find_resource(filename, cdn, use_minified=None, local=True):
config = current_app.config
if (None == use_minified):
use_minified = config['BOOTSTRAP_USE_MINIFIED']
if use_minified:
filename = ('%s.min.%s' % tuple(filename.rsplit('.', 1)))
cdns = current_app.extensions['bootstrap']['cdns']
resource_url = cdns[cdn].get_resource_url(filename)
if (resource_url.startswith('//') and config['BOOTSTRAP_CDN_FORCE_SSL']):
resource_url = ('https:%s' % resource_url)
return resource_url
| [
"def",
"bootstrap_find_resource",
"(",
"filename",
",",
"cdn",
",",
"use_minified",
"=",
"None",
",",
"local",
"=",
"True",
")",
":",
"config",
"=",
"current_app",
".",
"config",
"if",
"(",
"None",
"==",
"use_minified",
")",
":",
"use_minified",
"=",
"conf... | resource finding function . | train | true |
28,694 | def datetimeformat(dt, fmt=None, relative=False):
fmt = (fmt or '%b %d, %Y %I:%M %p')
if relative:
time_difference = _relative_timestamp(dt)
if time_difference:
return '{} ago'.format(time_difference)
return dt.strftime(fmt)
| [
"def",
"datetimeformat",
"(",
"dt",
",",
"fmt",
"=",
"None",
",",
"relative",
"=",
"False",
")",
":",
"fmt",
"=",
"(",
"fmt",
"or",
"'%b %d, %Y %I:%M %p'",
")",
"if",
"relative",
":",
"time_difference",
"=",
"_relative_timestamp",
"(",
"dt",
")",
"if",
"... | returns date/time formatted using babels locale settings . | train | false |
28,695 | def no_quote(s):
return s
| [
"def",
"no_quote",
"(",
"s",
")",
":",
"return",
"s"
] | quoting that doesnt do anything . | train | false |
28,696 | def local_node_connectivity(G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None):
if (flow_func is None):
flow_func = default_flow_func
if (auxiliary is None):
H = build_auxiliary_node_connectivity(G)
else:
H = auxiliary
mapping = H.graph.get('mapping', None)
if (mapping is None):
raise nx.NetworkXError('Invalid auxiliary digraph.')
kwargs = dict(flow_func=flow_func, residual=residual)
if (flow_func is shortest_augmenting_path):
kwargs['cutoff'] = cutoff
kwargs['two_phase'] = True
elif (flow_func is edmonds_karp):
kwargs['cutoff'] = cutoff
elif (flow_func is dinitz):
kwargs['cutoff'] = cutoff
elif (flow_func is boykov_kolmogorov):
kwargs['cutoff'] = cutoff
return nx.maximum_flow_value(H, ('%sB' % mapping[s]), ('%sA' % mapping[t]), **kwargs)
| [
"def",
"local_node_connectivity",
"(",
"G",
",",
"s",
",",
"t",
",",
"flow_func",
"=",
"None",
",",
"auxiliary",
"=",
"None",
",",
"residual",
"=",
"None",
",",
"cutoff",
"=",
"None",
")",
":",
"if",
"(",
"flow_func",
"is",
"None",
")",
":",
"flow_fu... | computes local node connectivity for nodes s and t . | train | false |
28,698 | def Scatter(data=None, x=None, y=None, **kws):
kws['x'] = x
kws['y'] = y
return create_and_build(ScatterBuilder, data, **kws)
| [
"def",
"Scatter",
"(",
"data",
"=",
"None",
",",
"x",
"=",
"None",
",",
"y",
"=",
"None",
",",
"**",
"kws",
")",
":",
"kws",
"[",
"'x'",
"]",
"=",
"x",
"kws",
"[",
"'y'",
"]",
"=",
"y",
"return",
"create_and_build",
"(",
"ScatterBuilder",
",",
... | create a scatter chart using :class:scatterbuilder <bokeh . | train | false |
28,699 | def create_and_build(builder_class, *data, **kws):
if (getattr(builder_class, 'dimensions') is None):
raise NotImplementedError(('Each builder must specify its dimensions, %s does not.' % builder_class.__name__))
if (getattr(builder_class, 'default_attributes') is None):
raise NotImplementedError(('Each builder must specify its default_attributes, %s does not.' % builder_class.__name__))
builder_props = (set(builder_class.properties()) | set(getattr(builder_class, '__deprecated_attributes__', [])))
for dim in builder_class.dimensions:
builder_props.add(dim)
for attr_name in builder_class.default_attributes.keys():
builder_props.add(attr_name)
builder_kws = {k: v for (k, v) in kws.items() if (k in builder_props)}
builder = builder_class(*data, **builder_kws)
chart_kws = {k: v for (k, v) in kws.items() if (k not in builder_props)}
chart = Chart(**chart_kws)
chart.add_builder(builder)
chart.start_plot()
return chart
| [
"def",
"create_and_build",
"(",
"builder_class",
",",
"*",
"data",
",",
"**",
"kws",
")",
":",
"if",
"(",
"getattr",
"(",
"builder_class",
",",
"'dimensions'",
")",
"is",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"(",
"'Each builder must specify i... | a factory function for handling chart and builder generation . | train | false |
28,700 | def jelly(object, taster=DummySecurityOptions(), persistentStore=None, invoker=None):
return _Jellier(taster, persistentStore, invoker).jelly(object)
| [
"def",
"jelly",
"(",
"object",
",",
"taster",
"=",
"DummySecurityOptions",
"(",
")",
",",
"persistentStore",
"=",
"None",
",",
"invoker",
"=",
"None",
")",
":",
"return",
"_Jellier",
"(",
"taster",
",",
"persistentStore",
",",
"invoker",
")",
".",
"jelly",... | serialize to s-expression . | train | false |
28,701 | def convert_colorscale_to_rgb(colorscale):
for color in colorscale:
color[1] = convert_to_RGB_255(color[1])
for color in colorscale:
color[1] = label_rgb(color[1])
return colorscale
| [
"def",
"convert_colorscale_to_rgb",
"(",
"colorscale",
")",
":",
"for",
"color",
"in",
"colorscale",
":",
"color",
"[",
"1",
"]",
"=",
"convert_to_RGB_255",
"(",
"color",
"[",
"1",
"]",
")",
"for",
"color",
"in",
"colorscale",
":",
"color",
"[",
"1",
"]"... | converts the colors in a colorscale to rgb colors a colorscale is an array of arrays . | train | false |
28,702 | def coerce_dtypes(df, dtypes):
for c in df.columns:
if ((c in dtypes) and (df.dtypes[c] != dtypes[c])):
if (np.issubdtype(df.dtypes[c], np.floating) and np.issubdtype(dtypes[c], np.integer)):
if (df[c] % 1).any():
msg = "Runtime type mismatch. Add {'%s': float} to dtype= keyword in read_csv/read_table"
raise TypeError((msg % c))
df[c] = df[c].astype(dtypes[c])
| [
"def",
"coerce_dtypes",
"(",
"df",
",",
"dtypes",
")",
":",
"for",
"c",
"in",
"df",
".",
"columns",
":",
"if",
"(",
"(",
"c",
"in",
"dtypes",
")",
"and",
"(",
"df",
".",
"dtypes",
"[",
"c",
"]",
"!=",
"dtypes",
"[",
"c",
"]",
")",
")",
":",
... | coerce dataframe to dtypes safely operates in place parameters df: pandas dataframe dtypes: dict like {x: float} . | train | false |
28,703 | def __create_orget_address(conn, name, region):
try:
addy = conn.ex_get_address(name, region)
except ResourceNotFoundError:
addr_kwargs = {'name': name, 'region': region}
new_addy = create_address(addr_kwargs, 'function')
addy = conn.ex_get_address(new_addy['name'], new_addy['region'])
return addy
| [
"def",
"__create_orget_address",
"(",
"conn",
",",
"name",
",",
"region",
")",
":",
"try",
":",
"addy",
"=",
"conn",
".",
"ex_get_address",
"(",
"name",
",",
"region",
")",
"except",
"ResourceNotFoundError",
":",
"addr_kwargs",
"=",
"{",
"'name'",
":",
"na... | reuse or create a static ip address . | train | true |
28,704 | def total_norm_constraint(tensor_vars, max_norm, epsilon=1e-07, return_norm=False):
norm = T.sqrt(sum((T.sum((tensor ** 2)) for tensor in tensor_vars)))
dtype = np.dtype(theano.config.floatX).type
target_norm = T.clip(norm, 0, dtype(max_norm))
multiplier = (target_norm / (dtype(epsilon) + norm))
tensor_vars_scaled = [(step * multiplier) for step in tensor_vars]
if return_norm:
return (tensor_vars_scaled, norm)
else:
return tensor_vars_scaled
| [
"def",
"total_norm_constraint",
"(",
"tensor_vars",
",",
"max_norm",
",",
"epsilon",
"=",
"1e-07",
",",
"return_norm",
"=",
"False",
")",
":",
"norm",
"=",
"T",
".",
"sqrt",
"(",
"sum",
"(",
"(",
"T",
".",
"sum",
"(",
"(",
"tensor",
"**",
"2",
")",
... | rescales a list of tensors based on their combined norm if the combined norm of the input tensors exceeds the threshold then all tensors are rescaled such that the combined norm is equal to the threshold . | train | false |
28,705 | def _shape_repr(shape):
if (len(shape) == 0):
return '()'
joined = ', '.join((('%d' % e) for e in shape))
if (len(shape) == 1):
joined += ','
return ('(%s)' % joined)
| [
"def",
"_shape_repr",
"(",
"shape",
")",
":",
"if",
"(",
"len",
"(",
"shape",
")",
"==",
"0",
")",
":",
"return",
"'()'",
"joined",
"=",
"', '",
".",
"join",
"(",
"(",
"(",
"'%d'",
"%",
"e",
")",
"for",
"e",
"in",
"shape",
")",
")",
"if",
"("... | return a platform independent representation of an array shape under python 2 . | train | true |
28,706 | def split_null_strings(data):
strings = []
start = 0
for i in xrange(0, len(data), 2):
if ((data[i] == '\x00') and (data[(i + 1)] == '\x00')):
strings.append(data[start:i])
start = (i + 2)
return [s.decode('utf-16') for s in strings]
| [
"def",
"split_null_strings",
"(",
"data",
")",
":",
"strings",
"=",
"[",
"]",
"start",
"=",
"0",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"len",
"(",
"data",
")",
",",
"2",
")",
":",
"if",
"(",
"(",
"data",
"[",
"i",
"]",
"==",
"'\\x00'",
"... | splits a concatenation of null-terminated utf-16 strings . | train | false |
28,708 | @keras_test
def test_vector_regression():
np.random.seed(1337)
nb_hidden = 10
((X_train, y_train), (X_test, y_test)) = get_test_data(nb_train=500, nb_test=200, input_shape=(20,), output_shape=(2,), classification=False)
model = Sequential([Dense(nb_hidden, input_shape=(X_train.shape[(-1)],), activation='tanh'), Dense(y_train.shape[(-1)])])
model.compile(loss='hinge', optimizer='adagrad')
history = model.fit(X_train, y_train, nb_epoch=20, batch_size=16, validation_data=(X_test, y_test), verbose=0)
assert (history.history['val_loss'][(-1)] < 0.9)
| [
"@",
"keras_test",
"def",
"test_vector_regression",
"(",
")",
":",
"np",
".",
"random",
".",
"seed",
"(",
"1337",
")",
"nb_hidden",
"=",
"10",
"(",
"(",
"X_train",
",",
"y_train",
")",
",",
"(",
"X_test",
",",
"y_test",
")",
")",
"=",
"get_test_data",
... | perform float data prediction using 2 layer mlp with tanh and sigmoid activations . | train | false |
28,709 | def waitForVBL():
pass
| [
"def",
"waitForVBL",
"(",
")",
":",
"pass"
] | deprecated: waiting for a vbl is handled by the screen flip . | train | false |
28,710 | def set_change(name, change):
pre_info = info(name)
if (change == pre_info['change']):
return True
if (__grains__['kernel'] == 'FreeBSD'):
cmd = ['pw', 'user', 'mod', name, '-f', change]
else:
cmd = ['usermod', '-f', change, name]
__salt__['cmd.run'](cmd, python_shell=False)
post_info = info(name)
if (post_info['change'] != pre_info['change']):
return (post_info['change'] == change)
| [
"def",
"set_change",
"(",
"name",
",",
"change",
")",
":",
"pre_info",
"=",
"info",
"(",
"name",
")",
"if",
"(",
"change",
"==",
"pre_info",
"[",
"'change'",
"]",
")",
":",
"return",
"True",
"if",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"==",
"'Free... | sets the date on which the password expires . | train | true |
28,712 | def cache_cluster_absent(name, wait=600, region=None, key=None, keyid=None, profile=None, **args):
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
args = dict([(k, v) for (k, v) in args.items() if (not k.startswith('_'))])
exists = __salt__['boto3_elasticache.cache_cluster_exists'](name, region=region, key=key, keyid=keyid, profile=profile)
if exists:
if __opts__['test']:
ret['comment'] = 'Cache cluster {0} would be removed.'.format(name)
ret['result'] = None
return ret
deleted = __salt__['boto3_elasticache.delete_cache_cluster'](name, wait=wait, region=region, key=key, keyid=keyid, profile=profile, **args)
if deleted:
ret['changes']['old'] = name
ret['changes']['new'] = None
else:
ret['result'] = False
ret['comment'] = 'Failed to delete {0} cache cluster.'.format(name)
else:
ret['comment'] = 'Cache cluster {0} already absent.'.format(name)
return ret
| [
"def",
"cache_cluster_absent",
"(",
"name",
",",
"wait",
"=",
"600",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
",",
"**",
"args",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
... | ensure a given cache cluster is deleted . | train | true |
28,713 | def libvlc_vlm_get_event_manager(p_instance):
f = (_Cfunctions.get('libvlc_vlm_get_event_manager', None) or _Cfunction('libvlc_vlm_get_event_manager', ((1,),), class_result(EventManager), ctypes.c_void_p, Instance))
return f(p_instance)
| [
"def",
"libvlc_vlm_get_event_manager",
"(",
"p_instance",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_vlm_get_event_manager'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_vlm_get_event_manager'",
",",
"(",
"(",
"1",
",",
")",
",",
... | get libvlc_event_manager from a vlm media . | train | true |
28,714 | def buttongroup(*items):
group = QtWidgets.QButtonGroup()
for i in items:
group.addButton(i)
return group
| [
"def",
"buttongroup",
"(",
"*",
"items",
")",
":",
"group",
"=",
"QtWidgets",
".",
"QButtonGroup",
"(",
")",
"for",
"i",
"in",
"items",
":",
"group",
".",
"addButton",
"(",
"i",
")",
"return",
"group"
] | create a qbuttongroup for the specified items . | train | false |
28,715 | def take_snapshot():
if (not is_tracing()):
raise RuntimeError('the tracemalloc module must be tracing memory allocations to take a snapshot')
traces = _get_traces()
traceback_limit = get_traceback_limit()
return Snapshot(traces, traceback_limit)
| [
"def",
"take_snapshot",
"(",
")",
":",
"if",
"(",
"not",
"is_tracing",
"(",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"'the tracemalloc module must be tracing memory allocations to take a snapshot'",
")",
"traces",
"=",
"_get_traces",
"(",
")",
"traceback_limit",
"... | take a snapshot of traces of memory blocks allocated by python . | train | false |
28,716 | def generate_adhoc_ssl_context():
crypto = _get_openssl_crypto_module()
import tempfile
import atexit
(cert, pkey) = generate_adhoc_ssl_pair()
(cert_handle, cert_file) = tempfile.mkstemp()
(pkey_handle, pkey_file) = tempfile.mkstemp()
atexit.register(os.remove, pkey_file)
atexit.register(os.remove, cert_file)
os.write(cert_handle, crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
os.write(pkey_handle, crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
os.close(cert_handle)
os.close(pkey_handle)
ctx = load_ssl_context(cert_file, pkey_file)
return ctx
| [
"def",
"generate_adhoc_ssl_context",
"(",
")",
":",
"crypto",
"=",
"_get_openssl_crypto_module",
"(",
")",
"import",
"tempfile",
"import",
"atexit",
"(",
"cert",
",",
"pkey",
")",
"=",
"generate_adhoc_ssl_pair",
"(",
")",
"(",
"cert_handle",
",",
"cert_file",
")... | generates an adhoc ssl context for the development server . | train | true |
28,717 | def all_details(client, module):
if (module.params.get('max_items') or module.params.get('next_marker')):
module.fail_json(msg='Cannot specify max_items nor next_marker for query=all.')
lambda_facts = dict()
function_name = module.params.get('function_name')
if function_name:
lambda_facts[function_name] = {}
lambda_facts[function_name].update(config_details(client, module)[function_name])
lambda_facts[function_name].update(alias_details(client, module)[function_name])
lambda_facts[function_name].update(policy_details(client, module)[function_name])
lambda_facts[function_name].update(version_details(client, module)[function_name])
lambda_facts[function_name].update(mapping_details(client, module)[function_name])
else:
lambda_facts.update(config_details(client, module))
return lambda_facts
| [
"def",
"all_details",
"(",
"client",
",",
"module",
")",
":",
"if",
"(",
"module",
".",
"params",
".",
"get",
"(",
"'max_items'",
")",
"or",
"module",
".",
"params",
".",
"get",
"(",
"'next_marker'",
")",
")",
":",
"module",
".",
"fail_json",
"(",
"m... | returns all lambda related facts . | train | false |
28,718 | def dice(u, v):
u = _validate_vector(u)
v = _validate_vector(v)
if (u.dtype == bool):
ntt = (u & v).sum()
else:
ntt = (u * v).sum()
(nft, ntf) = _nbool_correspond_ft_tf(u, v)
return (float((ntf + nft)) / float((((2.0 * ntt) + ntf) + nft)))
| [
"def",
"dice",
"(",
"u",
",",
"v",
")",
":",
"u",
"=",
"_validate_vector",
"(",
"u",
")",
"v",
"=",
"_validate_vector",
"(",
"v",
")",
"if",
"(",
"u",
".",
"dtype",
"==",
"bool",
")",
":",
"ntt",
"=",
"(",
"u",
"&",
"v",
")",
".",
"sum",
"(... | computes the dice dissimilarity between two boolean 1-d arrays . | train | false |
28,722 | def boto3_log(method):
counter = itertools.count(1)
def _run_with_logging(*args, **kwargs):
'\n Run given boto3.ec2.ServiceResource method with exception\n logging for ``ClientError``.\n '
with AWS_ACTION(operation=[method.__name__, args[1:], kwargs], count=next(counter)):
return method(*args, **kwargs)
return _run_with_logging
| [
"def",
"boto3_log",
"(",
"method",
")",
":",
"counter",
"=",
"itertools",
".",
"count",
"(",
"1",
")",
"def",
"_run_with_logging",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"with",
"AWS_ACTION",
"(",
"operation",
"=",
"[",
"method",
".",
"__name... | decorator to run a boto3 . | train | false |
28,723 | @pytest.fixture(autouse=True)
def enable_caret_browsing(qapp):
settings = QWebSettings.globalSettings()
old_value = settings.testAttribute(QWebSettings.CaretBrowsingEnabled)
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, True)
(yield)
settings.setAttribute(QWebSettings.CaretBrowsingEnabled, old_value)
| [
"@",
"pytest",
".",
"fixture",
"(",
"autouse",
"=",
"True",
")",
"def",
"enable_caret_browsing",
"(",
"qapp",
")",
":",
"settings",
"=",
"QWebSettings",
".",
"globalSettings",
"(",
")",
"old_value",
"=",
"settings",
".",
"testAttribute",
"(",
"QWebSettings",
... | fixture to enable caret browsing globally . | train | false |
28,724 | def test_no_stdlib_collections():
import collections
matplotlib = import_module('matplotlib', __import__kwargs={'fromlist': ['cm', 'collections']}, min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
assert (collections != matplotlib.collections)
| [
"def",
"test_no_stdlib_collections",
"(",
")",
":",
"import",
"collections",
"matplotlib",
"=",
"import_module",
"(",
"'matplotlib'",
",",
"__import__kwargs",
"=",
"{",
"'fromlist'",
":",
"[",
"'cm'",
",",
"'collections'",
"]",
"}",
",",
"min_module_version",
"=",... | make sure we get the right collections when it is not part of a larger list . | train | false |
28,725 | def _mysql_reset_sequences(style, connection):
tables = connection.introspection.django_table_names(only_existing=True)
flush_statements = connection.ops.sql_flush(style, tables, connection.introspection.sequence_list())
return [s for s in flush_statements if s.startswith('ALTER')]
| [
"def",
"_mysql_reset_sequences",
"(",
"style",
",",
"connection",
")",
":",
"tables",
"=",
"connection",
".",
"introspection",
".",
"django_table_names",
"(",
"only_existing",
"=",
"True",
")",
"flush_statements",
"=",
"connection",
".",
"ops",
".",
"sql_flush",
... | return a sql statements needed to reset django tables . | train | false |
28,726 | def request_user_is_admin(request):
user_db = get_user_db_from_request(request=request)
return user_is_admin(user_db=user_db)
| [
"def",
"request_user_is_admin",
"(",
"request",
")",
":",
"user_db",
"=",
"get_user_db_from_request",
"(",
"request",
"=",
"request",
")",
"return",
"user_is_admin",
"(",
"user_db",
"=",
"user_db",
")"
] | check if the logged-in request user has admin role . | train | false |
28,727 | def getComplexPaths(vector3Paths):
complexPaths = []
for vector3Path in vector3Paths:
complexPaths.append(getComplexPath(vector3Path))
return complexPaths
| [
"def",
"getComplexPaths",
"(",
"vector3Paths",
")",
":",
"complexPaths",
"=",
"[",
"]",
"for",
"vector3Path",
"in",
"vector3Paths",
":",
"complexPaths",
".",
"append",
"(",
"getComplexPath",
"(",
"vector3Path",
")",
")",
"return",
"complexPaths"
] | get the complex paths from the vector3 paths . | train | false |
28,728 | def version_clean(verstr):
if (verstr and ('pkg.version_clean' in __salt__)):
return __salt__['pkg.version_clean'](verstr)
return verstr
| [
"def",
"version_clean",
"(",
"verstr",
")",
":",
"if",
"(",
"verstr",
"and",
"(",
"'pkg.version_clean'",
"in",
"__salt__",
")",
")",
":",
"return",
"__salt__",
"[",
"'pkg.version_clean'",
"]",
"(",
"verstr",
")",
"return",
"verstr"
] | clean the version string removing extra data . | train | false |
28,729 | def list_principals():
ret = {}
cmd = __execute_kadmin('list_principals')
if ((cmd['retcode'] != 0) or cmd['stderr']):
ret['comment'] = cmd['stderr'].splitlines()[(-1)]
ret['result'] = False
return ret
ret = {'principals': []}
for i in cmd['stdout'].splitlines()[1:]:
ret['principals'].append(i)
return ret
| [
"def",
"list_principals",
"(",
")",
":",
"ret",
"=",
"{",
"}",
"cmd",
"=",
"__execute_kadmin",
"(",
"'list_principals'",
")",
"if",
"(",
"(",
"cmd",
"[",
"'retcode'",
"]",
"!=",
"0",
")",
"or",
"cmd",
"[",
"'stderr'",
"]",
")",
":",
"ret",
"[",
"'c... | get all principals cli example: . | train | true |
28,730 | def send_query_completion_email(recipient_id, query_id):
email_subject = ('Query %s has successfully completed' % query_id)
email_body_template = 'Hi %s,<br>Your query with id %s has succesfully completed its execution. Visit the result page <a href="https://www.oppia.org/emaildashboardresult/%s">here</a> to see result of your query.<br><br>Thanks!<br><br>Best wishes,<br>The Oppia Team<br><br>%s'
recipient_user_settings = user_services.get_user_settings(recipient_id)
email_body = (email_body_template % (recipient_user_settings.username, query_id, query_id, EMAIL_FOOTER.value))
_send_email(recipient_id, feconf.SYSTEM_COMMITTER_ID, feconf.EMAIL_INTENT_QUERY_STATUS_NOTIFICATION, email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)
| [
"def",
"send_query_completion_email",
"(",
"recipient_id",
",",
"query_id",
")",
":",
"email_subject",
"=",
"(",
"'Query %s has successfully completed'",
"%",
"query_id",
")",
"email_body_template",
"=",
"'Hi %s,<br>Your query with id %s has succesfully completed its execution. Vis... | send an email to the initiator of a bulk email query with a link to view the query results . | train | false |
28,732 | def _additional_sanity_checks(module, zone):
overwrite = module.params['overwrite']
record_name = module.params['record']
record_type = module.params['type']
state = module.params['state']
if ((record_type == 'CNAME') and (record_name == zone.domain)):
module.fail_json(msg='CNAME records cannot match the zone name', changed=False)
if ((record_type == 'NS') and (record_name == zone.domain) and (state == 'absent')):
module.fail_json(msg='cannot delete root NS records', changed=False)
if ((record_type == 'NS') and (record_name == zone.domain) and overwrite):
module.fail_json(msg='cannot update existing root NS records', changed=False)
if ((record_type == 'SOA') and (record_name != zone.domain)):
module.fail_json(msg=('non-root SOA records are not permitted, got: %s' % record_name), changed=False)
| [
"def",
"_additional_sanity_checks",
"(",
"module",
",",
"zone",
")",
":",
"overwrite",
"=",
"module",
".",
"params",
"[",
"'overwrite'",
"]",
"record_name",
"=",
"module",
".",
"params",
"[",
"'record'",
"]",
"record_type",
"=",
"module",
".",
"params",
"[",... | run input sanity checks that depend on info from the zone/record . | train | false |
28,733 | def force_str(s, encoding='utf-8'):
if isinstance(s, str):
return s
elif isinstance(s, Text):
return s.encode(encoding)
elif isinstance(s, binary_type):
return s.decode(encoding)
else:
raise TypeError('force_str expects a string type')
| [
"def",
"force_str",
"(",
"s",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"str",
")",
":",
"return",
"s",
"elif",
"isinstance",
"(",
"s",
",",
"Text",
")",
":",
"return",
"s",
".",
"encode",
"(",
"encoding",
")",
... | converts a string to a native string . | train | false |
28,734 | def migrate_data():
_sync_repo(repo_name=DATA_MIGRATION_REPO)
| [
"def",
"migrate_data",
"(",
")",
":",
"_sync_repo",
"(",
"repo_name",
"=",
"DATA_MIGRATION_REPO",
")"
] | migrate data to match the new schema . | train | false |
28,735 | def local_config(mobsf_home):
if (not os.path.exists(CONFIG_PATH)):
os.makedirs(CONFIG_PATH)
shutil.copy((mobsf_home + '\\install\\windows\\config.txt'), os.path.join(CONFIG_PATH, CONFIG_FILE))
| [
"def",
"local_config",
"(",
"mobsf_home",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"CONFIG_PATH",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"CONFIG_PATH",
")",
"shutil",
".",
"copy",
"(",
"(",
"mobsf_home",
"+",
"'\\\\install... | move local config and save paths . | train | false |
28,736 | def can_talk_to_agent():
return bool(_get_pageant_window_object())
| [
"def",
"can_talk_to_agent",
"(",
")",
":",
"return",
"bool",
"(",
"_get_pageant_window_object",
"(",
")",
")"
] | check to see if there is a "pageant" agent we can talk to . | train | false |
28,738 | def _count_duplicated_locations(locations, new):
ret = 0
for loc in locations:
if ((loc['url'] == new['url']) and (loc['metadata'] == new['metadata'])):
ret += 1
return ret
| [
"def",
"_count_duplicated_locations",
"(",
"locations",
",",
"new",
")",
":",
"ret",
"=",
"0",
"for",
"loc",
"in",
"locations",
":",
"if",
"(",
"(",
"loc",
"[",
"'url'",
"]",
"==",
"new",
"[",
"'url'",
"]",
")",
"and",
"(",
"loc",
"[",
"'metadata'",
... | to calculate the count of duplicated locations for new one . | train | false |
28,739 | def concatv(*seqs):
return concat(seqs)
| [
"def",
"concatv",
"(",
"*",
"seqs",
")",
":",
"return",
"concat",
"(",
"seqs",
")"
] | variadic version of concat . | train | false |
28,740 | def getMP(data, count=1):
mp = []
c = 0
for i in range(count):
(length,) = struct.unpack('>L', data[c:(c + 4)])
mp.append(Util.number.bytes_to_long(data[(c + 4):((c + 4) + length)]))
c += (4 + length)
return (tuple(mp) + (data[c:],))
| [
"def",
"getMP",
"(",
"data",
",",
"count",
"=",
"1",
")",
":",
"mp",
"=",
"[",
"]",
"c",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"count",
")",
":",
"(",
"length",
",",
")",
"=",
"struct",
".",
"unpack",
"(",
"'>L'",
",",
"data",
"[",
"c",... | get multiple precision integer out of the string . | train | false |
28,741 | def _run_exitfuncs():
exc_info = None
while _exithandlers:
(func, targs, kargs) = _exithandlers.pop()
try:
func(*targs, **kargs)
except SystemExit:
exc_info = sys.exc_info()
except:
import traceback
sys.stderr.write('Error in atexit._run_exitfuncs:\n')
traceback.print_exc()
exc_info = sys.exc_info()
if (exc_info is not None):
raise exc_info[0](exc_info[1])
| [
"def",
"_run_exitfuncs",
"(",
")",
":",
"exc_info",
"=",
"None",
"while",
"_exithandlers",
":",
"(",
"func",
",",
"targs",
",",
"kargs",
")",
"=",
"_exithandlers",
".",
"pop",
"(",
")",
"try",
":",
"func",
"(",
"*",
"targs",
",",
"**",
"kargs",
")",
... | run any registered exit functions _exithandlers is traversed in reverse order so functions are executed last in . | train | false |
28,744 | def load_signatures(filename, cache=True):
global SIGNATURE_CACHE
f = open(filename, 'r')
sigjson = f.read()
f.close()
sigdata = simplejson.loads(sigjson)
if cache:
SIGNATURE_CACHE = sigdata
| [
"def",
"load_signatures",
"(",
"filename",
",",
"cache",
"=",
"True",
")",
":",
"global",
"SIGNATURE_CACHE",
"f",
"=",
"open",
"(",
"filename",
",",
"'r'",
")",
"sigjson",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"sigdata",
"=",
... | loads the import signatures for distros . | train | false |
28,745 | def prepare_class(name, bases=(), kwds=None):
if (kwds is None):
kwds = {}
else:
kwds = dict(kwds)
if ('metaclass' in kwds):
meta = kwds.pop('metaclass')
elif bases:
meta = type(bases[0])
else:
meta = type
if isinstance(meta, type):
meta = _calculate_meta(meta, bases)
if hasattr(meta, '__prepare__'):
ns = meta.__prepare__(name, bases, **kwds)
else:
ns = {}
return (meta, ns, kwds)
| [
"def",
"prepare_class",
"(",
"name",
",",
"bases",
"=",
"(",
")",
",",
"kwds",
"=",
"None",
")",
":",
"if",
"(",
"kwds",
"is",
"None",
")",
":",
"kwds",
"=",
"{",
"}",
"else",
":",
"kwds",
"=",
"dict",
"(",
"kwds",
")",
"if",
"(",
"'metaclass'"... | call the __prepare__ method of the appropriate metaclass . | train | false |
28,746 | def maxout(x, pool_size, axis=1):
if (pool_size <= 0):
raise ValueError('pool_size must be a positive integer.')
x_shape = x.shape
if ((x_shape[axis] % pool_size) != 0):
expect = 'x.shape[axis] % pool_size == 0'
actual = 'x.shape[axis]={}, pool_size={}'.format(x_shape[axis], pool_size)
msg = 'axis dimension must be divided by pool_size'
raise type_check.InvalidType(expect, actual, msg)
shape = ((x_shape[:axis] + ((x_shape[axis] // pool_size), pool_size)) + x_shape[(axis + 1):])
x = reshape.reshape(x, shape)
return minmax.max(x, axis=(axis + 1))
| [
"def",
"maxout",
"(",
"x",
",",
"pool_size",
",",
"axis",
"=",
"1",
")",
":",
"if",
"(",
"pool_size",
"<=",
"0",
")",
":",
"raise",
"ValueError",
"(",
"'pool_size must be a positive integer.'",
")",
"x_shape",
"=",
"x",
".",
"shape",
"if",
"(",
"(",
"x... | maxout activation function . | train | false |
28,747 | def getUniqueToken(word):
uniqueString = '@#!'
for character in uniqueString:
if (character not in word):
return character
uniqueNumber = 0
while True:
for character in uniqueString:
uniqueToken = (character + str(uniqueNumber))
if (uniqueToken not in word):
return uniqueToken
uniqueNumber += 1
| [
"def",
"getUniqueToken",
"(",
"word",
")",
":",
"uniqueString",
"=",
"'@#!'",
"for",
"character",
"in",
"uniqueString",
":",
"if",
"(",
"character",
"not",
"in",
"word",
")",
":",
"return",
"character",
"uniqueNumber",
"=",
"0",
"while",
"True",
":",
"for"... | get unique token . | train | false |
28,748 | def distribution():
return s3_rest_controller()
| [
"def",
"distribution",
"(",
")",
":",
"return",
"s3_rest_controller",
"(",
")"
] | activities which include distributions . | train | false |
28,751 | def build_component_list(compdict, custom=None, convert=update_classpath):
def _check_components(complist):
if (len({convert(c) for c in complist}) != len(complist)):
raise ValueError('Some paths in {!r} convert to the same object, please update your settings'.format(complist))
def _map_keys(compdict):
if isinstance(compdict, BaseSettings):
compbs = BaseSettings()
for (k, v) in six.iteritems(compdict):
prio = compdict.getpriority(k)
if (compbs.getpriority(convert(k)) == prio):
raise ValueError('Some paths in {!r} convert to the same object, please update your settings'.format(list(compdict.keys())))
else:
compbs.set(convert(k), v, priority=prio)
return compbs
else:
_check_components(compdict)
return {convert(k): v for (k, v) in six.iteritems(compdict)}
if isinstance(custom, (list, tuple)):
_check_components(custom)
return type(custom)((convert(c) for c in custom))
if (custom is not None):
compdict.update(custom)
compdict = without_none_values(_map_keys(compdict))
return [k for (k, v) in sorted(six.iteritems(compdict), key=itemgetter(1))]
| [
"def",
"build_component_list",
"(",
"compdict",
",",
"custom",
"=",
"None",
",",
"convert",
"=",
"update_classpath",
")",
":",
"def",
"_check_components",
"(",
"complist",
")",
":",
"if",
"(",
"len",
"(",
"{",
"convert",
"(",
"c",
")",
"for",
"c",
"in",
... | compose a component list based on a custom and base dict of components . | train | false |
28,752 | def pr_remove_affiliation(master, affiliate, role=None):
master_pe = pr_get_pe_id(master)
affiliate_pe = pr_get_pe_id(affiliate)
if affiliate_pe:
s3db = current.s3db
atable = s3db.pr_affiliation
rtable = s3db.pr_role
query = ((atable.pe_id == affiliate_pe) & (atable.role_id == rtable.id))
if master_pe:
query &= (rtable.pe_id == master_pe)
if role:
query &= (rtable.role == role)
rows = current.db(query).select(rtable.id)
for row in rows:
pr_remove_from_role(row.id, affiliate_pe)
return
| [
"def",
"pr_remove_affiliation",
"(",
"master",
",",
"affiliate",
",",
"role",
"=",
"None",
")",
":",
"master_pe",
"=",
"pr_get_pe_id",
"(",
"master",
")",
"affiliate_pe",
"=",
"pr_get_pe_id",
"(",
"affiliate",
")",
"if",
"affiliate_pe",
":",
"s3db",
"=",
"cu... | remove affiliation records . | train | false |
28,753 | def encode_frag_archive_bodies(policy, body):
segment_size = policy.ec_segment_size
chunks = [body[x:(x + segment_size)] for x in range(0, len(body), segment_size)]
fragment_payloads = []
for chunk in chunks:
fragments = policy.pyeclib_driver.encode(chunk)
if (not fragments):
break
fragment_payloads.append(fragments)
ec_archive_bodies = [''.join(frags) for frags in zip(*fragment_payloads)]
return ec_archive_bodies
| [
"def",
"encode_frag_archive_bodies",
"(",
"policy",
",",
"body",
")",
":",
"segment_size",
"=",
"policy",
".",
"ec_segment_size",
"chunks",
"=",
"[",
"body",
"[",
"x",
":",
"(",
"x",
"+",
"segment_size",
")",
"]",
"for",
"x",
"in",
"range",
"(",
"0",
"... | given a stub body produce a list of complete frag_archive bodies as strings in frag_index order . | train | false |
28,756 | def hash_data(hash_):
res = {}
for (key, value) in hash_.items():
if (type(value) in (str, unicode)):
if (value == 'true'):
value = True
elif (value == 'false'):
value = False
elif (value == 'null'):
value = None
elif (value.isdigit() and (not re.match('^0[0-9]+', value))):
value = int(value)
elif re.match('^\\d{4}-\\d{2}-\\d{2}$', value):
value = datetime.strptime(value, '%Y-%m-%d')
res[key] = value
return res
| [
"def",
"hash_data",
"(",
"hash_",
")",
":",
"res",
"=",
"{",
"}",
"for",
"(",
"key",
",",
"value",
")",
"in",
"hash_",
".",
"items",
"(",
")",
":",
"if",
"(",
"type",
"(",
"value",
")",
"in",
"(",
"str",
",",
"unicode",
")",
")",
":",
"if",
... | return the base64 encoded sha1 hash of the data . | train | false |
28,757 | def _sum_clones_gradients(clone_grads):
sum_grads = []
for grad_and_vars in zip(*clone_grads):
grads = []
var = grad_and_vars[0][1]
for (g, v) in grad_and_vars:
assert (v == var)
if (g is not None):
grads.append(g)
if grads:
if (len(grads) > 1):
sum_grad = tf.add_n(grads, name=(var.op.name + '/sum_grads'))
else:
sum_grad = grads[0]
sum_grads.append((sum_grad, var))
return sum_grads
| [
"def",
"_sum_clones_gradients",
"(",
"clone_grads",
")",
":",
"sum_grads",
"=",
"[",
"]",
"for",
"grad_and_vars",
"in",
"zip",
"(",
"*",
"clone_grads",
")",
":",
"grads",
"=",
"[",
"]",
"var",
"=",
"grad_and_vars",
"[",
"0",
"]",
"[",
"1",
"]",
"for",
... | calculate the sum gradient for each shared variable across all clones . | train | false |
28,758 | def detach_usage_plan_from_apis(plan_id, apis, region=None, key=None, keyid=None, profile=None):
return _update_usage_plan_apis(plan_id, apis, 'remove', region=region, key=key, keyid=keyid, profile=profile)
| [
"def",
"detach_usage_plan_from_apis",
"(",
"plan_id",
",",
"apis",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"return",
"_update_usage_plan_apis",
"(",
"plan_id",
",",
"apis",
",",... | detaches given usage plan from each of the apis provided in a list of apiid and stage value . | train | true |
28,759 | def roundrobin(iterables):
pending = len(iterables)
nexts = cycle((iter(it).next for it in iterables))
while pending:
try:
for next in nexts:
(yield next())
except StopIteration:
pending -= 1
nexts = cycle(islice(nexts, pending))
| [
"def",
"roundrobin",
"(",
"iterables",
")",
":",
"pending",
"=",
"len",
"(",
"iterables",
")",
"nexts",
"=",
"cycle",
"(",
"(",
"iter",
"(",
"it",
")",
".",
"next",
"for",
"it",
"in",
"iterables",
")",
")",
"while",
"pending",
":",
"try",
":",
"for... | roundrobin take elements from iterables in a round-robin fashion . | train | true |
28,760 | @patch('sys.exit')
def test_validate_python(mock_exit):
with patch('sys.version_info', new_callable=PropertyMock(return_value=(2, 7, 8))):
main.validate_python()
assert (mock_exit.called is True)
mock_exit.reset_mock()
with patch('sys.version_info', new_callable=PropertyMock(return_value=(3, 2, 0))):
main.validate_python()
assert (mock_exit.called is True)
mock_exit.reset_mock()
with patch('sys.version_info', new_callable=PropertyMock(return_value=(3, 4, 1))):
main.validate_python()
assert (mock_exit.called is True)
mock_exit.reset_mock()
with patch('sys.version_info', new_callable=PropertyMock(return_value=(3, 4, 2))):
main.validate_python()
assert (mock_exit.called is False)
mock_exit.reset_mock()
with patch('sys.version_info', new_callable=PropertyMock(return_value=(3, 5, 1))):
main.validate_python()
assert (mock_exit.called is False)
| [
"@",
"patch",
"(",
"'sys.exit'",
")",
"def",
"test_validate_python",
"(",
"mock_exit",
")",
":",
"with",
"patch",
"(",
"'sys.version_info'",
",",
"new_callable",
"=",
"PropertyMock",
"(",
"return_value",
"=",
"(",
"2",
",",
"7",
",",
"8",
")",
")",
")",
... | test validate python version method . | train | false |
28,761 | def check_scoring(estimator, scoring=None, allow_none=False):
has_scoring = (scoring is not None)
if (not hasattr(estimator, 'fit')):
raise TypeError(("estimator should be an estimator implementing 'fit' method, %r was passed" % estimator))
if isinstance(scoring, six.string_types):
return get_scorer(scoring)
elif has_scoring:
module = getattr(scoring, '__module__', None)
if (hasattr(module, 'startswith') and module.startswith('sklearn.metrics.') and (not module.startswith('sklearn.metrics.scorer')) and (not module.startswith('sklearn.metrics.tests.'))):
raise ValueError(('scoring value %r looks like it is a metric function rather than a scorer. A scorer should require an estimator as its first parameter. Please use `make_scorer` to convert a metric to a scorer.' % scoring))
return get_scorer(scoring)
elif hasattr(estimator, 'score'):
return _passthrough_scorer
elif allow_none:
return None
else:
raise TypeError(("If no scoring is specified, the estimator passed should have a 'score' method. The estimator %r does not." % estimator))
| [
"def",
"check_scoring",
"(",
"estimator",
",",
"scoring",
"=",
"None",
",",
"allow_none",
"=",
"False",
")",
":",
"has_scoring",
"=",
"(",
"scoring",
"is",
"not",
"None",
")",
"if",
"(",
"not",
"hasattr",
"(",
"estimator",
",",
"'fit'",
")",
")",
":",
... | determine scorer from user options . | train | false |
28,762 | def getfield(f):
if isinstance(f, list):
return [getfield(x) for x in f]
else:
return f.value
| [
"def",
"getfield",
"(",
"f",
")",
":",
"if",
"isinstance",
"(",
"f",
",",
"list",
")",
":",
"return",
"[",
"getfield",
"(",
"x",
")",
"for",
"x",
"in",
"f",
"]",
"else",
":",
"return",
"f",
".",
"value"
] | convert values from cgi . | train | true |
28,763 | def get_local_branch():
if is_git_repo():
(o, _) = Popen(u'git branch | grep "\\* "', shell=True, stdout=PIPE, cwd=os.path.dirname(nipype.__file__)).communicate()
return o.strip()[2:]
else:
return None
| [
"def",
"get_local_branch",
"(",
")",
":",
"if",
"is_git_repo",
"(",
")",
":",
"(",
"o",
",",
"_",
")",
"=",
"Popen",
"(",
"u'git branch | grep \"\\\\* \"'",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"PIPE",
",",
"cwd",
"=",
"os",
".",
"path",
".... | determine current branch . | train | false |
28,764 | def find_active_constraints(x, lb, ub, rtol=1e-10):
active = np.zeros_like(x, dtype=int)
if (rtol == 0):
active[(x <= lb)] = (-1)
active[(x >= ub)] = 1
return active
lower_dist = (x - lb)
upper_dist = (ub - x)
lower_threshold = (rtol * np.maximum(1, np.abs(lb)))
upper_threshold = (rtol * np.maximum(1, np.abs(ub)))
lower_active = (np.isfinite(lb) & (lower_dist <= np.minimum(upper_dist, lower_threshold)))
active[lower_active] = (-1)
upper_active = (np.isfinite(ub) & (upper_dist <= np.minimum(lower_dist, upper_threshold)))
active[upper_active] = 1
return active
| [
"def",
"find_active_constraints",
"(",
"x",
",",
"lb",
",",
"ub",
",",
"rtol",
"=",
"1e-10",
")",
":",
"active",
"=",
"np",
".",
"zeros_like",
"(",
"x",
",",
"dtype",
"=",
"int",
")",
"if",
"(",
"rtol",
"==",
"0",
")",
":",
"active",
"[",
"(",
... | determine which constraints are active in a given point . | train | false |
28,765 | def read_rle_bit_packed_hybrid(file_obj, width, length=None):
debug_logging = logger.isEnabledFor(logging.DEBUG)
io_obj = file_obj
if (length is None):
length = read_plain_int32(file_obj, 1)[0]
raw_bytes = file_obj.read(length)
if (raw_bytes == ''):
return None
io_obj = io.BytesIO(raw_bytes)
res = []
while (io_obj.tell() < length):
header = read_unsigned_var_int(io_obj)
if ((header & 1) == 0):
res += read_rle(io_obj, header, width, debug_logging)
else:
res += read_bitpacked(io_obj, header, width, debug_logging)
return res
| [
"def",
"read_rle_bit_packed_hybrid",
"(",
"file_obj",
",",
"width",
",",
"length",
"=",
"None",
")",
":",
"debug_logging",
"=",
"logger",
".",
"isEnabledFor",
"(",
"logging",
".",
"DEBUG",
")",
"io_obj",
"=",
"file_obj",
"if",
"(",
"length",
"is",
"None",
... | read values from fo using the rel/bit-packed hybrid encoding . | train | true |
28,766 | def jid_to_time(jid):
jid = str(jid)
if (len(jid) != 20):
return ''
year = jid[:4]
month = jid[4:6]
day = jid[6:8]
hour = jid[8:10]
minute = jid[10:12]
second = jid[12:14]
micro = jid[14:]
ret = '{0}, {1} {2} {3}:{4}:{5}.{6}'.format(year, months[int(month)], day, hour, minute, second, micro)
return ret
| [
"def",
"jid_to_time",
"(",
"jid",
")",
":",
"jid",
"=",
"str",
"(",
"jid",
")",
"if",
"(",
"len",
"(",
"jid",
")",
"!=",
"20",
")",
":",
"return",
"''",
"year",
"=",
"jid",
"[",
":",
"4",
"]",
"month",
"=",
"jid",
"[",
"4",
":",
"6",
"]",
... | convert a salt job id into the time when the job was invoked . | train | true |
28,767 | def dmp_normal(f, u, K):
if (not u):
return dup_normal(f, K)
v = (u - 1)
return dmp_strip([dmp_normal(c, v, K) for c in f], u)
| [
"def",
"dmp_normal",
"(",
"f",
",",
"u",
",",
"K",
")",
":",
"if",
"(",
"not",
"u",
")",
":",
"return",
"dup_normal",
"(",
"f",
",",
"K",
")",
"v",
"=",
"(",
"u",
"-",
"1",
")",
"return",
"dmp_strip",
"(",
"[",
"dmp_normal",
"(",
"c",
",",
... | normalize a multivariate polynomial in the given domain . | train | false |
28,768 | @cleanup
def test_determinism_plain():
_determinism_check(u'', format=u'pdf')
| [
"@",
"cleanup",
"def",
"test_determinism_plain",
"(",
")",
":",
"_determinism_check",
"(",
"u''",
",",
"format",
"=",
"u'pdf'",
")"
] | test for reproducible pdf output: simple figure . | train | false |
28,769 | def reset_profile():
with _profile_lock:
sys.getdxp()
global _cumulative_profile
_cumulative_profile = sys.getdxp()
| [
"def",
"reset_profile",
"(",
")",
":",
"with",
"_profile_lock",
":",
"sys",
".",
"getdxp",
"(",
")",
"global",
"_cumulative_profile",
"_cumulative_profile",
"=",
"sys",
".",
"getdxp",
"(",
")"
] | forgets any execution profile that has been gathered so far . | train | false |
28,770 | def sign_v1_message(body, platform, version, epoch=None):
token = get_secret_token(platform, version, global_version=1)
epoch = int((epoch or current_epoch()))
payload = epoch_wrap(epoch, body)
signature = versioned_hmac(token, payload, global_version=1)
return '{global_version}:{platform}:{version}:{epoch}:{signature}'.format(global_version=1, platform=platform, version=version, epoch=epoch, signature=signature)
| [
"def",
"sign_v1_message",
"(",
"body",
",",
"platform",
",",
"version",
",",
"epoch",
"=",
"None",
")",
":",
"token",
"=",
"get_secret_token",
"(",
"platform",
",",
"version",
",",
"global_version",
"=",
"1",
")",
"epoch",
"=",
"int",
"(",
"(",
"epoch",
... | reference implementation of the v1 mobile body signing . | train | false |
28,771 | def path2mod(path):
return path[:(-3)].replace(os.sep, '.')
| [
"def",
"path2mod",
"(",
"path",
")",
":",
"return",
"path",
"[",
":",
"(",
"-",
"3",
")",
"]",
".",
"replace",
"(",
"os",
".",
"sep",
",",
"'.'",
")"
] | convert a file path to a dotted module name . | train | false |
28,772 | def lbp_mul_term(f, cx):
return lbp(sig_mult(Sign(f), cx[0]), Polyn(f).mul_term(cx), Num(f))
| [
"def",
"lbp_mul_term",
"(",
"f",
",",
"cx",
")",
":",
"return",
"lbp",
"(",
"sig_mult",
"(",
"Sign",
"(",
"f",
")",
",",
"cx",
"[",
"0",
"]",
")",
",",
"Polyn",
"(",
"f",
")",
".",
"mul_term",
"(",
"cx",
")",
",",
"Num",
"(",
"f",
")",
")"
... | multiply a labeled polynomial with a term . | train | false |
28,773 | def query_schema(query_params_schema, min_version=None, max_version=None):
def add_validator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if ('req' in kwargs):
req = kwargs['req']
else:
req = args[1]
if _schema_validation_helper(query_params_schema, req.GET.dict_of_lists(), min_version, max_version, args, kwargs, is_body=False):
_strip_additional_query_parameters(query_params_schema, req)
return func(*args, **kwargs)
return wrapper
return add_validator
| [
"def",
"query_schema",
"(",
"query_params_schema",
",",
"min_version",
"=",
"None",
",",
"max_version",
"=",
"None",
")",
":",
"def",
"add_validator",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"a... | register a schema to validate request query parameters . | train | false |
@manager.command()
@argument('email')
@option('--org', 'organization', default='default', help="the organization the user belongs to, (leave blank for 'default').")
def grant_admin(email, organization='default'):
    """Grant admin access to the user identified by *email*.

    Looks up the organization by slug, then adds the organization's
    admin group to the user's group list.  Idempotent: prints a notice
    instead of re-adding when the user is already an admin, and prints
    an error when no matching user exists.
    """
    try:
        org = models.Organization.get_by_slug(organization)
        admin_group = org.admin_group
        user = models.User.get_by_email_and_org(email, org)
        if (admin_group.id in user.group_ids):
            print 'User is already an admin.'
        else:
            # Append rather than replace so existing group memberships survive.
            user.group_ids = (user.group_ids + [org.admin_group.id])
            models.db.session.add(user)
            models.db.session.commit()
            print 'User updated.'
    except NoResultFound:
        # Raised when the user lookup matches no row (presumably also by
        # the organization lookup -- TODO confirm; the message only
        # mentions the user).
        print ('User [%s] not found.' % email)
| [
"@",
"manager",
".",
"command",
"(",
")",
"@",
"argument",
"(",
"'email'",
")",
"@",
"option",
"(",
"'--org'",
",",
"'organization'",
",",
"default",
"=",
"'default'",
",",
"help",
"=",
"\"the organization the user belongs to, (leave blank for 'default').\"",
")",
... | grant admin access to user email . | train | false |
def complete_multipartite_graph(*subset_sizes):
    """Return the complete multipartite graph with the given subset sizes.

    Each argument is either an int (the size of one subset; its nodes
    get consecutive integer labels) or an iterable of node labels for
    that subset.  Every node is joined to every node of every *other*
    subset, and each node carries a ``subset`` attribute recording its
    subset index.

    Raises
    ------
    NetworkXError
        If the arguments are not all ints or all iterables.
    """
    G = nx.Graph()
    # Bug fix: the graph name previously misspelled "multipartite" as
    # "multiparite".
    G.name = 'complete_multipartite_graph{}'.format(subset_sizes)
    if (len(subset_sizes) == 0):
        return G
    try:
        # All-int arguments: carve consecutive integer ranges, one per
        # subset, from a running prefix sum of the sizes.
        extents = pairwise(accumulate(((0,) + subset_sizes)))
        subsets = [range(start, end) for (start, end) in extents]
    except TypeError:
        # Non-int arguments: treat each one as an iterable of node labels.
        subsets = subset_sizes
    try:
        for (i, subset) in enumerate(subsets):
            G.add_nodes_from(subset, subset=i)
    except TypeError:
        raise nx.NetworkXError('Arguments must be all ints or all iterables')
    # "Complete multipartite": add an edge for every pair of nodes drawn
    # from two different subsets.
    for (subset1, subset2) in itertools.combinations(subsets, 2):
        G.add_edges_from(itertools.product(subset1, subset2))
    return G
| [
"def",
"complete_multipartite_graph",
"(",
"*",
"subset_sizes",
")",
":",
"G",
"=",
"nx",
".",
"Graph",
"(",
")",
"G",
".",
"name",
"=",
"'complete_multiparite_graph{}'",
".",
"format",
"(",
"subset_sizes",
")",
"if",
"(",
"len",
"(",
"subset_sizes",
")",
... | returns the complete multipartite graph with the specified subset sizes . | train | false |
def ctrl_direction(bmRequestType):
    """Return the direction bits of a USB control request.

    Masks *bmRequestType* with the module-level ``_CTRL_DIR_MASK``
    (presumably bit 7, 0x80: device-to-host vs. host-to-device --
    TODO confirm against the mask's definition elsewhere in this
    module).
    """
    return (bmRequestType & _CTRL_DIR_MASK)
| [
"def",
"ctrl_direction",
"(",
"bmRequestType",
")",
":",
"return",
"(",
"bmRequestType",
"&",
"_CTRL_DIR_MASK",
")"
] | return the direction of a control request . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.