id_within_dataset : int64 (1 to 55.5k)
snippet : string (length 19 to 14.2k)
tokens : list (length 6 to 1.63k)
nl : string (length 6 to 352)
split_within_dataset : string (1 distinct value)
is_duplicated : bool (2 classes)
4,581
def for_me(conditions, myself):
    if (not conditions.audience_restriction):
        return True
    for restriction in conditions.audience_restriction:
        if (not restriction.audience):
            continue
        for audience in restriction.audience:
            if (audience.text.strip() == myself):
                return True
            else:
                pass
    return False
[ "def", "for_me", "(", "conditions", ",", "myself", ")", ":", "if", "(", "not", "conditions", ".", "audience_restriction", ")", ":", "return", "True", "for", "restriction", "in", "conditions", ".", "audience_restriction", ":", "if", "(", "not", "restriction", ".", "audience", ")", ":", "continue", "for", "audience", "in", "restriction", ".", "audience", ":", "if", "(", "audience", ".", "text", ".", "strip", "(", ")", "==", "myself", ")", ":", "return", "True", "else", ":", "pass", "return", "False" ]
am i among the intended audiences .
train
true
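A minimal usage sketch for the snippet above. The SimpleNamespace objects are hypothetical stand-ins for whatever SAML element classes the real caller provides; only the attributes the function reads are stubbed.

from types import SimpleNamespace

audience = SimpleNamespace(text=' https://sp.example.org ')
restriction = SimpleNamespace(audience=[audience])
conditions = SimpleNamespace(audience_restriction=[restriction])

# Audience text is stripped before comparison, so padded values still match.
assert for_me(conditions, 'https://sp.example.org')
assert not for_me(conditions, 'https://other.example.org')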
4,582
def octahedron(radius, dtype=np.uint8):
    n = ((2 * radius) + 1)
    (Z, Y, X) = np.mgrid[(- radius):radius:(n * 1j), (- radius):radius:(n * 1j), (- radius):radius:(n * 1j)]
    s = ((np.abs(X) + np.abs(Y)) + np.abs(Z))
    return np.array((s <= radius), dtype=dtype)
[ "def", "octahedron", "(", "radius", ",", "dtype", "=", "np", ".", "uint8", ")", ":", "n", "=", "(", "(", "2", "*", "radius", ")", "+", "1", ")", "(", "Z", ",", "Y", ",", "X", ")", "=", "np", ".", "mgrid", "[", "(", "-", "radius", ")", ":", "radius", ":", "(", "n", "*", "1j", ")", ",", "(", "-", "radius", ")", ":", "radius", ":", "(", "n", "*", "1j", ")", ",", "(", "-", "radius", ")", ":", "radius", ":", "(", "n", "*", "1j", ")", "]", "s", "=", "(", "(", "np", ".", "abs", "(", "X", ")", "+", "np", ".", "abs", "(", "Y", ")", ")", "+", "np", ".", "abs", "(", "Z", ")", ")", "return", "np", ".", "array", "(", "(", "s", "<=", "radius", ")", ",", "dtype", "=", "dtype", ")" ]
generates an octahedron-shaped structuring element .
train
false
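A quick sanity check of the snippet above, assuming numpy is imported as np in the same scope:

import numpy as np

elem = octahedron(1)
# 3x3x3 grid; voxels with |x| + |y| + |z| <= 1 are set
assert elem.shape == (3, 3, 3)
assert elem[1, 1, 1] == 1  # center voxel
assert elem.sum() == 7     # center plus its six face neighbors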
4,583
def test_read_space_delimiter():
    table = '\n Name --Phone- ----TCP-----\n John 555-1234 192.168.1.10\n Mary 555-2134 192.168.1.12\n Bob 555-4527 192.168.1.9\n'
    dat = ascii.read(table, Reader=ascii.FixedWidth, guess=False, delimiter=' ')
    assert_equal(tuple(dat.dtype.names), ('Name', '--Phone-', '----TCP-----'))
    assert_equal(dat[1][0], 'Mary')
    assert_equal(dat[0][1], '555-1234')
    assert_equal(dat[2][2], '192.168.1.9')
[ "def", "test_read_space_delimiter", "(", ")", ":", "table", "=", "'\\n Name --Phone- ----TCP-----\\n John 555-1234 192.168.1.10\\n Mary 555-2134 192.168.1.12\\n Bob 555-4527 192.168.1.9\\n'", "dat", "=", "ascii", ".", "read", "(", "table", ",", "Reader", "=", "ascii", ".", "FixedWidth", ",", "guess", "=", "False", ",", "delimiter", "=", "' '", ")", "assert_equal", "(", "tuple", "(", "dat", ".", "dtype", ".", "names", ")", ",", "(", "'Name'", ",", "'--Phone-'", ",", "'----TCP-----'", ")", ")", "assert_equal", "(", "dat", "[", "1", "]", "[", "0", "]", ",", "'Mary'", ")", "assert_equal", "(", "dat", "[", "0", "]", "[", "1", "]", ",", "'555-1234'", ")", "assert_equal", "(", "dat", "[", "2", "]", "[", "2", "]", ",", "'192.168.1.9'", ")" ]
table with space delimiter .
train
false
4,584
def getindex(ndim, ind, strides):
    ret = 0
    for i in range(ndim):
        ret += (strides[i] * ind[i])
    return ret
[ "def", "getindex", "(", "ndim", ",", "ind", ",", "strides", ")", ":", "ret", "=", "0", "for", "i", "in", "range", "(", "ndim", ")", ":", "ret", "+=", "(", "strides", "[", "i", "]", "*", "ind", "[", "i", "]", ")", "return", "ret" ]
convert multi-dimensional index to the position in the flat list .
train
false
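A small worked example of the snippet above: for a row-major 3x4 array the strides (in elements) are (4, 1), so index (1, 2) lands at flat position 1*4 + 2*1 = 6.

strides = (4, 1)
assert getindex(2, (1, 2), strides) == 6
assert getindex(2, (0, 0), strides) == 0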
4,585
def fixed2csv(f, schema, output=None, **kwargs):
    streaming = (True if output else False)
    if (not streaming):
        output = six.StringIO()
    try:
        encoding = kwargs['encoding']
    except KeyError:
        encoding = None
    writer = agate.csv.writer(output)
    reader = FixedWidthReader(f, schema, encoding=encoding)
    writer.writerows(reader)
    if (not streaming):
        data = output.getvalue()
        output.close()
        return data
    return ''
[ "def", "fixed2csv", "(", "f", ",", "schema", ",", "output", "=", "None", ",", "**", "kwargs", ")", ":", "streaming", "=", "(", "True", "if", "output", "else", "False", ")", "if", "(", "not", "streaming", ")", ":", "output", "=", "six", ".", "StringIO", "(", ")", "try", ":", "encoding", "=", "kwargs", "[", "'encoding'", "]", "except", "KeyError", ":", "encoding", "=", "None", "writer", "=", "agate", ".", "csv", ".", "writer", "(", "output", ")", "reader", "=", "FixedWidthReader", "(", "f", ",", "schema", ",", "encoding", "=", "encoding", ")", "writer", ".", "writerows", "(", "reader", ")", "if", "(", "not", "streaming", ")", ":", "data", "=", "output", ".", "getvalue", "(", ")", "output", ".", "close", "(", ")", "return", "data", "return", "''" ]
convert a fixed-width file to csv using a csv-formatted schema description .
train
false
4,587
def _wait_for_job(linode_id, job_id, timeout=300, quiet=True):
    interval = 5
    iterations = int((timeout / interval))
    for i in range(0, iterations):
        jobs_result = _query('linode', 'job.list', args={'LinodeID': linode_id})['DATA']
        if ((jobs_result[0]['JOBID'] == job_id) and (jobs_result[0]['HOST_SUCCESS'] == 1)):
            return True
        time.sleep(interval)
        if (not quiet):
            log.info('Still waiting on Job {0} for Linode {1}.'.format(job_id, linode_id))
        else:
            log.debug('Still waiting on Job {0} for Linode {1}.'.format(job_id, linode_id))
    return False
[ "def", "_wait_for_job", "(", "linode_id", ",", "job_id", ",", "timeout", "=", "300", ",", "quiet", "=", "True", ")", ":", "interval", "=", "5", "iterations", "=", "int", "(", "(", "timeout", "/", "interval", ")", ")", "for", "i", "in", "range", "(", "0", ",", "iterations", ")", ":", "jobs_result", "=", "_query", "(", "'linode'", ",", "'job.list'", ",", "args", "=", "{", "'LinodeID'", ":", "linode_id", "}", ")", "[", "'DATA'", "]", "if", "(", "(", "jobs_result", "[", "0", "]", "[", "'JOBID'", "]", "==", "job_id", ")", "and", "(", "jobs_result", "[", "0", "]", "[", "'HOST_SUCCESS'", "]", "==", "1", ")", ")", ":", "return", "True", "time", ".", "sleep", "(", "interval", ")", "if", "(", "not", "quiet", ")", ":", "log", ".", "info", "(", "'Still waiting on Job {0} for Linode {1}.'", ".", "format", "(", "job_id", ",", "linode_id", ")", ")", "else", ":", "log", ".", "debug", "(", "'Still waiting on Job {0} for Linode {1}.'", ".", "format", "(", "job_id", ",", "linode_id", ")", ")", "return", "False" ]
wait for a job to return .
train
false
4,588
def _CheckFacetDiscoveryLimit(facet_limit):
    if (facet_limit is None):
        return None
    else:
        return _CheckInteger(facet_limit, 'discover_facet_limit', upper_bound=MAXIMUM_FACETS_TO_RETURN)
[ "def", "_CheckFacetDiscoveryLimit", "(", "facet_limit", ")", ":", "if", "(", "facet_limit", "is", "None", ")", ":", "return", "None", "else", ":", "return", "_CheckInteger", "(", "facet_limit", ",", "'discover_facet_limit'", ",", "upper_bound", "=", "MAXIMUM_FACETS_TO_RETURN", ")" ]
checks the facet limit is an integer within range .
train
false
4,589
def createoutputdirs(outputs):
    for output in list(outputs.values()):
        dirname = os.path.dirname(output)
        if (not os.path.isdir(dirname)):
            os.makedirs(dirname)
[ "def", "createoutputdirs", "(", "outputs", ")", ":", "for", "output", "in", "list", "(", "outputs", ".", "values", "(", ")", ")", ":", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "output", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dirname", ")", ")", ":", "os", ".", "makedirs", "(", "dirname", ")" ]
create all output directories .
train
false
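A self-contained sketch exercising the snippet above in a throwaway directory:

import os
import tempfile

base = tempfile.mkdtemp()
outputs = {'result': os.path.join(base, 'a', 'b', 'out.txt')}
createoutputdirs(outputs)
# The parent directories now exist; the file itself is not created.
assert os.path.isdir(os.path.join(base, 'a', 'b'))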
4,590
def flocker_volume_options(cls):
    original_parameters = getattr(cls, 'optParameters', [])
    cls.optParameters = (original_parameters + [
        ['config', None, DEFAULT_CONFIG_PATH.path, 'The path to the Flocker volume configuration file, containing the node ID of the Flocker volume service on this node. This file will be created if it does not already exist.'],
        ['pool', None, FLOCKER_POOL, 'The ZFS pool to use for volumes.'],
        ['mountpoint', None, FLOCKER_MOUNTPOINT.path, 'The path where ZFS filesystems will be mounted.']])
    original_postOptions = cls.postOptions

    def postOptions(self):
        self['config'] = FilePath(self['config'])
        original_postOptions(self)

    cls.postOptions = postOptions
    return cls
[ "def", "flocker_volume_options", "(", "cls", ")", ":", "original_parameters", "=", "getattr", "(", "cls", ",", "'optParameters'", ",", "[", "]", ")", "cls", ".", "optParameters", "=", "(", "original_parameters", "+", "[", "[", "'config'", ",", "None", ",", "DEFAULT_CONFIG_PATH", ".", "path", ",", "'The path to the Flocker volume configuration file, containing the node ID of the Flocker volume service on this node. This file will be created if it does not already exist.'", "]", ",", "[", "'pool'", ",", "None", ",", "FLOCKER_POOL", ",", "'The ZFS pool to use for volumes.'", "]", ",", "[", "'mountpoint'", ",", "None", ",", "FLOCKER_MOUNTPOINT", ".", "path", ",", "'The path where ZFS filesystems will be mounted.'", "]", "]", ")", "original_postOptions", "=", "cls", ".", "postOptions", "def", "postOptions", "(", "self", ")", ":", "self", "[", "'config'", "]", "=", "FilePath", "(", "self", "[", "'config'", "]", ")", "original_postOptions", "(", "self", ")", "cls", ".", "postOptions", "=", "postOptions", "return", "cls" ]
a class decorator to add volumeservice specific command line options to flocker commands .
train
false
4,591
def getAddIndexedLoops(loop, vertexes, zList):
    indexedLoops = []
    for z in zList:
        indexedLoop = getAddIndexedLoop(loop, vertexes, z)
        indexedLoops.append(indexedLoop)
    return indexedLoops
[ "def", "getAddIndexedLoops", "(", "loop", ",", "vertexes", ",", "zList", ")", ":", "indexedLoops", "=", "[", "]", "for", "z", "in", "zList", ":", "indexedLoop", "=", "getAddIndexedLoop", "(", "loop", ",", "vertexes", ",", "z", ")", "indexedLoops", ".", "append", "(", "indexedLoop", ")", "return", "indexedLoops" ]
get and add indexed loops .
train
false
4,592
def _filterargs(source):
    argsregex = "}\\('(.*)', *(\\d+), *(\\d+), *'(.*)'\\.split\\('\\|'\\), *(\\d+), *(.*)\\)\\)"
    args = re.search(argsregex, source, re.DOTALL).groups()
    try:
        return (args[0], args[3].split('|'), int(args[1]), int(args[2]))
    except ValueError:
        raise UnpackingError('Corrupted p.a.c.k.e.r. data.')
[ "def", "_filterargs", "(", "source", ")", ":", "argsregex", "=", "\"}\\\\('(.*)', *(\\\\d+), *(\\\\d+), *'(.*)'\\\\.split\\\\('\\\\|'\\\\), *(\\\\d+), *(.*)\\\\)\\\\)\"", "args", "=", "re", ".", "search", "(", "argsregex", ",", "source", ",", "re", ".", "DOTALL", ")", ".", "groups", "(", ")", "try", ":", "return", "(", "args", "[", "0", "]", ",", "args", "[", "3", "]", ".", "split", "(", "'|'", ")", ",", "int", "(", "args", "[", "1", "]", ")", ",", "int", "(", "args", "[", "2", "]", ")", ")", "except", "ValueError", ":", "raise", "UnpackingError", "(", "'Corrupted p.a.c.k.e.r. data.'", ")" ]
juice from a source file the four args needed by decoder .
train
false
4,593
def is_dyad(frac):
    if (isinstance(frac, numbers.Integral) and (frac >= 0)):
        return True
    elif (isinstance(frac, Fraction) and (frac >= 0) and is_power2(frac.denominator)):
        return True
    else:
        return False
[ "def", "is_dyad", "(", "frac", ")", ":", "if", "(", "isinstance", "(", "frac", ",", "numbers", ".", "Integral", ")", "and", "(", "frac", ">=", "0", ")", ")", ":", "return", "True", "elif", "(", "isinstance", "(", "frac", ",", "Fraction", ")", "and", "(", "frac", ">=", "0", ")", "and", "is_power2", "(", "frac", ".", "denominator", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
test if frac is a nonnegative dyadic fraction or integer .
train
false
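A usage sketch for the snippet above. The is_power2 helper is not part of the record, so the function below is an assumed stand-in with the obvious semantics:

import numbers
from fractions import Fraction

def is_power2(n):
    # hypothetical stand-in for the module's helper
    return (n >= 1) and ((n & (n - 1)) == 0)

assert is_dyad(5)                    # nonnegative integers qualify
assert is_dyad(Fraction(3, 8))       # denominator 8 is a power of two
assert not is_dyad(Fraction(1, 3))   # denominator 3 is not
assert not is_dyad(-2)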
4,594
def has_course_started(start_date):
    return (datetime.now(utc) > start_date)
[ "def", "has_course_started", "(", "start_date", ")", ":", "return", "(", "datetime", ".", "now", "(", "utc", ")", ">", "start_date", ")" ]
return whether the course has started, given the course's start datetime .
train
false
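A quick check of the snippet above; utc is assumed to be the UTC tzinfo object the surrounding module imports (pytz.utc here):

from datetime import datetime, timedelta
from pytz import utc

assert has_course_started(datetime.now(utc) - timedelta(days=1))
assert not has_course_started(datetime.now(utc) + timedelta(days=1))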
4,595
def idzp_rid(eps, m, n, matveca):
    proj = np.empty(((m + 1) + ((2 * n) * (min(m, n) + 1))), dtype=np.complex128, order='F')
    (k, idx, proj, ier) = _id.idzp_rid(eps, m, n, matveca, proj)
    if ier:
        raise _RETCODE_ERROR
    proj = proj[:(k * (n - k))].reshape((k, (n - k)), order='F')
    return (k, idx, proj)
[ "def", "idzp_rid", "(", "eps", ",", "m", ",", "n", ",", "matveca", ")", ":", "proj", "=", "np", ".", "empty", "(", "(", "(", "m", "+", "1", ")", "+", "(", "(", "2", "*", "n", ")", "*", "(", "min", "(", "m", ",", "n", ")", "+", "1", ")", ")", ")", ",", "dtype", "=", "np", ".", "complex128", ",", "order", "=", "'F'", ")", "(", "k", ",", "idx", ",", "proj", ",", "ier", ")", "=", "_id", ".", "idzp_rid", "(", "eps", ",", "m", ",", "n", ",", "matveca", ",", "proj", ")", "if", "ier", ":", "raise", "_RETCODE_ERROR", "proj", "=", "proj", "[", ":", "(", "k", "*", "(", "n", "-", "k", ")", ")", "]", ".", "reshape", "(", "(", "k", ",", "(", "n", "-", "k", ")", ")", ",", "order", "=", "'F'", ")", "return", "(", "k", ",", "idx", ",", "proj", ")" ]
compute id of a complex matrix to a specified relative precision using random matrix-vector multiplication .
train
false
4,596
@register.tag
def regroup(parser, token):
    bits = token.split_contents()
    if (len(bits) != 6):
        raise TemplateSyntaxError("'regroup' tag takes five arguments")
    target = parser.compile_filter(bits[1])
    if (bits[2] != 'by'):
        raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
    if (bits[4] != 'as'):
        raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must be 'as'")
    var_name = bits[5]
    expression = parser.compile_filter(((var_name + VARIABLE_ATTRIBUTE_SEPARATOR) + bits[3]))
    return RegroupNode(target, expression, var_name)
[ "@", "register", ".", "tag", "def", "regroup", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "6", ")", ":", "raise", "TemplateSyntaxError", "(", "\"'regroup' tag takes five arguments\"", ")", "target", "=", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", "if", "(", "bits", "[", "2", "]", "!=", "'by'", ")", ":", "raise", "TemplateSyntaxError", "(", "\"second argument to 'regroup' tag must be 'by'\"", ")", "if", "(", "bits", "[", "4", "]", "!=", "'as'", ")", ":", "raise", "TemplateSyntaxError", "(", "\"next-to-last argument to 'regroup' tag must be 'as'\"", ")", "var_name", "=", "bits", "[", "5", "]", "expression", "=", "parser", ".", "compile_filter", "(", "(", "(", "var_name", "+", "VARIABLE_ATTRIBUTE_SEPARATOR", ")", "+", "bits", "[", "3", "]", ")", ")", "return", "RegroupNode", "(", "target", ",", "expression", ",", "var_name", ")" ]
regroups a list of alike objects by a common attribute .
train
false
4,597
def add_permissions(user, permissions):
    for permission in permissions:
        (app_label, __, codename) = permission.partition('.')
        perm = Permission.objects.get(content_type__app_label=app_label, codename=codename)
        user.user_permissions.add(perm)
[ "def", "add_permissions", "(", "user", ",", "permissions", ")", ":", "for", "permission", "in", "permissions", ":", "(", "app_label", ",", "__", ",", "codename", ")", "=", "permission", ".", "partition", "(", "'.'", ")", "perm", "=", "Permission", ".", "objects", ".", "get", "(", "content_type__app_label", "=", "app_label", ",", "codename", "=", "codename", ")", "user", ".", "user_permissions", ".", "add", "(", "perm", ")" ]
grant permissions to the passed user .
train
false
4,599
def check_dataset_edition_permission(authorize_get=False):
    def inner(view_func):
        def decorate(request, *args, **kwargs):
            dataset = kwargs.get('dataset')
            if ((dataset is not None) and (not (authorize_get and (request.method == 'GET')))):
                Job.objects.can_edit_or_exception(request, dataset.coordinator)
            return view_func(request, *args, **kwargs)
        return wraps(view_func)(decorate)
    return inner
[ "def", "check_dataset_edition_permission", "(", "authorize_get", "=", "False", ")", ":", "def", "inner", "(", "view_func", ")", ":", "def", "decorate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "dataset", "=", "kwargs", ".", "get", "(", "'dataset'", ")", "if", "(", "(", "dataset", "is", "not", "None", ")", "and", "(", "not", "(", "authorize_get", "and", "(", "request", ".", "method", "==", "'GET'", ")", ")", ")", ")", ":", "Job", ".", "objects", ".", "can_edit_or_exception", "(", "request", ",", "dataset", ".", "coordinator", ")", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wraps", "(", "view_func", ")", "(", "decorate", ")", "return", "inner" ]
decorator ensuring that the user has the permissions to modify a dataset .
train
false
4,600
def HT_TRENDMODE(ds, count):
    return call_talib_with_ds(ds, count, talib.HT_TRENDMODE)
[ "def", "HT_TRENDMODE", "(", "ds", ",", "count", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "HT_TRENDMODE", ")" ]
hilbert transform - trend vs cycle mode .
train
false
4,601
def clear_compatversion_cache_on_delete(sender, instance, **kw):
    try:
        if (not (instance.addon.type == amo.ADDON_EXTENSION)):
            return
    except ObjectDoesNotExist:
        return
    if (not kw.get('raw')):
        instance.addon.invalidate_d2c_versions()
[ "def", "clear_compatversion_cache_on_delete", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "try", ":", "if", "(", "not", "(", "instance", ".", "addon", ".", "type", "==", "amo", ".", "ADDON_EXTENSION", ")", ")", ":", "return", "except", "ObjectDoesNotExist", ":", "return", "if", "(", "not", "kw", ".", "get", "(", "'raw'", ")", ")", ":", "instance", ".", "addon", ".", "invalidate_d2c_versions", "(", ")" ]
clears compatversion cache when version deleted .
train
false
4,603
def try_deserialize_handler(serialized_handler):
    if serialized_handler:
        return pickle.loads(serialized_handler)
[ "def", "try_deserialize_handler", "(", "serialized_handler", ")", ":", "if", "serialized_handler", ":", "return", "pickle", ".", "loads", "(", "serialized_handler", ")" ]
reverse function of try_serialize_handler .
train
false
4,604
def AssignScriptMaps(script_maps, target, update='replace'):
    script_map_func = ('_AssignScriptMaps' + update.capitalize())
    try:
        script_map_func = eval(script_map_func)
    except NameError:
        msg = ("Unknown ScriptMapUpdate option '%s'" % update)
        raise ConfigurationError(msg)
    script_maps = [str(s) for s in script_maps]
    script_map_func(target, script_maps)
    target.SetInfo()
[ "def", "AssignScriptMaps", "(", "script_maps", ",", "target", ",", "update", "=", "'replace'", ")", ":", "script_map_func", "=", "(", "'_AssignScriptMaps'", "+", "update", ".", "capitalize", "(", ")", ")", "try", ":", "script_map_func", "=", "eval", "(", "script_map_func", ")", "except", "NameError", ":", "msg", "=", "(", "\"Unknown ScriptMapUpdate option '%s'\"", "%", "update", ")", "raise", "ConfigurationError", "(", "msg", ")", "script_maps", "=", "[", "str", "(", "s", ")", "for", "s", "in", "script_maps", "]", "script_map_func", "(", "target", ",", "script_maps", ")", "target", ".", "SetInfo", "(", ")" ]
updates iis with the supplied script map information .
train
false
4,605
def wait_set(name, value, profile=None):
    return {'name': name, 'changes': {}, 'result': True, 'comment': ''}
[ "def", "wait_set", "(", "name", ",", "value", ",", "profile", "=", "None", ")", ":", "return", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", "}" ]
set a key in etcd only if the watch statement calls it .
train
false
4,606
def _validate_core_properties(properties):
    props_string = ''
    for (prop_name, prop_value) in six.iteritems(properties):
        if (prop_name in STRING_PROPS_LIST):
            if (not isinstance(prop_value, six.string_types)):
                raise ValueError((('In option "properties", core property "' + prop_name) + '" value must be a string'))
            props_string = ((((props_string + '&') + prop_name) + '=') + prop_value)
        elif (prop_name in BOOL_PROPS_LIST):
            if (not isinstance(prop_value, bool)):
                raise ValueError((('Option "' + prop_name) + '" value must be an boolean'))
            props_string = ((((props_string + '&property.') + prop_name) + '=') + ('true' if prop_value else 'false'))
    return props_string
[ "def", "_validate_core_properties", "(", "properties", ")", ":", "props_string", "=", "''", "for", "(", "prop_name", ",", "prop_value", ")", "in", "six", ".", "iteritems", "(", "properties", ")", ":", "if", "(", "prop_name", "in", "STRING_PROPS_LIST", ")", ":", "if", "(", "not", "isinstance", "(", "prop_value", ",", "six", ".", "string_types", ")", ")", ":", "raise", "ValueError", "(", "(", "(", "'In option \"properties\", core property \"'", "+", "prop_name", ")", "+", "'\" value must be a string'", ")", ")", "props_string", "=", "(", "(", "(", "(", "props_string", "+", "'&'", ")", "+", "prop_name", ")", "+", "'='", ")", "+", "prop_value", ")", "elif", "(", "prop_name", "in", "BOOL_PROPS_LIST", ")", ":", "if", "(", "not", "isinstance", "(", "prop_value", ",", "bool", ")", ")", ":", "raise", "ValueError", "(", "(", "(", "'Option \"'", "+", "prop_name", ")", "+", "'\" value must be an boolean'", ")", ")", "props_string", "=", "(", "(", "(", "(", "props_string", "+", "'&property.'", ")", "+", "prop_name", ")", "+", "'='", ")", "+", "(", "'true'", "if", "prop_value", "else", "'false'", ")", ")", "return", "props_string" ]
internal function to validate core properties .
train
true
4,608
def _is_author(cc_content, context):
    return (context['cc_requester']['id'] == cc_content['user_id'])
[ "def", "_is_author", "(", "cc_content", ",", "context", ")", ":", "return", "(", "context", "[", "'cc_requester'", "]", "[", "'id'", "]", "==", "cc_content", "[", "'user_id'", "]", ")" ]
return true if the requester authored the given content .
train
false
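A minimal sketch of the snippet above with dict-shaped stand-ins for the content and context objects:

context = {'cc_requester': {'id': '42'}}
assert _is_author({'user_id': '42'}, context)
assert not _is_author({'user_id': '7'}, context)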
4,609
def location_contact():
    return s3_rest_controller(hide_filter=False)
[ "def", "location_contact", "(", ")", ":", "return", "s3_rest_controller", "(", "hide_filter", "=", "False", ")" ]
restful crud controller for community contacts .
train
false
4,610
def local_diff_branch():
    remote = os.getenv(LOCAL_REMOTE_ENV)
    branch = os.getenv(LOCAL_BRANCH_ENV)
    if ((remote is not None) and (branch is not None)):
        return ('%s/%s' % (remote, branch))
[ "def", "local_diff_branch", "(", ")", ":", "remote", "=", "os", ".", "getenv", "(", "LOCAL_REMOTE_ENV", ")", "branch", "=", "os", ".", "getenv", "(", "LOCAL_BRANCH_ENV", ")", "if", "(", "(", "remote", "is", "not", "None", ")", "and", "(", "branch", "is", "not", "None", ")", ")", ":", "return", "(", "'%s/%s'", "%", "(", "remote", ",", "branch", ")", ")" ]
get a remote branch to diff against in a local checkout .
train
false
4,611
def make_fasta(rec):
    gi = rec.annotations.get('gi', '')
    org = rec.annotations.get('organism', '')
    date = rec.annotations.get('date', '')
    head = ('>gi:%s, id:%s, org:%s, date:%s\n' % (gi, rec.id, org, date))
    body = '\n'.join(textwrap.wrap(rec.seq.data, width=80))
    return (head, body)
[ "def", "make_fasta", "(", "rec", ")", ":", "gi", "=", "rec", ".", "annotations", ".", "get", "(", "'gi'", ",", "''", ")", "org", "=", "rec", ".", "annotations", ".", "get", "(", "'organism'", ",", "''", ")", "date", "=", "rec", ".", "annotations", ".", "get", "(", "'date'", ",", "''", ")", "head", "=", "(", "'>gi:%s, id:%s, org:%s, date:%s\\n'", "%", "(", "gi", ",", "rec", ".", "id", ",", "org", ",", "date", ")", ")", "body", "=", "'\\n'", ".", "join", "(", "textwrap", ".", "wrap", "(", "rec", ".", "seq", ".", "data", ",", "width", "=", "80", ")", ")", "return", "(", "head", ",", "body", ")" ]
creates fasta format from a record .
train
false
4,612
def find_css_class_with_wait(context, css_class, **kwargs):
    return _find_elem_with_wait(context, (By.CLASS_NAME, css_class), **kwargs)
[ "def", "find_css_class_with_wait", "(", "context", ",", "css_class", ",", "**", "kwargs", ")", ":", "return", "_find_elem_with_wait", "(", "context", ",", "(", "By", ".", "CLASS_NAME", ",", "css_class", ")", ",", "**", "kwargs", ")" ]
tries to find an element with given css class with an explicit timeout .
train
false
4,613
def degrees(radians=0, arcminutes=0, arcseconds=0):
    deg = 0.0
    if radians:
        deg = math.degrees(radians)
    if arcminutes:
        deg += (arcminutes / arcmin(degrees=1.0))
    if arcseconds:
        deg += (arcseconds / arcsec(degrees=1.0))
    return deg
[ "def", "degrees", "(", "radians", "=", "0", ",", "arcminutes", "=", "0", ",", "arcseconds", "=", "0", ")", ":", "deg", "=", "0.0", "if", "radians", ":", "deg", "=", "math", ".", "degrees", "(", "radians", ")", "if", "arcminutes", ":", "deg", "+=", "(", "arcminutes", "/", "arcmin", "(", "degrees", "=", "1.0", ")", ")", "if", "arcseconds", ":", "deg", "+=", "(", "arcseconds", "/", "arcsec", "(", "degrees", "=", "1.0", ")", ")", "return", "deg" ]
convert radians, arcminutes and arcseconds to decimal degrees .
train
true
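Since the arcmin/arcsec helpers are only consulted when those arguments are nonzero, a radians-only call of the snippet above is self-contained:

import math

assert degrees(radians=math.pi) == 180.0
assert degrees(radians=math.pi / 2) == 90.0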
4,615
def poweroff():
    return shutdown()
[ "def", "poweroff", "(", ")", ":", "return", "shutdown", "(", ")" ]
power the server off .
train
false
4,616
def package_relationship_delete(context, data_dict):
    model = context['model']
    user = context['user']
    (id, id2, rel) = _get_or_bust(data_dict, ['subject', 'object', 'type'])
    pkg1 = model.Package.get(id)
    pkg2 = model.Package.get(id2)
    if (not pkg1):
        raise NotFound(('Subject package %r was not found.' % id))
    if (not pkg2):
        raise NotFound(('Object package %r was not found.' % id2))
    existing_rels = pkg1.get_relationships_with(pkg2, rel)
    if (not existing_rels):
        raise NotFound
    relationship = existing_rels[0]
    revisioned_details = ('Package Relationship: %s %s %s' % (id, rel, id2))
    context['relationship'] = relationship
    _check_access('package_relationship_delete', context, data_dict)
    rev = model.repo.new_revision()
    rev.author = user
    rev.message = (_(u'REST API: Delete %s') % revisioned_details)
    relationship.delete()
    model.repo.commit()
[ "def", "package_relationship_delete", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "user", "=", "context", "[", "'user'", "]", "(", "id", ",", "id2", ",", "rel", ")", "=", "_get_or_bust", "(", "data_dict", ",", "[", "'subject'", ",", "'object'", ",", "'type'", "]", ")", "pkg1", "=", "model", ".", "Package", ".", "get", "(", "id", ")", "pkg2", "=", "model", ".", "Package", ".", "get", "(", "id2", ")", "if", "(", "not", "pkg1", ")", ":", "raise", "NotFound", "(", "(", "'Subject package %r was not found.'", "%", "id", ")", ")", "if", "(", "not", "pkg2", ")", ":", "return", "NotFound", "(", "(", "'Object package %r was not found.'", "%", "id2", ")", ")", "existing_rels", "=", "pkg1", ".", "get_relationships_with", "(", "pkg2", ",", "rel", ")", "if", "(", "not", "existing_rels", ")", ":", "raise", "NotFound", "relationship", "=", "existing_rels", "[", "0", "]", "revisioned_details", "=", "(", "'Package Relationship: %s %s %s'", "%", "(", "id", ",", "rel", ",", "id2", ")", ")", "context", "[", "'relationship'", "]", "=", "relationship", "_check_access", "(", "'package_relationship_delete'", ",", "context", ",", "data_dict", ")", "rev", "=", "model", ".", "repo", ".", "new_revision", "(", ")", "rev", ".", "author", "=", "user", "rev", ".", "message", "=", "(", "_", "(", "u'REST API: Delete %s'", ")", "%", "revisioned_details", ")", "relationship", ".", "delete", "(", ")", "model", ".", "repo", ".", "commit", "(", ")" ]
delete a dataset relationship .
train
false
4,617
def logout_user():
    user = _get_user()
    if ('user_id' in session):
        session.pop('user_id')
    if ('_fresh' in session):
        session.pop('_fresh')
    cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
    if (cookie_name in request.cookies):
        session['remember'] = 'clear'
    user_logged_out.send(current_app._get_current_object(), user=user)
    current_app.login_manager.reload_user()
    return True
[ "def", "logout_user", "(", ")", ":", "user", "=", "_get_user", "(", ")", "if", "(", "'user_id'", "in", "session", ")", ":", "session", ".", "pop", "(", "'user_id'", ")", "if", "(", "'_fresh'", "in", "session", ")", ":", "session", ".", "pop", "(", "'_fresh'", ")", "cookie_name", "=", "current_app", ".", "config", ".", "get", "(", "'REMEMBER_COOKIE_NAME'", ",", "COOKIE_NAME", ")", "if", "(", "cookie_name", "in", "request", ".", "cookies", ")", ":", "session", "[", "'remember'", "]", "=", "'clear'", "user_logged_out", ".", "send", "(", "current_app", ".", "_get_current_object", "(", ")", ",", "user", "=", "user", ")", "current_app", ".", "login_manager", ".", "reload_user", "(", ")", "return", "True" ]
logs out the current user .
train
true
4,618
def doctest(*paths, **kwargs):
    subprocess = kwargs.pop('subprocess', True)
    rerun = kwargs.pop('rerun', 0)
    print_counter = (lambda i: (print(('rerun %d' % (rerun - i))) if (rerun - i) else None))
    if subprocess:
        for i in range(rerun, (-1), (-1)):
            print_counter(i)
            ret = run_in_subprocess_with_hash_randomization('_doctest', function_args=paths, function_kwargs=kwargs)
            if (ret is False):
                break
            val = (not bool(ret))
            if ((not val) or (i == 0)):
                return val
    for i in range(rerun, (-1), (-1)):
        print_counter(i)
        val = (not bool(_doctest(*paths, **kwargs)))
        if ((not val) or (i == 0)):
            return val
[ "def", "doctest", "(", "*", "paths", ",", "**", "kwargs", ")", ":", "subprocess", "=", "kwargs", ".", "pop", "(", "'subprocess'", ",", "True", ")", "rerun", "=", "kwargs", ".", "pop", "(", "'rerun'", ",", "0", ")", "print_counter", "=", "(", "lambda", "i", ":", "(", "print", "(", "(", "'rerun %d'", "%", "(", "rerun", "-", "i", ")", ")", ")", "if", "(", "rerun", "-", "i", ")", "else", "None", ")", ")", "if", "subprocess", ":", "for", "i", "in", "range", "(", "rerun", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "print_counter", "(", "i", ")", "ret", "=", "run_in_subprocess_with_hash_randomization", "(", "'_doctest'", ",", "function_args", "=", "paths", ",", "function_kwargs", "=", "kwargs", ")", "if", "(", "ret", "is", "False", ")", ":", "break", "val", "=", "(", "not", "bool", "(", "ret", ")", ")", "if", "(", "(", "not", "val", ")", "or", "(", "i", "==", "0", ")", ")", ":", "return", "val", "for", "i", "in", "range", "(", "rerun", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "print_counter", "(", "i", ")", "val", "=", "(", "not", "bool", "(", "_doctest", "(", "*", "paths", ",", "**", "kwargs", ")", ")", ")", "if", "(", "(", "not", "val", ")", "or", "(", "i", "==", "0", ")", ")", ":", "return", "val" ]
execute sphinx doctest target .
train
false
4,619
def __build_word_numeral(*args):
    re_ = None
    for word_list in args:
        for word in word_list:
            if (not re_):
                re_ = '(?:(?=\\w+)'
            else:
                re_ += '|'
            re_ += word
    re_ += ')'
    return re_
[ "def", "__build_word_numeral", "(", "*", "args", ")", ":", "re_", "=", "None", "for", "word_list", "in", "args", ":", "for", "word", "in", "word_list", ":", "if", "(", "not", "re_", ")", ":", "re_", "=", "'(?:(?=\\\\w+)'", "else", ":", "re_", "+=", "'|'", "re_", "+=", "word", "re_", "+=", "')'", "return", "re_" ]
build word numeral regexp from list .
train
false
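A worked example of the snippet above, showing the regexp it assembles from two word lists:

import re

pattern = __build_word_numeral(['one', 'two'], ['three'])
# pattern is now '(?:(?=\\w+)one|two|three)'
assert re.match(pattern, 'two')
assert re.match(pattern, 'four') is None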
4,620
def reshape_axes(axes, shape, newshape):
    if (len(axes) != len(shape)):
        raise ValueError('axes do not match shape')
    if (product(shape) != product(newshape)):
        raise ValueError(('can not reshape %s to %s' % (shape, newshape)))
    if ((not axes) or (not newshape)):
        return ''
    lendiff = max(0, (len(shape) - len(newshape)))
    if lendiff:
        newshape = (newshape + ((1,) * lendiff))
    i = (len(shape) - 1)
    prodns = 1
    prods = 1
    result = []
    for ns in newshape[::(-1)]:
        prodns *= ns
        while ((i > 0) and (shape[i] == 1) and (ns != 1)):
            i -= 1
        if ((ns == shape[i]) and (prodns == (prods * shape[i]))):
            prods *= shape[i]
            result.append(axes[i])
            i -= 1
        else:
            result.append('Q')
    return ''.join(reversed(result[lendiff:]))
[ "def", "reshape_axes", "(", "axes", ",", "shape", ",", "newshape", ")", ":", "if", "(", "len", "(", "axes", ")", "!=", "len", "(", "shape", ")", ")", ":", "raise", "ValueError", "(", "'axes do not match shape'", ")", "if", "(", "product", "(", "shape", ")", "!=", "product", "(", "newshape", ")", ")", ":", "raise", "ValueError", "(", "(", "'can not reshape %s to %s'", "%", "(", "shape", ",", "newshape", ")", ")", ")", "if", "(", "(", "not", "axes", ")", "or", "(", "not", "newshape", ")", ")", ":", "return", "''", "lendiff", "=", "max", "(", "0", ",", "(", "len", "(", "shape", ")", "-", "len", "(", "newshape", ")", ")", ")", "if", "lendiff", ":", "newshape", "=", "(", "newshape", "+", "(", "(", "1", ",", ")", "*", "lendiff", ")", ")", "i", "=", "(", "len", "(", "shape", ")", "-", "1", ")", "prodns", "=", "1", "prods", "=", "1", "result", "=", "[", "]", "for", "ns", "in", "newshape", "[", ":", ":", "(", "-", "1", ")", "]", ":", "prodns", "*=", "ns", "while", "(", "(", "i", ">", "0", ")", "and", "(", "shape", "[", "i", "]", "==", "1", ")", "and", "(", "ns", "!=", "1", ")", ")", ":", "i", "-=", "1", "if", "(", "(", "ns", "==", "shape", "[", "i", "]", ")", "and", "(", "prodns", "==", "(", "prods", "*", "shape", "[", "i", "]", ")", ")", ")", ":", "prods", "*=", "shape", "[", "i", "]", "result", ".", "append", "(", "axes", "[", "i", "]", ")", "i", "-=", "1", "else", ":", "result", ".", "append", "(", "'Q'", ")", "return", "''", ".", "join", "(", "reversed", "(", "result", "[", "lendiff", ":", "]", ")", ")" ]
return axes matching new shape .
train
true
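A usage sketch for the snippet above. The product helper is not included in the record; a plausible stand-in is assumed below:

from functools import reduce
from operator import mul

def product(iterable):
    # assumed stand-in for the module's helper
    return reduce(mul, iterable, 1)

# Dropping a singleton leading axis: 'TYX' with shape (1, 4, 5) -> 'YX'
assert reshape_axes('TYX', (1, 4, 5), (4, 5)) == 'YX'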
4,621
def parse_header(line):
    plist = _parse_header_params((';' + line))
    key = plist.pop(0).lower().decode(u'ascii')
    pdict = {}
    for p in plist:
        i = p.find('=')
        if (i >= 0):
            name = p[:i].strip().lower().decode(u'ascii')
            value = p[(i + 1):].strip()
            if ((len(value) >= 2) and (value[:1] == value[(-1):] == '"')):
                value = value[1:(-1)]
                value = value.replace('\\\\', '\\').replace('\\"', '"')
            pdict[name] = value
    return (key, pdict)
[ "def", "parse_header", "(", "line", ")", ":", "plist", "=", "_parse_header_params", "(", "(", "';'", "+", "line", ")", ")", "key", "=", "plist", ".", "pop", "(", "0", ")", ".", "lower", "(", ")", ".", "decode", "(", "u'ascii'", ")", "pdict", "=", "{", "}", "for", "p", "in", "plist", ":", "i", "=", "p", ".", "find", "(", "'='", ")", "if", "(", "i", ">=", "0", ")", ":", "name", "=", "p", "[", ":", "i", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "decode", "(", "u'ascii'", ")", "value", "=", "p", "[", "(", "i", "+", "1", ")", ":", "]", ".", "strip", "(", ")", "if", "(", "(", "len", "(", "value", ")", ">=", "2", ")", "and", "(", "value", "[", ":", "1", "]", "==", "value", "[", "(", "-", "1", ")", ":", "]", "==", "'\"'", ")", ")", ":", "value", "=", "value", "[", "1", ":", "(", "-", "1", ")", "]", "value", "=", "value", ".", "replace", "(", "'\\\\\\\\'", ",", "'\\\\'", ")", ".", "replace", "(", "'\\\\\"'", ",", "'\"'", ")", "pdict", "[", "name", "]", "=", "value", "return", "(", "key", ",", "pdict", ")" ]
parse a content-type like header .
train
false
4,622
def set_log_format(log_format, server=_DEFAULT_SERVER):
    setting = 'LogPluginClsid'
    log_format_types = get_log_format_types()
    format_id = log_format_types.get(log_format, None)
    if (not format_id):
        message = "Invalid log format '{0}' specified. Valid formats: {1}".format(log_format, log_format_types.keys())
        raise SaltInvocationError(message)
    _LOG.debug("Id for '%s' found: %s", log_format, format_id)
    current_log_format = get_log_format(server)
    if (log_format == current_log_format):
        _LOG.debug('%s already contains the provided format.', setting)
        return True
    _set_wmi_setting('IIsSmtpServerSetting', setting, format_id, server)
    new_log_format = get_log_format(server)
    ret = (log_format == new_log_format)
    if ret:
        _LOG.debug('Setting %s configured successfully: %s', setting, log_format)
    else:
        _LOG.error('Unable to configure %s with value: %s', setting, log_format)
    return ret
[ "def", "set_log_format", "(", "log_format", ",", "server", "=", "_DEFAULT_SERVER", ")", ":", "setting", "=", "'LogPluginClsid'", "log_format_types", "=", "get_log_format_types", "(", ")", "format_id", "=", "log_format_types", ".", "get", "(", "log_format", ",", "None", ")", "if", "(", "not", "format_id", ")", ":", "message", "=", "\"Invalid log format '{0}' specified. Valid formats: {1}\"", ".", "format", "(", "log_format", ",", "log_format_types", ".", "keys", "(", ")", ")", "raise", "SaltInvocationError", "(", "message", ")", "_LOG", ".", "debug", "(", "\"Id for '%s' found: %s\"", ",", "log_format", ",", "format_id", ")", "current_log_format", "=", "get_log_format", "(", "server", ")", "if", "(", "log_format", "==", "current_log_format", ")", ":", "_LOG", ".", "debug", "(", "'%s already contains the provided format.'", ",", "setting", ")", "return", "True", "_set_wmi_setting", "(", "'IIsSmtpServerSetting'", ",", "setting", ",", "format_id", ",", "server", ")", "new_log_format", "=", "get_log_format", "(", "server", ")", "ret", "=", "(", "log_format", "==", "new_log_format", ")", "if", "ret", ":", "_LOG", ".", "debug", "(", "'Setting %s configured successfully: %s'", ",", "setting", ",", "log_format", ")", "else", ":", "_LOG", ".", "error", "(", "'Unable to configure %s with value: %s'", ",", "setting", ",", "log_format", ")", "return", "ret" ]
set the active log format for the smtp virtual server .
train
true
4,624
def _fit_binary(estimator, X, y, classes=None):
    unique_y = np.unique(y)
    if (len(unique_y) == 1):
        if (classes is not None):
            if (y[0] == (-1)):
                c = 0
            else:
                c = y[0]
            warnings.warn(('Label %s is present in all training examples.' % str(classes[c])))
        estimator = _ConstantPredictor().fit(X, unique_y)
    else:
        estimator = clone(estimator)
        estimator.fit(X, y)
    return estimator
[ "def", "_fit_binary", "(", "estimator", ",", "X", ",", "y", ",", "classes", "=", "None", ")", ":", "unique_y", "=", "np", ".", "unique", "(", "y", ")", "if", "(", "len", "(", "unique_y", ")", "==", "1", ")", ":", "if", "(", "classes", "is", "not", "None", ")", ":", "if", "(", "y", "[", "0", "]", "==", "(", "-", "1", ")", ")", ":", "c", "=", "0", "else", ":", "c", "=", "y", "[", "0", "]", "warnings", ".", "warn", "(", "(", "'Label %s is present in all training examples.'", "%", "str", "(", "classes", "[", "c", "]", ")", ")", ")", "estimator", "=", "_ConstantPredictor", "(", ")", ".", "fit", "(", "X", ",", "unique_y", ")", "else", ":", "estimator", "=", "clone", "(", "estimator", ")", "estimator", ".", "fit", "(", "X", ",", "y", ")", "return", "estimator" ]
fit a single binary estimator .
train
false
4,625
def _enable_privilege(privilege_name):
    return _change_privilege_state(privilege_name, True)
[ "def", "_enable_privilege", "(", "privilege_name", ")", ":", "return", "_change_privilege_state", "(", "privilege_name", ",", "True", ")" ]
enables the named privilege for this process .
train
false
4,627
def _launch(appfile):
    _finder.open(_application_file(('ID ', appfile)))
[ "def", "_launch", "(", "appfile", ")", ":", "_finder", ".", "open", "(", "_application_file", "(", "(", "'ID '", ",", "appfile", ")", ")", ")" ]
open a file thru the finder .
train
false
4,628
def test_transform_data():
    (n_sensors, n_vertices, n_times) = (10, 20, 4)
    kernel = rng.randn(n_vertices, n_sensors)
    sens_data = rng.randn(n_sensors, n_times)
    vertices = np.arange(n_vertices)
    data = np.dot(kernel, sens_data)
    for (idx, tmin_idx, tmax_idx) in zip([None, np.arange((n_vertices // 2), n_vertices)], [None, 1], [None, 3]):
        if (idx is None):
            idx_use = slice(None, None)
        else:
            idx_use = idx
        (data_f, _) = _my_trans(data[idx_use, tmin_idx:tmax_idx])
        for stc_data in (data, (kernel, sens_data)):
            stc = VolSourceEstimate(stc_data, vertices=vertices, tmin=0.0, tstep=1.0)
            stc_data_t = stc.transform_data(_my_trans, idx=idx, tmin_idx=tmin_idx, tmax_idx=tmax_idx)
            assert_allclose(data_f, stc_data_t)
[ "def", "test_transform_data", "(", ")", ":", "(", "n_sensors", ",", "n_vertices", ",", "n_times", ")", "=", "(", "10", ",", "20", ",", "4", ")", "kernel", "=", "rng", ".", "randn", "(", "n_vertices", ",", "n_sensors", ")", "sens_data", "=", "rng", ".", "randn", "(", "n_sensors", ",", "n_times", ")", "vertices", "=", "np", ".", "arange", "(", "n_vertices", ")", "data", "=", "np", ".", "dot", "(", "kernel", ",", "sens_data", ")", "for", "(", "idx", ",", "tmin_idx", ",", "tmax_idx", ")", "in", "zip", "(", "[", "None", ",", "np", ".", "arange", "(", "(", "n_vertices", "//", "2", ")", ",", "n_vertices", ")", "]", ",", "[", "None", ",", "1", "]", ",", "[", "None", ",", "3", "]", ")", ":", "if", "(", "idx", "is", "None", ")", ":", "idx_use", "=", "slice", "(", "None", ",", "None", ")", "else", ":", "idx_use", "=", "idx", "(", "data_f", ",", "_", ")", "=", "_my_trans", "(", "data", "[", "idx_use", ",", "tmin_idx", ":", "tmax_idx", "]", ")", "for", "stc_data", "in", "(", "data", ",", "(", "kernel", ",", "sens_data", ")", ")", ":", "stc", "=", "VolSourceEstimate", "(", "stc_data", ",", "vertices", "=", "vertices", ",", "tmin", "=", "0.0", ",", "tstep", "=", "1.0", ")", "stc_data_t", "=", "stc", ".", "transform_data", "(", "_my_trans", ",", "idx", "=", "idx", ",", "tmin_idx", "=", "tmin_idx", ",", "tmax_idx", "=", "tmax_idx", ")", "assert_allclose", "(", "data_f", ",", "stc_data_t", ")" ]
test applying linear transform to data .
train
false
4,629
def _window_too_small():
    return Window(TokenListControl.static([(Token.WindowTooSmall, u' Window too small... ')]))
[ "def", "_window_too_small", "(", ")", ":", "return", "Window", "(", "TokenListControl", ".", "static", "(", "[", "(", "Token", ".", "WindowTooSmall", ",", "u' Window too small... '", ")", "]", ")", ")" ]
create a window that displays the window too small text .
train
false
4,631
def event_source_mapping_absent(name, EventSourceArn, FunctionName, region=None, key=None, keyid=None, profile=None):
    ret = {'name': None, 'result': True, 'comment': '', 'changes': {}}
    desc = __salt__['boto_lambda.describe_event_source_mapping'](EventSourceArn=EventSourceArn, FunctionName=FunctionName, region=region, key=key, keyid=keyid, profile=profile)
    if ('error' in desc):
        ret['result'] = False
        ret['comment'] = 'Failed to delete event source mapping: {0}.'.format(desc['error']['message'])
        return ret
    if (not desc.get('event_source_mapping')):
        ret['comment'] = 'Event source mapping does not exist.'
        return ret
    ret['name'] = desc['event_source_mapping']['UUID']
    if __opts__['test']:
        ret['comment'] = 'Event source mapping is set to be removed.'
        ret['result'] = None
        return ret
    r = __salt__['boto_lambda.delete_event_source_mapping'](EventSourceArn=EventSourceArn, FunctionName=FunctionName, region=region, key=key, keyid=keyid, profile=profile)
    if (not r['deleted']):
        ret['result'] = False
        ret['comment'] = 'Failed to delete event source mapping: {0}.'.format(r['error']['message'])
        return ret
    ret['changes']['old'] = desc
    ret['changes']['new'] = {'event_source_mapping': None}
    ret['comment'] = 'Event source mapping deleted.'
    return ret
[ "def", "event_source_mapping_absent", "(", "name", ",", "EventSourceArn", ",", "FunctionName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "None", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "desc", "=", "__salt__", "[", "'boto_lambda.describe_event_source_mapping'", "]", "(", "EventSourceArn", "=", "EventSourceArn", ",", "FunctionName", "=", "FunctionName", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "'error'", "in", "desc", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to delete event source mapping: {0}.'", ".", "format", "(", "desc", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "ret", "if", "(", "not", "desc", ".", "get", "(", "'event_source_mapping'", ")", ")", ":", "ret", "[", "'comment'", "]", "=", "'Event source mapping does not exist.'", "return", "ret", "ret", "[", "'name'", "]", "=", "desc", "[", "'event_source_mapping'", "]", "[", "'UUID'", "]", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'Event source mapping is set to be removed.'", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "r", "=", "__salt__", "[", "'boto_lambda.delete_event_source_mapping'", "]", "(", "EventSourceArn", "=", "EventSourceArn", ",", "FunctionName", "=", "FunctionName", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "r", "[", "'deleted'", "]", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to delete event source mapping: {0}.'", ".", "format", "(", "r", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "ret", "ret", "[", "'changes'", "]", "[", "'old'", "]", "=", "desc", "ret", "[", "'changes'", "]", "[", "'new'", "]", "=", "{", "'event_source_mapping'", ":", "None", "}", "ret", "[", "'comment'", "]", "=", "'Event source mapping deleted.'", "return", "ret" ]
ensure event source mapping with passed properties is absent .
train
true
4,632
def RetryNoBackoff(callable_func, retry_notify_func, delay=5, max_tries=200):
    return RetryWithBackoff(callable_func, retry_notify_func, delay, 1, delay, max_tries)
[ "def", "RetryNoBackoff", "(", "callable_func", ",", "retry_notify_func", ",", "delay", "=", "5", ",", "max_tries", "=", "200", ")", ":", "return", "RetryWithBackoff", "(", "callable_func", ",", "retry_notify_func", ",", "delay", ",", "1", ",", "delay", ",", "max_tries", ")" ]
calls a function multiple times .
train
false
4,634
def version_handler(min_ver, max_ver=None):
    def decorator(f):
        min_version_float = float(min_ver)
        if max_ver:
            max_version_float = float(max_ver)
        else:
            max_version_float = float(max_version_string())
        qualified_name = _fully_qualified_name(f)
        VERSIONED_METHODS[qualified_name].append((min_version_float, max_version_float, f))

        def decorated_func(req, *args, **kwargs):
            version_float = float(req.environ[MICROVERSION_ENVIRON])
            return _find_method(f, version_float)(req, *args, **kwargs)

        VERSIONED_METHODS[qualified_name].sort(key=(lambda x: x[0]), reverse=True)
        return decorated_func
    return decorator
[ "def", "version_handler", "(", "min_ver", ",", "max_ver", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "min_version_float", "=", "float", "(", "min_ver", ")", "if", "max_ver", ":", "max_version_float", "=", "float", "(", "max_ver", ")", "else", ":", "max_version_float", "=", "float", "(", "max_version_string", "(", ")", ")", "qualified_name", "=", "_fully_qualified_name", "(", "f", ")", "VERSIONED_METHODS", "[", "qualified_name", "]", ".", "append", "(", "(", "min_version_float", ",", "max_version_float", ",", "f", ")", ")", "def", "decorated_func", "(", "req", ",", "*", "args", ",", "**", "kwargs", ")", ":", "version_float", "=", "float", "(", "req", ".", "environ", "[", "MICROVERSION_ENVIRON", "]", ")", "return", "_find_method", "(", "f", ",", "version_float", ")", "(", "req", ",", "*", "args", ",", "**", "kwargs", ")", "VERSIONED_METHODS", "[", "qualified_name", "]", ".", "sort", "(", "key", "=", "(", "lambda", "x", ":", "x", "[", "0", "]", ")", ",", "reverse", "=", "True", ")", "return", "decorated_func", "return", "decorator" ]
decorator for versioning api methods .
train
false
4,636
def get_deprecated_login_lock_out_by_combination_browser_user_agent():
    return AUTH.LOGIN_LOCK_OUT_BY_COMBINATION_BROWSER_USER_AGENT_AND_IP.get()
[ "def", "get_deprecated_login_lock_out_by_combination_browser_user_agent", "(", ")", ":", "return", "AUTH", ".", "LOGIN_LOCK_OUT_BY_COMBINATION_BROWSER_USER_AGENT_AND_IP", ".", "get", "(", ")" ]
return value of deprecated login_lock_out_by_combination_browser_user_agent_and_ip config .
train
false
4,637
def cors_tool():
    req_head = cherrypy.request.headers
    resp_head = cherrypy.response.headers
    resp_head['Access-Control-Allow-Origin'] = req_head.get('Origin', '*')
    resp_head['Access-Control-Expose-Headers'] = 'GET, POST'
    resp_head['Access-Control-Allow-Credentials'] = 'true'
    if (cherrypy.request.method == 'OPTIONS'):
        cherrypy.serving.request.handler = cors_handler
[ "def", "cors_tool", "(", ")", ":", "req_head", "=", "cherrypy", ".", "request", ".", "headers", "resp_head", "=", "cherrypy", ".", "response", ".", "headers", "resp_head", "[", "'Access-Control-Allow-Origin'", "]", "=", "req_head", ".", "get", "(", "'Origin'", ",", "'*'", ")", "resp_head", "[", "'Access-Control-Expose-Headers'", "]", "=", "'GET, POST'", "resp_head", "[", "'Access-Control-Allow-Credentials'", "]", "=", "'true'", "if", "(", "cherrypy", ".", "request", ".", "method", "==", "'OPTIONS'", ")", ":", "cherrypy", ".", "serving", ".", "request", ".", "handler", "=", "cors_handler" ]
handle both simple and complex cors requests; add cors headers to each response .
train
false
4,638
def test_ast_valid_let():
    can_compile(u'(let [a b])')
    can_compile(u'(let [a 1])')
    can_compile(u'(let [a 1 b None])')
[ "def", "test_ast_valid_let", "(", ")", ":", "can_compile", "(", "u'(let [a b])'", ")", "can_compile", "(", "u'(let [a 1])'", ")", "can_compile", "(", "u'(let [a 1 b None])'", ")" ]
make sure ast can compile valid let .
train
false
4,639
def test_hsl_to_rgb_part_9():
    assert (hsl_to_rgb(120, 20, 50) == (102, 153, 102))
    assert (hsl_to_rgb(120, 60, 50) == (51, 204, 51))
    assert (hsl_to_rgb(120, 100, 50) == (0, 255, 0))
[ "def", "test_hsl_to_rgb_part_9", "(", ")", ":", "assert", "(", "hsl_to_rgb", "(", "120", ",", "20", ",", "50", ")", "==", "(", "102", ",", "153", ",", "102", ")", ")", "assert", "(", "hsl_to_rgb", "(", "120", ",", "60", ",", "50", ")", "==", "(", "51", ",", "204", ",", "51", ")", ")", "assert", "(", "hsl_to_rgb", "(", "120", ",", "100", ",", "50", ")", "==", "(", "0", ",", "255", ",", "0", ")", ")" ]
test hsl to rgb color function .
train
false
4,641
def _CheckOffset(offset):
    return _CheckInteger(offset, 'offset', zero_ok=True, upper_bound=MAXIMUM_SEARCH_OFFSET)
[ "def", "_CheckOffset", "(", "offset", ")", ":", "return", "_CheckInteger", "(", "offset", ",", "'offset'", ",", "zero_ok", "=", "True", ",", "upper_bound", "=", "MAXIMUM_SEARCH_OFFSET", ")" ]
checks the offset in document list is an integer within range .
train
false
4,642
def env_func(f, argtypes):
    f.argtypes = argtypes
    f.restype = None
    f.errcheck = check_envelope
    return f
[ "def", "env_func", "(", "f", ",", "argtypes", ")", ":", "f", ".", "argtypes", "=", "argtypes", "f", ".", "restype", "=", "None", "f", ".", "errcheck", "=", "check_envelope", "return", "f" ]
for getting ogrenvelopes .
train
false
4,644
def delete_cached_file(filename):
    global _FILE_CACHE
    if (filename in _FILE_CACHE):
        del _FILE_CACHE[filename]
[ "def", "delete_cached_file", "(", "filename", ")", ":", "global", "_FILE_CACHE", "if", "(", "filename", "in", "_FILE_CACHE", ")", ":", "del", "_FILE_CACHE", "[", "filename", "]" ]
delete cached file if present .
train
false
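A minimal sketch of the snippet above; _FILE_CACHE is module state, stubbed here as a plain dict defined alongside the function:

_FILE_CACHE = {'/etc/app.conf': 'cached contents'}
delete_cached_file('/etc/app.conf')
assert '/etc/app.conf' not in _FILE_CACHE
delete_cached_file('/etc/app.conf')  # missing keys are ignored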
4,645
def test_hsl_to_rgb_part_7():
    assert (hsl_to_rgb(0, 20, 50) == (153, 102, 102))
    assert (hsl_to_rgb(0, 60, 50) == (204, 51, 51))
    assert (hsl_to_rgb(0, 100, 50) == (255, 0, 0))
[ "def", "test_hsl_to_rgb_part_7", "(", ")", ":", "assert", "(", "hsl_to_rgb", "(", "0", ",", "20", ",", "50", ")", "==", "(", "153", ",", "102", ",", "102", ")", ")", "assert", "(", "hsl_to_rgb", "(", "0", ",", "60", ",", "50", ")", "==", "(", "204", ",", "51", ",", "51", ")", ")", "assert", "(", "hsl_to_rgb", "(", "0", ",", "100", ",", "50", ")", "==", "(", "255", ",", "0", ",", "0", ")", ")" ]
test hsl to rgb color function .
train
false
4,646
def render_pep440(pieces):
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if (pieces['distance'] or pieces['dirty']):
            rendered += plus_or_dot(pieces)
            rendered += ('%d.g%s' % (pieces['distance'], pieces['short']))
            if pieces['dirty']:
                rendered += '.dirty'
    else:
        rendered = ('0+untagged.%d.g%s' % (pieces['distance'], pieces['short']))
        if pieces['dirty']:
            rendered += '.dirty'
    return rendered
[ "def", "render_pep440", "(", "pieces", ")", ":", "if", "pieces", "[", "'closest-tag'", "]", ":", "rendered", "=", "pieces", "[", "'closest-tag'", "]", "if", "(", "pieces", "[", "'distance'", "]", "or", "pieces", "[", "'dirty'", "]", ")", ":", "rendered", "+=", "plus_or_dot", "(", "pieces", ")", "rendered", "+=", "(", "'%d.g%s'", "%", "(", "pieces", "[", "'distance'", "]", ",", "pieces", "[", "'short'", "]", ")", ")", "if", "pieces", "[", "'dirty'", "]", ":", "rendered", "+=", "'.dirty'", "else", ":", "rendered", "=", "(", "'0+untagged.%d.g%s'", "%", "(", "pieces", "[", "'distance'", "]", ",", "pieces", "[", "'short'", "]", ")", ")", "if", "pieces", "[", "'dirty'", "]", ":", "rendered", "+=", "'.dirty'", "return", "rendered" ]
build up version string .
train
true
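A worked example of the snippet above. plus_or_dot is not part of the record; the stand-in below assumes it returns '.' when the tag already contains a '+', else '+':

def plus_or_dot(pieces):
    # assumed stand-in for the sibling helper
    return '.' if '+' in pieces.get('closest-tag', '') else '+'

pieces = {'closest-tag': 'v1.2.0', 'distance': 3, 'short': 'abc1234', 'dirty': True}
assert render_pep440(pieces) == 'v1.2.0+3.gabc1234.dirty'

pieces = {'closest-tag': '', 'distance': 3, 'short': 'abc1234', 'dirty': False}
assert render_pep440(pieces) == '0+untagged.3.gabc1234'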
4,648
def get_plugin(name):
    if (internal._REGISTRY is None):
        raise NoSuchPluginException(("Could not load plugin {0} because the registry either hasn't "
                                     'been loaded or has just been unloaded.').format(name))
    return internal._REGISTRY[name]
[ "def", "get_plugin", "(", "name", ")", ":", "if", "(", "internal", ".", "_REGISTRY", "is", "None", ")", ":", "raise", "NoSuchPluginException", "(", "(", "\"Could not load plugin {0} because the registry either hasn't \"", "+", "'been loaded or has just been unloaded.'", ".", "format", "(", "name", ")", ")", ")", "return", "internal", ".", "_REGISTRY", "[", "name", "]" ]
get an instance of an active plugin by name .
train
false
4,649
def _format_issue(issue):
    ret = {'id': issue.get('id'),
           'issue_number': issue.get('number'),
           'state': issue.get('state'),
           'title': issue.get('title'),
           'user': issue.get('user').get('login'),
           'html_url': issue.get('html_url')}
    assignee = issue.get('assignee')
    if assignee:
        assignee = assignee.get('login')
    labels = issue.get('labels')
    label_names = []
    for label in labels:
        label_names.append(label.get('name'))
    milestone = issue.get('milestone')
    if milestone:
        milestone = milestone.get('title')
    ret['assignee'] = assignee
    ret['labels'] = label_names
    ret['milestone'] = milestone
    return ret
[ "def", "_format_issue", "(", "issue", ")", ":", "ret", "=", "{", "'id'", ":", "issue", ".", "get", "(", "'id'", ")", ",", "'issue_number'", ":", "issue", ".", "get", "(", "'number'", ")", ",", "'state'", ":", "issue", ".", "get", "(", "'state'", ")", ",", "'title'", ":", "issue", ".", "get", "(", "'title'", ")", ",", "'user'", ":", "issue", ".", "get", "(", "'user'", ")", ".", "get", "(", "'login'", ")", ",", "'html_url'", ":", "issue", ".", "get", "(", "'html_url'", ")", "}", "assignee", "=", "issue", ".", "get", "(", "'assignee'", ")", "if", "assignee", ":", "assignee", "=", "assignee", ".", "get", "(", "'login'", ")", "labels", "=", "issue", ".", "get", "(", "'labels'", ")", "label_names", "=", "[", "]", "for", "label", "in", "labels", ":", "label_names", ".", "append", "(", "label", ".", "get", "(", "'name'", ")", ")", "milestone", "=", "issue", ".", "get", "(", "'milestone'", ")", "if", "milestone", ":", "milestone", "=", "milestone", ".", "get", "(", "'title'", ")", "ret", "[", "'assignee'", "]", "=", "assignee", "ret", "[", "'labels'", "]", "=", "label_names", "ret", "[", "'milestone'", "]", "=", "milestone", "return", "ret" ]
helper function to format api return information into a more manageable and useful dictionary for issue information .
train
true
4,650
def remove_handler(key, handler):
    if (type(key) is Predicate):
        key = key.name
    getattr(Q, key).remove_handler(handler)
[ "def", "remove_handler", "(", "key", ",", "handler", ")", ":", "if", "(", "type", "(", "key", ")", "is", "Predicate", ")", ":", "key", "=", "key", ".", "name", "getattr", "(", "Q", ",", "key", ")", ".", "remove_handler", "(", "handler", ")" ]
removes a handler from the ask system .
train
false
4,652
def iterModules():
    return theSystemPath.iterModules()
[ "def", "iterModules", "(", ")", ":", "return", "theSystemPath", ".", "iterModules", "(", ")" ]
iterate all modules and top-level packages on the global python path .
train
false
4,653
def irange(start, end):
    return range(start, (end + 1))
[ "def", "irange", "(", "start", ",", "end", ")", ":", "return", "range", "(", "start", ",", "(", "end", "+", "1", ")", ")" ]
inclusive range from start to end .
train
false
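The off-by-one contrast with the builtin is the whole point of the snippet above:

assert list(irange(1, 5)) == [1, 2, 3, 4, 5]   # includes the end point
assert list(range(1, 5)) == [1, 2, 3, 4]       # builtin excludes it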
4,654
def divmod_by_constant(builder, val, divisor):
    assert (divisor > 0)
    divisor = val.type(divisor)
    one = val.type(1)
    quot = alloca_once(builder, val.type)
    with builder.if_else(is_neg_int(builder, val)) as (if_neg, if_pos):
        with if_pos:
            quot_val = builder.sdiv(val, divisor)
            builder.store(quot_val, quot)
        with if_neg:
            val_plus_one = builder.add(val, one)
            quot_val = builder.sdiv(val_plus_one, divisor)
            builder.store(builder.sub(quot_val, one), quot)
    quot_val = builder.load(quot)
    rem_val = builder.sub(val, builder.mul(quot_val, divisor))
    return (quot_val, rem_val)
[ "def", "divmod_by_constant", "(", "builder", ",", "val", ",", "divisor", ")", ":", "assert", "(", "divisor", ">", "0", ")", "divisor", "=", "val", ".", "type", "(", "divisor", ")", "one", "=", "val", ".", "type", "(", "1", ")", "quot", "=", "alloca_once", "(", "builder", ",", "val", ".", "type", ")", "with", "builder", ".", "if_else", "(", "is_neg_int", "(", "builder", ",", "val", ")", ")", "as", "(", "if_neg", ",", "if_pos", ")", ":", "with", "if_pos", ":", "quot_val", "=", "builder", ".", "sdiv", "(", "val", ",", "divisor", ")", "builder", ".", "store", "(", "quot_val", ",", "quot", ")", "with", "if_neg", ":", "val_plus_one", "=", "builder", ".", "add", "(", "val", ",", "one", ")", "quot_val", "=", "builder", ".", "sdiv", "(", "val_plus_one", ",", "divisor", ")", "builder", ".", "store", "(", "builder", ".", "sub", "(", "quot_val", ",", "one", ")", ",", "quot", ")", "quot_val", "=", "builder", ".", "load", "(", "quot", ")", "rem_val", "=", "builder", ".", "sub", "(", "val", ",", "builder", ".", "mul", "(", "quot_val", ",", "divisor", ")", ")", "return", "(", "quot_val", ",", "rem_val", ")" ]
compute the (quotient, remainder) of *val* divided by the constant positive *divisor* .
train
false
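A minimal pure-Python sketch of the IR logic in the divmod_by_constant record above; py_divmod_by_constant is a hypothetical name, and int(a / b) stands in for LLVM's truncating sdiv (fine for small integers, imprecise for huge ones). The negative branch turns truncation into floor division so the remainder is always non-negative.
def py_divmod_by_constant(val, divisor):
    assert divisor > 0
    trunc = lambda a, b: int(a / b)  # truncates toward zero, like sdiv
    if val >= 0:
        quot = trunc(val, divisor)
    else:
        quot = trunc(val + 1, divisor) - 1  # shift so truncation matches floor
    return quot, val - quot * divisor

assert py_divmod_by_constant(7, 3) == (2, 1)
assert py_divmod_by_constant(-7, 3) == divmod(-7, 3) == (-3, 2)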
4,655
def bellman_ford_predecessor_and_distance(G, source, target=None, cutoff=None, weight='weight'): if (source not in G): raise nx.NodeNotFound(('Node %s is not found in the graph' % source)) weight = _weight_function(G, weight) if any(((weight(u, v, d) < 0) for (u, v, d) in G.selfloop_edges(data=True))): raise nx.NetworkXUnbounded('Negative cost cycle detected.') dist = {source: 0} pred = {source: [None]} if (len(G) == 1): return (pred, dist) weight = _weight_function(G, weight) return (pred, _bellman_ford(G, [source], weight, pred=pred, dist=dist, cutoff=cutoff, target=target))
[ "def", "bellman_ford_predecessor_and_distance", "(", "G", ",", "source", ",", "target", "=", "None", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "if", "(", "source", "not", "in", "G", ")", ":", "raise", "nx", ".", "NodeNotFound", "(", "(", "'Node %s is not found in the graph'", "%", "source", ")", ")", "weight", "=", "_weight_function", "(", "G", ",", "weight", ")", "if", "any", "(", "(", "(", "weight", "(", "u", ",", "v", ",", "d", ")", "<", "0", ")", "for", "(", "u", ",", "v", ",", "d", ")", "in", "G", ".", "selfloop_edges", "(", "data", "=", "True", ")", ")", ")", ":", "raise", "nx", ".", "NetworkXUnbounded", "(", "'Negative cost cycle detected.'", ")", "dist", "=", "{", "source", ":", "0", "}", "pred", "=", "{", "source", ":", "[", "None", "]", "}", "if", "(", "len", "(", "G", ")", "==", "1", ")", ":", "return", "(", "pred", ",", "dist", ")", "weight", "=", "_weight_function", "(", "G", ",", "weight", ")", "return", "(", "pred", ",", "_bellman_ford", "(", "G", ",", "[", "source", "]", ",", "weight", ",", "pred", "=", "pred", ",", "dist", "=", "dist", ",", "cutoff", "=", "cutoff", ",", "target", "=", "target", ")", ")" ]
compute shortest path lengths and predecessors on shortest paths in weighted graphs .
train
false
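Hedged usage sketch for the bellman_ford_predecessor_and_distance record above, assuming the standard networkx implementation; a path graph makes the expected distances obvious.
import networkx as nx

G = nx.path_graph(5)  # 0-1-2-3-4, unit edge weights
pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0)
assert dist == {0: 0, 1: 1, 2: 2, 3: 3, 4: 4}
assert pred[3] == [2]  # predecessor lists along shortest paths; pred[0] == [None]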
4,656
def test_album_info(*args, **kwargs): track_info = TrackInfo(title=u'new title', track_id=u'trackid', index=0) album_info = AlbumInfo(artist=u'artist', album=u'album', tracks=[track_info], album_id=u'albumid', artist_id=u'artistid') return iter([album_info])
[ "def", "test_album_info", "(", "*", "args", ",", "**", "kwargs", ")", ":", "track_info", "=", "TrackInfo", "(", "title", "=", "u'new title'", ",", "track_id", "=", "u'trackid'", ",", "index", "=", "0", ")", "album_info", "=", "AlbumInfo", "(", "artist", "=", "u'artist'", ",", "album", "=", "u'album'", ",", "tracks", "=", "[", "track_info", "]", ",", "album_id", "=", "u'albumid'", ",", "artist_id", "=", "u'artistid'", ")", "return", "iter", "(", "[", "album_info", "]", ")" ]
create an albuminfo object for testing .
train
false
4,658
def _normalizeargs(sequence, output=None): if (output is None): output = [] cls = sequence.__class__ if ((InterfaceClass in cls.__mro__) or (Implements in cls.__mro__)): output.append(sequence) else: for v in sequence: _normalizeargs(v, output) return output
[ "def", "_normalizeargs", "(", "sequence", ",", "output", "=", "None", ")", ":", "if", "(", "output", "is", "None", ")", ":", "output", "=", "[", "]", "cls", "=", "sequence", ".", "__class__", "if", "(", "(", "InterfaceClass", "in", "cls", ".", "__mro__", ")", "or", "(", "Implements", "in", "cls", ".", "__mro__", ")", ")", ":", "output", ".", "append", "(", "sequence", ")", "else", ":", "for", "v", "in", "sequence", ":", "_normalizeargs", "(", "v", ",", "output", ")", "return", "output" ]
normalize declaration arguments; arguments might contain declarations or nested sequences of declarations .
train
false
4,659
@mock_ec2 def test_igw_filter_by_tags(): conn = boto.connect_vpc(u'the_key', u'the_secret') igw1 = conn.create_internet_gateway() igw2 = conn.create_internet_gateway() igw1.add_tag(u'tests', u'yes') result = conn.get_all_internet_gateways(filters={u'tag:tests': u'yes'}) result.should.have.length_of(1) result[0].id.should.equal(igw1.id)
[ "@", "mock_ec2", "def", "test_igw_filter_by_tags", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "igw1", "=", "conn", ".", "create_internet_gateway", "(", ")", "igw2", "=", "conn", ".", "create_internet_gateway", "(", ")", "igw1", ".", "add_tag", "(", "u'tests'", ",", "u'yes'", ")", "result", "=", "conn", ".", "get_all_internet_gateways", "(", "filters", "=", "{", "u'tag:tests'", ":", "u'yes'", "}", ")", "result", ".", "should", ".", "have", ".", "length_of", "(", "1", ")", "result", "[", "0", "]", ".", "id", ".", "should", ".", "equal", "(", "igw1", ".", "id", ")" ]
internet gateway filter by tags .
train
false
4,660
def load_sorts(): g = globals() if g['_sorting_init']: return def gt(a, b): return (a > b) default_sort = quicksort.make_jit_quicksort() reversed_sort = quicksort.make_jit_quicksort(lt=gt) g['run_default_sort'] = default_sort.run_quicksort g['run_reversed_sort'] = reversed_sort.run_quicksort g['_sorting_init'] = True
[ "def", "load_sorts", "(", ")", ":", "g", "=", "globals", "(", ")", "if", "g", "[", "'_sorting_init'", "]", ":", "return", "def", "gt", "(", "a", ",", "b", ")", ":", "return", "(", "a", ">", "b", ")", "default_sort", "=", "quicksort", ".", "make_jit_quicksort", "(", ")", "reversed_sort", "=", "quicksort", ".", "make_jit_quicksort", "(", "lt", "=", "gt", ")", "g", "[", "'run_default_sort'", "]", "=", "default_sort", ".", "run_quicksort", "g", "[", "'run_reversed_sort'", "]", "=", "reversed_sort", ".", "run_quicksort", "g", "[", "'_sorting_init'", "]", "=", "True" ]
load quicksort lazily .
train
false
4,661
def require_support_permission(func): @wraps(func) def inner(request, *args, **kwargs): if has_access(request.user, 'support', 'global'): return func(request, *args, **kwargs) else: return HttpResponseForbidden() return login_required(inner)
[ "def", "require_support_permission", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "has_access", "(", "request", ".", "user", ",", "'support'", ",", "'global'", ")", ":", "return", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "HttpResponseForbidden", "(", ")", "return", "login_required", "(", "inner", ")" ]
view decorator that requires the user to have permission to use the support ui .
train
false
4,662
def find_missing(input_list): total = ((len(input_list) * (len(input_list) + 1)) / 2) summed = 0 for element in input_list: summed += element missing = (total - summed) return missing
[ "def", "find_missing", "(", "input_list", ")", ":", "total", "=", "(", "(", "len", "(", "input_list", ")", "*", "(", "len", "(", "input_list", ")", "+", "1", ")", ")", "/", "2", ")", "summed", "=", "0", "for", "element", "in", "input_list", ":", "summed", "+=", "element", "missing", "=", "(", "total", "-", "summed", ")", "return", "missing" ]
find the missing number in a shuffled list .
train
false
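Worked example for the find_missing record above, assuming the input holds the integers 0..n in any order with exactly one value removed; the expected total is the Gauss sum n*(n+1)/2, where n is the length of the truncated list.
def find_missing(input_list):
    total = ((len(input_list) * (len(input_list) + 1)) / 2)
    summed = 0
    for element in input_list:
        summed += element
    return (total - summed)

assert find_missing([3, 0, 1]) == 2      # shuffled 0..3 with 2 removed
assert find_missing([0, 1, 2, 4]) == 3   # 0..4 with 3 removed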
4,663
def hostinterface_get(hostids, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'hostinterface.get' params = {'output': 'extend'} if hostids: params.setdefault('hostids', hostids) params = _params_extend(params, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return (ret['result'] if (len(ret['result']) > 0) else False) else: raise KeyError except KeyError: return False
[ "def", "hostinterface_get", "(", "hostids", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'hostinterface.get'", "params", "=", "{", "'output'", ":", "'extend'", "}", "if", "hostids", ":", "params", ".", "setdefault", "(", "'hostids'", ",", "hostids", ")", "params", "=", "_params_extend", "(", "params", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "(", "ret", "[", "'result'", "]", "if", "(", "len", "(", "ret", "[", "'result'", "]", ")", ">", "0", ")", "else", "False", ")", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "False" ]
retrieve host interfaces according to the given parameters .
train
false
4,664
@utils.arg('monitor_id', metavar='<monitor-id>', help='ID of the monitor to upload to an image') @utils.arg('--force', metavar='<True|False>', help="Optional flag to indicate whether to upload a monitor even if it's attached to an instance. (Default=False)", default=False) @utils.arg('--container-format', metavar='<container-format>', help='Optional type for container format (Default=bare)', default='bare') @utils.arg('--disk-format', metavar='<disk-format>', help='Optional type for disk format (Default=raw)', default='raw') @utils.arg('image_name', metavar='<image-name>', help='Name for created image') @utils.service_type('monitor') def do_upload_to_image(cs, args): monitor = _find_monitor(cs, args.monitor_id) monitor.upload_to_image(args.force, args.image_name, args.container_format, args.disk_format)
[ "@", "utils", ".", "arg", "(", "'monitor_id'", ",", "metavar", "=", "'<monitor-id>'", ",", "help", "=", "'ID of the monitor to upload to an image'", ")", "@", "utils", ".", "arg", "(", "'--force'", ",", "metavar", "=", "'<True|False>'", ",", "help", "=", "\"Optional flag to indicate whether to upload a monitor even if it's attached to an instance. (Default=False)\"", ",", "default", "=", "False", ")", "@", "utils", ".", "arg", "(", "'--container-format'", ",", "metavar", "=", "'<container-format>'", ",", "help", "=", "'Optional type for container format (Default=bare)'", ",", "default", "=", "'bare'", ")", "@", "utils", ".", "arg", "(", "'--disk-format'", ",", "metavar", "=", "'<disk-format>'", ",", "help", "=", "'Optional type for disk format (Default=raw)'", ",", "default", "=", "'raw'", ")", "@", "utils", ".", "arg", "(", "'image_name'", ",", "metavar", "=", "'<image-name>'", ",", "help", "=", "'Name for created image'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_upload_to_image", "(", "cs", ",", "args", ")", ":", "monitor", "=", "_find_monitor", "(", "cs", ",", "args", ".", "monitor_id", ")", "monitor", ".", "upload_to_image", "(", "args", ".", "force", ",", "args", ".", "image_name", ",", "args", ".", "container_format", ",", "args", ".", "disk_format", ")" ]
upload monitor to image service as image .
train
false
4,666
def uninstall(pecls): if isinstance(pecls, six.string_types): pecls = [pecls] return _pecl('uninstall {0}'.format(_cmd_quote(' '.join(pecls))))
[ "def", "uninstall", "(", "pecls", ")", ":", "if", "isinstance", "(", "pecls", ",", "six", ".", "string_types", ")", ":", "pecls", "=", "[", "pecls", "]", "return", "_pecl", "(", "'uninstall {0}'", ".", "format", "(", "_cmd_quote", "(", "' '", ".", "join", "(", "pecls", ")", ")", ")", ")" ]
remove one or more packages .
train
true
4,667
def returner_argspec(module=''): returners_ = salt.loader.returners(__opts__, []) return salt.utils.argspec_report(returners_, module)
[ "def", "returner_argspec", "(", "module", "=", "''", ")", ":", "returners_", "=", "salt", ".", "loader", ".", "returners", "(", "__opts__", ",", "[", "]", ")", "return", "salt", ".", "utils", ".", "argspec_report", "(", "returners_", ",", "module", ")" ]
return the argument specification of functions in salt returner modules .
train
true
4,668
@treeio_login_required @handle_response_format def currency_edit(request, currency_id, response_format='html'): currency = get_object_or_404(Currency, pk=currency_id) if ((not request.user.profile.has_permission(currency, mode='w')) and (not request.user.profile.is_admin('treeio_finance'))): return user_denied(request, "You don't have access to this Currency", response_format) if request.POST: if ('cancel' not in request.POST): form = CurrencyForm(request.user.profile, request.POST, instance=currency) if form.is_valid(): currency = form.save() return HttpResponseRedirect(reverse('finance_currency_view', args=[currency.id])) else: return HttpResponseRedirect(reverse('finance_currency_view', args=[currency.id])) else: form = CurrencyForm(request.user.profile, instance=currency) return render_to_response('finance/currency_edit', {'form': form, 'currency': currency}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "currency_edit", "(", "request", ",", "currency_id", ",", "response_format", "=", "'html'", ")", ":", "currency", "=", "get_object_or_404", "(", "Currency", ",", "pk", "=", "currency_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "currency", ",", "mode", "=", "'w'", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio_finance'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Currency\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "CurrencyForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "currency", ")", "if", "form", ".", "is_valid", "(", ")", ":", "currency", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_currency_view'", ",", "args", "=", "[", "currency", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_currency_view'", ",", "args", "=", "[", "currency", ".", "id", "]", ")", ")", "else", ":", "form", "=", "CurrencyForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "currency", ")", "return", "render_to_response", "(", "'finance/currency_edit'", ",", "{", "'form'", ":", "form", ",", "'currency'", ":", "currency", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
currency edit .
train
false
4,669
@pytest.mark.django_db def test_make_aware_use_tz_false(settings): settings.USE_TZ = False datetime_object = datetime(2016, 1, 2, 21, 52, 25) assert timezone.is_naive(datetime_object) datetime_aware = make_aware(datetime_object) assert timezone.is_naive(datetime_aware)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_make_aware_use_tz_false", "(", "settings", ")", ":", "settings", ".", "USE_TZ", "=", "False", "datetime_object", "=", "datetime", "(", "2016", ",", "1", ",", "2", ",", "21", ",", "52", ",", "25", ")", "assert", "timezone", ".", "is_naive", "(", "datetime_object", ")", "datetime_aware", "=", "make_aware", "(", "datetime_object", ")", "assert", "timezone", ".", "is_naive", "(", "datetime_aware", ")" ]
tests datetimes are left intact if use_tz is not in effect .
train
false
4,670
def binary_partitions(n): from math import ceil, log pow = int((2 ** ceil(log(n, 2)))) sum = 0 partition = [] while pow: if ((sum + pow) <= n): partition.append(pow) sum += pow pow >>= 1 last_num = ((len(partition) - 1) - (n & 1)) while (last_num >= 0): (yield partition) if (partition[last_num] == 2): partition[last_num] = 1 partition.append(1) last_num -= 1 continue partition.append(1) partition[last_num] >>= 1 x = partition[(last_num + 1)] = partition[last_num] last_num += 1 while (x > 1): if (x <= ((len(partition) - last_num) - 1)): del partition[((- x) + 1):] last_num += 1 partition[last_num] = x else: x >>= 1 (yield ([1] * n))
[ "def", "binary_partitions", "(", "n", ")", ":", "from", "math", "import", "ceil", ",", "log", "pow", "=", "int", "(", "(", "2", "**", "ceil", "(", "log", "(", "n", ",", "2", ")", ")", ")", ")", "sum", "=", "0", "partition", "=", "[", "]", "while", "pow", ":", "if", "(", "(", "sum", "+", "pow", ")", "<=", "n", ")", ":", "partition", ".", "append", "(", "pow", ")", "sum", "+=", "pow", "pow", ">>=", "1", "last_num", "=", "(", "(", "len", "(", "partition", ")", "-", "1", ")", "-", "(", "n", "&", "1", ")", ")", "while", "(", "last_num", ">=", "0", ")", ":", "(", "yield", "partition", ")", "if", "(", "partition", "[", "last_num", "]", "==", "2", ")", ":", "partition", "[", "last_num", "]", "=", "1", "partition", ".", "append", "(", "1", ")", "last_num", "-=", "1", "continue", "partition", ".", "append", "(", "1", ")", "partition", "[", "last_num", "]", ">>=", "1", "x", "=", "partition", "[", "(", "last_num", "+", "1", ")", "]", "=", "partition", "[", "last_num", "]", "last_num", "+=", "1", "while", "(", "x", ">", "1", ")", ":", "if", "(", "x", "<=", "(", "(", "len", "(", "partition", ")", "-", "last_num", ")", "-", "1", ")", ")", ":", "del", "partition", "[", "(", "(", "-", "x", ")", "+", "1", ")", ":", "]", "last_num", "+=", "1", "partition", "[", "last_num", "]", "=", "x", "else", ":", "x", ">>=", "1", "(", "yield", "(", "[", "1", "]", "*", "n", ")", ")" ]
generates the binary partitions of n .
train
false
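Usage sketch for the binary_partitions record above (partitions of n into powers of two); the generator re-yields one list it mutates in place, so each partition is copied here. The expected output matches what the sympy version of this routine documents for n = 5.
# assuming the binary_partitions definition from the record above is in scope
parts = [p[:] for p in binary_partitions(5)]  # p[:] copies the mutated list
assert parts == [[4, 1], [2, 2, 1], [2, 1, 1, 1], [1, 1, 1, 1, 1]]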
4,671
def results_extractor(train_obj): return DD()
[ "def", "results_extractor", "(", "train_obj", ")", ":", "return", "DD", "(", ")" ]
default results extractor that does nothing .
train
false
4,672
def _cert_file(name, cert_type): return os.path.join(LE_LIVE, name, '{0}.pem'.format(cert_type))
[ "def", "_cert_file", "(", "name", ",", "cert_type", ")", ":", "return", "os", ".", "path", ".", "join", "(", "LE_LIVE", ",", "name", ",", "'{0}.pem'", ".", "format", "(", "cert_type", ")", ")" ]
return expected path of a lets encrypt live cert .
train
true
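Illustrative expansion of the _cert_file record above; the LE_LIVE constant is an assumption (salt's letsencrypt module uses a live directory like this).
import os.path

LE_LIVE = '/etc/letsencrypt/live'  # assumed value of the module-level constant

def _cert_file(name, cert_type):
    return os.path.join(LE_LIVE, name, '{0}.pem'.format(cert_type))

assert _cert_file('example.com', 'fullchain') == '/etc/letsencrypt/live/example.com/fullchain.pem'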
4,674
def unregister_webapi_capabilities(capabilities_id): try: del _registered_capabilities[capabilities_id] except KeyError: logging.error(u'Failed to unregister unknown web API capabilities "%s".', capabilities_id) raise KeyError((u'"%s" is not a registered web API capabilities set' % capabilities_id))
[ "def", "unregister_webapi_capabilities", "(", "capabilities_id", ")", ":", "try", ":", "del", "_registered_capabilities", "[", "capabilities_id", "]", "except", "KeyError", ":", "logging", ".", "error", "(", "u'Failed to unregister unknown web API capabilities \"%s\".'", ",", "capabilities_id", ")", "raise", "KeyError", "(", "(", "u'\"%s\" is not a registered web API capabilities set'", "%", "capabilities_id", ")", ")" ]
unregisters a previously registered set of web api capabilities .
train
false
4,675
def parse_troubleshooting(troubleshooting_json): if (not troubleshooting_json): return None try: parsed = json.loads(troubleshooting_json) except ValueError: return None spec = (((), dict), (('accessibility',), dict), (('accessibility', 'isActive'), bool), (('application',), dict), (('application', 'name'), basestring), (('application', 'supportURL'), basestring), (('application', 'userAgent'), basestring), (('application', 'version'), basestring), (('extensions',), list), (('graphics',), dict), (('javaScript',), dict), (('modifiedPreferences',), dict), (('userJS',), dict), (('userJS', 'exists'), bool)) for (path, type_) in spec: item = parsed for piece in path: item = item.get(piece) if (item is None): return None if (not isinstance(item, type_)): return None parsed['modifiedPreferences'] = dict(((key, val) for (key, val) in parsed['modifiedPreferences'].items() if (not key.startswith('print')))) return parsed
[ "def", "parse_troubleshooting", "(", "troubleshooting_json", ")", ":", "if", "(", "not", "troubleshooting_json", ")", ":", "return", "None", "try", ":", "parsed", "=", "json", ".", "loads", "(", "troubleshooting_json", ")", "except", "ValueError", ":", "return", "None", "spec", "=", "(", "(", "(", ")", ",", "dict", ")", ",", "(", "(", "'accessibility'", ",", ")", ",", "dict", ")", ",", "(", "(", "'accessibility'", ",", "'isActive'", ")", ",", "bool", ")", ",", "(", "(", "'application'", ",", ")", ",", "dict", ")", ",", "(", "(", "'application'", ",", "'name'", ")", ",", "basestring", ")", ",", "(", "(", "'application'", ",", "'supportURL'", ")", ",", "basestring", ")", ",", "(", "(", "'application'", ",", "'userAgent'", ")", ",", "basestring", ")", ",", "(", "(", "'application'", ",", "'version'", ")", ",", "basestring", ")", ",", "(", "(", "'extensions'", ",", ")", ",", "list", ")", ",", "(", "(", "'graphics'", ",", ")", ",", "dict", ")", ",", "(", "(", "'javaScript'", ",", ")", ",", "dict", ")", ",", "(", "(", "'modifiedPreferences'", ",", ")", ",", "dict", ")", ",", "(", "(", "'userJS'", ",", ")", ",", "dict", ")", ",", "(", "(", "'userJS'", ",", "'exists'", ")", ",", "bool", ")", ")", "for", "(", "path", ",", "type_", ")", "in", "spec", ":", "item", "=", "parsed", "for", "piece", "in", "path", ":", "item", "=", "item", ".", "get", "(", "piece", ")", "if", "(", "item", "is", "None", ")", ":", "return", "None", "if", "(", "not", "isinstance", "(", "item", ",", "type_", ")", ")", ":", "return", "None", "parsed", "[", "'modifiedPreferences'", "]", "=", "dict", "(", "(", "(", "key", ",", "val", ")", "for", "(", "key", ",", "val", ")", "in", "parsed", "[", "'modifiedPreferences'", "]", ".", "items", "(", ")", "if", "(", "not", "key", ".", "startswith", "(", "'print'", ")", ")", ")", ")", "return", "parsed" ]
normalizes the troubleshooting data from a question .
train
false
4,676
def addToThreadsRemove(extrusionHalfWidth, nestedRings, oldOrderedLocation, skein, threadSequence): while (len(nestedRings) > 0): getTransferClosestNestedRing(extrusionHalfWidth, nestedRings, oldOrderedLocation, skein, threadSequence)
[ "def", "addToThreadsRemove", "(", "extrusionHalfWidth", ",", "nestedRings", ",", "oldOrderedLocation", ",", "skein", ",", "threadSequence", ")", ":", "while", "(", "len", "(", "nestedRings", ")", ">", "0", ")", ":", "getTransferClosestNestedRing", "(", "extrusionHalfWidth", ",", "nestedRings", ",", "oldOrderedLocation", ",", "skein", ",", "threadSequence", ")" ]
add the nested rings to the skein threads, transferring the closest ring each time, starting from the last location .
train
false
4,677
def _CopyProperties(target_dict, source_dict): for (key, value) in source_dict['properties'].items(): assert ((key not in target_dict['properties']) or (target_dict['properties'][key] == value)), (source_dict, target_dict) target_dict['properties'][key] = deepcopy(value)
[ "def", "_CopyProperties", "(", "target_dict", ",", "source_dict", ")", ":", "for", "(", "key", ",", "value", ")", "in", "source_dict", "[", "'properties'", "]", ".", "items", "(", ")", ":", "assert", "(", "(", "key", "not", "in", "target_dict", "[", "'properties'", "]", ")", "or", "(", "target_dict", "[", "'properties'", "]", "[", "key", "]", "==", "value", ")", ")", ",", "(", "source_dict", ",", "target_dict", ")", "target_dict", "[", "'properties'", "]", "[", "key", "]", "=", "deepcopy", "(", "value", ")" ]
deep copies properties in source_dict[properties] to target_dict[properties] .
train
false
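Worked example for the _CopyProperties record above; the deepcopy means the target never aliases mutable values from the source.
# assuming the _CopyProperties definition from the record above is in scope
target = {'properties': {'a': 1}}
source = {'properties': {'b': [2, 3]}}
_CopyProperties(target, source)
assert target['properties'] == {'a': 1, 'b': [2, 3]}
assert target['properties']['b'] is not source['properties']['b']  # deep-copied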
4,679
def user_chpass(user, host='localhost', password=None, password_hash=None, allow_passwordless=False, unix_socket=None, password_column=None, **connection_args): args = {} if (password is not None): password_sql = 'PASSWORD(%(password)s)' args['password'] = password elif (password_hash is not None): password_sql = '%(password)s' args['password'] = password_hash elif (not salt.utils.is_true(allow_passwordless)): log.error('password or password_hash must be specified, unless allow_passwordless=True') return False else: password_sql = "''" dbc = _connect(**connection_args) if (dbc is None): return False if (not password_column): password_column = __password_column(**connection_args) cur = dbc.cursor() qry = (((('UPDATE mysql.user SET ' + password_column) + '=') + password_sql) + ' WHERE User=%(user)s AND Host = %(host)s;') args['user'] = user args['host'] = host if (salt.utils.is_true(allow_passwordless) and salt.utils.is_true(unix_socket)): if (host == 'localhost'): qry += ' IDENTIFIED VIA unix_socket' else: log.error('Auth via unix_socket can be set only for host=localhost') try: result = _execute(cur, qry, args) except MySQLdb.OperationalError as exc: err = 'MySQL Error {0}: {1}'.format(*exc) __context__['mysql.error'] = err log.error(err) return False if result: _execute(cur, 'FLUSH PRIVILEGES;') log.info("Password for user '{0}'@'{1}' has been {2}".format(user, host, ('changed' if any((password, password_hash)) else 'cleared'))) return True log.info("Password for user '{0}'@'{1}' was not {2}".format(user, host, ('changed' if any((password, password_hash)) else 'cleared'))) return False
[ "def", "user_chpass", "(", "user", ",", "host", "=", "'localhost'", ",", "password", "=", "None", ",", "password_hash", "=", "None", ",", "allow_passwordless", "=", "False", ",", "unix_socket", "=", "None", ",", "password_column", "=", "None", ",", "**", "connection_args", ")", ":", "args", "=", "{", "}", "if", "(", "password", "is", "not", "None", ")", ":", "password_sql", "=", "'PASSWORD(%(password)s)'", "args", "[", "'password'", "]", "=", "password", "elif", "(", "password_hash", "is", "not", "None", ")", ":", "password_sql", "=", "'%(password)s'", "args", "[", "'password'", "]", "=", "password_hash", "elif", "(", "not", "salt", ".", "utils", ".", "is_true", "(", "allow_passwordless", ")", ")", ":", "log", ".", "error", "(", "'password or password_hash must be specified, unless allow_passwordless=True'", ")", "return", "False", "else", ":", "password_sql", "=", "\"''\"", "dbc", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "dbc", "is", "None", ")", ":", "return", "False", "if", "(", "not", "password_column", ")", ":", "password_column", "=", "__password_column", "(", "**", "connection_args", ")", "cur", "=", "dbc", ".", "cursor", "(", ")", "qry", "=", "(", "(", "(", "(", "'UPDATE mysql.user SET '", "+", "password_column", ")", "+", "'='", ")", "+", "password_sql", ")", "+", "' WHERE User=%(user)s AND Host = %(host)s;'", ")", "args", "[", "'user'", "]", "=", "user", "args", "[", "'host'", "]", "=", "host", "if", "(", "salt", ".", "utils", ".", "is_true", "(", "allow_passwordless", ")", "and", "salt", ".", "utils", ".", "is_true", "(", "unix_socket", ")", ")", ":", "if", "(", "host", "==", "'localhost'", ")", ":", "qry", "+=", "' IDENTIFIED VIA unix_socket'", "else", ":", "log", ".", "error", "(", "'Auth via unix_socket can be set only for host=localhost'", ")", "try", ":", "result", "=", "_execute", "(", "cur", ",", "qry", ",", "args", ")", "except", "MySQLdb", ".", "OperationalError", "as", "exc", ":", "err", "=", "'MySQL Error {0}: {1}'", ".", "format", "(", "*", "exc", ")", "__context__", "[", "'mysql.error'", "]", "=", "err", "log", ".", "error", "(", "err", ")", "return", "False", "if", "result", ":", "_execute", "(", "cur", ",", "'FLUSH PRIVILEGES;'", ")", "log", ".", "info", "(", "\"Password for user '{0}'@'{1}' has been {2}\"", ".", "format", "(", "user", ",", "host", ",", "(", "'changed'", "if", "any", "(", "(", "password", ",", "password_hash", ")", ")", "else", "'cleared'", ")", ")", ")", "return", "True", "log", ".", "info", "(", "\"Password for user '{0}'@'{1}' was not {2}\"", ".", "format", "(", "user", ",", "host", ",", "(", "'changed'", "if", "any", "(", "(", "password", ",", "password_hash", ")", ")", "else", "'cleared'", ")", ")", ")", "return", "False" ]
change the password for a mysql user .
train
false
4,682
def getFirstTranslatorFileNameUnmodified(fileName): if (fileName != ''): return fileName unmodified = getGNUTranslatorFilesUnmodified() if (len(unmodified) == 0): print 'There are no unmodified gcode files in this folder.' return '' return unmodified[0]
[ "def", "getFirstTranslatorFileNameUnmodified", "(", "fileName", ")", ":", "if", "(", "fileName", "!=", "''", ")", ":", "return", "fileName", "unmodified", "=", "getGNUTranslatorFilesUnmodified", "(", ")", "if", "(", "len", "(", "unmodified", ")", "==", "0", ")", ":", "print", "'There are no unmodified gcode files in this folder.'", "return", "''", "return", "unmodified", "[", "0", "]" ]
get the given file name, or if it is empty, the first unmodified gcode file name from the translators in the import plugins folder .
train
false
4,683
def postfixes(seq): n = len(seq) for i in range(n): (yield seq[((n - i) - 1):])
[ "def", "postfixes", "(", "seq", ")", ":", "n", "=", "len", "(", "seq", ")", "for", "i", "in", "range", "(", "n", ")", ":", "(", "yield", "seq", "[", "(", "(", "n", "-", "i", ")", "-", "1", ")", ":", "]", ")" ]
generate all postfixes of a sequence .
train
false
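Usage sketch for the postfixes record above; postfixes come out shortest first.
def postfixes(seq):
    n = len(seq)
    for i in range(n):
        yield seq[((n - i) - 1):]

assert list(postfixes([1, 2, 3])) == [[3], [2, 3], [1, 2, 3]]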
4,685
def is_unavailable_exception(e): try: if ((e.errcode == (-1)) or (e.headers is None)): return True exc_mess = e.headers.get('X-exception') except AttributeError: exc_mess = str(e) if (exc_mess and ('temporarily unavailable' in exc_mess.lower())): return True
[ "def", "is_unavailable_exception", "(", "e", ")", ":", "try", ":", "if", "(", "(", "e", ".", "errcode", "==", "(", "-", "1", ")", ")", "or", "(", "e", ".", "headers", "is", "None", ")", ")", ":", "return", "True", "exc_mess", "=", "e", ".", "headers", ".", "get", "(", "'X-exception'", ")", "except", "AttributeError", ":", "exc_mess", "=", "str", "(", "e", ")", "if", "(", "exc_mess", "and", "(", "'temporarily unavailable'", "in", "exc_mess", ".", "lower", "(", ")", ")", ")", ":", "return", "True" ]
returns true if the given protocolerror is the product of a server-side exception caused by the temporarily unavailable response sometimes given by operations on non-blocking sockets .
train
false
4,687
def symmath_check_simple(expect, ans, adict={}, symtab=None, extra_options=None): options = {'__MATRIX__': False, '__ABC__': False, '__LOWER__': False} if extra_options: options.update(extra_options) for op in options: if (op in expect): expect = expect.replace(op, '') options[op] = True expect = expect.replace('__OR__', '__or__') if options['__LOWER__']: expect = expect.lower() ans = ans.lower() try: ret = check(expect, ans, matrix=options['__MATRIX__'], abcsym=options['__ABC__'], symtab=symtab) except Exception as err: return {'ok': False, 'msg': ('Error %s<br/>Failed in evaluating check(%s,%s)' % (err, expect, ans))} return ret
[ "def", "symmath_check_simple", "(", "expect", ",", "ans", ",", "adict", "=", "{", "}", ",", "symtab", "=", "None", ",", "extra_options", "=", "None", ")", ":", "options", "=", "{", "'__MATRIX__'", ":", "False", ",", "'__ABC__'", ":", "False", ",", "'__LOWER__'", ":", "False", "}", "if", "extra_options", ":", "options", ".", "update", "(", "extra_options", ")", "for", "op", "in", "options", ":", "if", "(", "op", "in", "expect", ")", ":", "expect", "=", "expect", ".", "replace", "(", "op", ",", "''", ")", "options", "[", "op", "]", "=", "True", "expect", "=", "expect", ".", "replace", "(", "'__OR__'", ",", "'__or__'", ")", "if", "options", "[", "'__LOWER__'", "]", ":", "expect", "=", "expect", ".", "lower", "(", ")", "ans", "=", "ans", ".", "lower", "(", ")", "try", ":", "ret", "=", "check", "(", "expect", ",", "ans", ",", "matrix", "=", "options", "[", "'__MATRIX__'", "]", ",", "abcsym", "=", "options", "[", "'__ABC__'", "]", ",", "symtab", "=", "symtab", ")", "except", "Exception", "as", "err", ":", "return", "{", "'ok'", ":", "False", ",", "'msg'", ":", "(", "'Error %s<br/>Failed in evaluating check(%s,%s)'", "%", "(", "err", ",", "expect", ",", "ans", ")", ")", "}", "return", "ret" ]
check a symbolic mathematical expression using sympy .
train
false
4,688
def get_tests_from_fs(parent_dir, control_pattern, add_noncompliant=False): tests = {} profilers = False if ('client/profilers' in parent_dir): profilers = True for dir in [parent_dir]: files = recursive_walk(dir, control_pattern) for file in files: if (('__init__.py' in file) or ('.svn' in file)): continue if (not profilers): if (not add_noncompliant): try: found_test = control_data.parse_control(file, raise_warnings=True) tests[file] = found_test except control_data.ControlVariableException as e: logging.warn('Skipping %s\n%s', file, e) except Exception as e: logging.error('Bad %s\n%s', file, e) else: found_test = control_data.parse_control(file) tests[file] = found_test else: tests[file] = compiler.parseFile(file).doc return tests
[ "def", "get_tests_from_fs", "(", "parent_dir", ",", "control_pattern", ",", "add_noncompliant", "=", "False", ")", ":", "tests", "=", "{", "}", "profilers", "=", "False", "if", "(", "'client/profilers'", "in", "parent_dir", ")", ":", "profilers", "=", "True", "for", "dir", "in", "[", "parent_dir", "]", ":", "files", "=", "recursive_walk", "(", "dir", ",", "control_pattern", ")", "for", "file", "in", "files", ":", "if", "(", "(", "'__init__.py'", "in", "file", ")", "or", "(", "'.svn'", "in", "file", ")", ")", ":", "continue", "if", "(", "not", "profilers", ")", ":", "if", "(", "not", "add_noncompliant", ")", ":", "try", ":", "found_test", "=", "control_data", ".", "parse_control", "(", "file", ",", "raise_warnings", "=", "True", ")", "tests", "[", "file", "]", "=", "found_test", "except", "control_data", ".", "ControlVariableException", "as", "e", ":", "logging", ".", "warn", "(", "'Skipping %s\\n%s'", ",", "file", ",", "e", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "'Bad %s\\n%s'", ",", "file", ",", "e", ")", "else", ":", "found_test", "=", "control_data", ".", "parse_control", "(", "file", ")", "tests", "[", "file", "]", "=", "found_test", "else", ":", "tests", "[", "file", "]", "=", "compiler", ".", "parseFile", "(", "file", ")", ".", "doc", "return", "tests" ]
find control files in the file system and load a dict with their info .
train
false
4,689
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
4,690
def current_request(): return getattr(_thread_local, u'request', None)
[ "def", "current_request", "(", ")", ":", "return", "getattr", "(", "_thread_local", ",", "u'request'", ",", "None", ")" ]
retrieves the request from the current thread .
train
false
4,692
def b64encode(t): return base64.b64encode(t)
[ "def", "b64encode", "(", "t", ")", ":", "return", "base64", ".", "b64encode", "(", "t", ")" ]
encode a string using base64 .
train
false
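Quick check of the b64encode wrapper above; note the snippet is Python 2 style (str in, str out), while Python 3's base64.b64encode takes and returns bytes.
import base64

assert base64.b64encode(b'hi') == b'aGk='
assert base64.b64decode(b'aGk=') == b'hi'  # round-trips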
4,693
def GetRealPath(filename): if os.path.isabs(filename): return filename if (filename.startswith('./') or filename.startswith('../')): return os.path.abspath(filename) path = os.getenv('PATH', '') for directory in path.split(':'): tryname = os.path.join(directory, filename) if os.path.exists(tryname): if (not os.path.isabs(directory)): return os.path.abspath(tryname) return tryname if os.path.exists(filename): return os.path.abspath(filename) return None
[ "def", "GetRealPath", "(", "filename", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "filename", ")", ":", "return", "filename", "if", "(", "filename", ".", "startswith", "(", "'./'", ")", "or", "filename", ".", "startswith", "(", "'../'", ")", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "filename", ")", "path", "=", "os", ".", "getenv", "(", "'PATH'", ",", "''", ")", "for", "directory", "in", "path", ".", "split", "(", "':'", ")", ":", "tryname", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "filename", ")", "if", "os", ".", "path", ".", "exists", "(", "tryname", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isabs", "(", "directory", ")", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "tryname", ")", "return", "tryname", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "filename", ")", "return", "None" ]
given an executable filename, find its absolute path by checking absolute and relative forms and searching the path; returns none if not found .
train
true
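Hedged usage sketch for the GetRealPath record above; what the PATH lookup returns depends on the host, so only the shape of the result is asserted.
import os
# assuming the GetRealPath definition from the record above is in scope
assert GetRealPath('/bin/ls') == '/bin/ls'  # absolute names come back unchanged
hit = GetRealPath('ls')                     # e.g. '/bin/ls', or None if not on PATH
assert hit is None or os.path.isabs(hit)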
4,694
@requires_sklearn def test_ica_rank_reduction(): raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data() picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')[:10] n_components = 5 max_pca_components = len(picks) for n_pca_components in [6, 10]: with warnings.catch_warnings(record=True): warnings.simplefilter('always') ica = ICA(n_components=n_components, max_pca_components=max_pca_components, n_pca_components=n_pca_components, method='fastica', max_iter=1).fit(raw, picks=picks) rank_before = raw.estimate_rank(picks=picks) assert_equal(rank_before, len(picks)) raw_clean = ica.apply(raw.copy()) rank_after = raw_clean.estimate_rank(picks=picks) assert_true((n_components < n_pca_components <= rank_after <= rank_before))
[ "@", "requires_sklearn", "def", "test_ica_rank_reduction", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", ".", "crop", "(", "0.5", ",", "stop", ")", ".", "load_data", "(", ")", "picks", "=", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "True", ",", "stim", "=", "False", ",", "ecg", "=", "False", ",", "eog", "=", "False", ",", "exclude", "=", "'bads'", ")", "[", ":", "10", "]", "n_components", "=", "5", "max_pca_components", "=", "len", "(", "picks", ")", "for", "n_pca_components", "in", "[", "6", ",", "10", "]", ":", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "ica", "=", "ICA", "(", "n_components", "=", "n_components", ",", "max_pca_components", "=", "max_pca_components", ",", "n_pca_components", "=", "n_pca_components", ",", "method", "=", "'fastica'", ",", "max_iter", "=", "1", ")", ".", "fit", "(", "raw", ",", "picks", "=", "picks", ")", "rank_before", "=", "raw", ".", "estimate_rank", "(", "picks", "=", "picks", ")", "assert_equal", "(", "rank_before", ",", "len", "(", "picks", ")", ")", "raw_clean", "=", "ica", ".", "apply", "(", "raw", ".", "copy", "(", ")", ")", "rank_after", "=", "raw_clean", ".", "estimate_rank", "(", "picks", "=", "picks", ")", "assert_true", "(", "(", "n_components", "<", "n_pca_components", "<=", "rank_after", "<=", "rank_before", ")", ")" ]
test ica rank reduction .
train
false
4,695
def _encode_datetime(name, value, dummy0, dummy1): millis = _datetime_to_millis(value) return (('\x09' + name) + _PACK_LONG(millis))
[ "def", "_encode_datetime", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "millis", "=", "_datetime_to_millis", "(", "value", ")", "return", "(", "(", "'\x09'", "+", "name", ")", "+", "_PACK_LONG", "(", "millis", ")", ")" ]
encode datetime .
train
true
4,696
@pytest.fixture def import_fake(monkeypatch): fake = ImportFake() monkeypatch.setattr('builtins.__import__', fake.fake_import) monkeypatch.setattr('qutebrowser.utils.version.importlib.import_module', fake.fake_importlib_import) return fake
[ "@", "pytest", ".", "fixture", "def", "import_fake", "(", "monkeypatch", ")", ":", "fake", "=", "ImportFake", "(", ")", "monkeypatch", ".", "setattr", "(", "'builtins.__import__'", ",", "fake", ".", "fake_import", ")", "monkeypatch", ".", "setattr", "(", "'qutebrowser.utils.version.importlib.import_module'", ",", "fake", ".", "fake_importlib_import", ")", "return", "fake" ]
fixture to patch imports using importfake .
train
false
4,697
def _reset_config(app): user = factories.Sysadmin() env = {'REMOTE_USER': user['name'].encode('ascii')} app.post(url=url_for(controller='admin', action='reset_config'), extra_environ=env)
[ "def", "_reset_config", "(", "app", ")", ":", "user", "=", "factories", ".", "Sysadmin", "(", ")", "env", "=", "{", "'REMOTE_USER'", ":", "user", "[", "'name'", "]", ".", "encode", "(", "'ascii'", ")", "}", "app", ".", "post", "(", "url", "=", "url_for", "(", "controller", "=", "'admin'", ",", "action", "=", "'reset_config'", ")", ",", "extra_environ", "=", "env", ")" ]
reset config via action .
train
false
4,699
@register.simple_tag def check_description(check): try: return escape(CHECKS[check].description) except KeyError: return escape(check)
[ "@", "register", ".", "simple_tag", "def", "check_description", "(", "check", ")", ":", "try", ":", "return", "escape", "(", "CHECKS", "[", "check", "]", ".", "description", ")", "except", "KeyError", ":", "return", "escape", "(", "check", ")" ]
returns check description .
train
false
4,700
@contextmanager def check_exact_number_of_calls(object_with_method, method_name, num_calls): with check_number_of_calls(object_with_method, method_name, num_calls, num_calls): (yield)
[ "@", "contextmanager", "def", "check_exact_number_of_calls", "(", "object_with_method", ",", "method_name", ",", "num_calls", ")", ":", "with", "check_number_of_calls", "(", "object_with_method", ",", "method_name", ",", "num_calls", ",", "num_calls", ")", ":", "(", "yield", ")" ]
instruments the given method on the given object to verify the number of calls to the method is exactly equal to num_calls .
train
false
4,701
def method_params(doc): doclines = doc.splitlines() if ('Args:' in doclines): begin = doclines.index('Args:') if ('Returns:' in doclines[(begin + 1):]): end = doclines.index('Returns:', begin) args = doclines[(begin + 1):end] else: args = doclines[(begin + 1):] parameters = [] for line in args: m = re.search('^\\s+([a-zA-Z0-9_]+): (.*)', line) if (m is None): continue pname = m.group(1) desc = m.group(2) if ('(required)' not in desc): pname = (pname + '=None') parameters.append(pname) parameters = ', '.join(parameters) else: parameters = '' return parameters
[ "def", "method_params", "(", "doc", ")", ":", "doclines", "=", "doc", ".", "splitlines", "(", ")", "if", "(", "'Args:'", "in", "doclines", ")", ":", "begin", "=", "doclines", ".", "index", "(", "'Args:'", ")", "if", "(", "'Returns:'", "in", "doclines", "[", "(", "begin", "+", "1", ")", ":", "]", ")", ":", "end", "=", "doclines", ".", "index", "(", "'Returns:'", ",", "begin", ")", "args", "=", "doclines", "[", "(", "begin", "+", "1", ")", ":", "end", "]", "else", ":", "args", "=", "doclines", "[", "(", "begin", "+", "1", ")", ":", "]", "parameters", "=", "[", "]", "for", "line", "in", "args", ":", "m", "=", "re", ".", "search", "(", "'^\\\\s+([a-zA-Z0-9_]+): (.*)'", ",", "line", ")", "if", "(", "m", "is", "None", ")", ":", "continue", "pname", "=", "m", ".", "group", "(", "1", ")", "desc", "=", "m", ".", "group", "(", "2", ")", "if", "(", "'(required)'", "not", "in", "desc", ")", ":", "pname", "=", "(", "pname", "+", "'=None'", ")", "parameters", ".", "append", "(", "pname", ")", "parameters", "=", "', '", ".", "join", "(", "parameters", ")", "else", ":", "parameters", "=", "''", "return", "parameters" ]
document the parameters of a method .
train
false
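Worked example for the method_params record above, showing how a Google-style docstring becomes a parameter signature; the sample docstring is invented for illustration.
# assuming the method_params definition from the record above is in scope
doc = '''Fetches a thing.

Args:
  name: string, which thing. (required)
  count: integer, how many.

Returns:
  The thing.
'''
assert method_params(doc) == 'name, count=None'  # optional params get =None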
4,702
def iter_multipart_mime_documents(wsgi_input, boundary, read_chunk_size=4096): boundary = ('--' + boundary) blen = (len(boundary) + 2) try: got = wsgi_input.readline(blen) while (got == '\r\n'): got = wsgi_input.readline(blen) except (IOError, ValueError) as e: raise swift.common.exceptions.ChunkReadError(str(e)) if (got.strip() != boundary): raise swift.common.exceptions.MimeInvalid('invalid starting boundary: wanted %r, got %r', (boundary, got)) boundary = ('\r\n' + boundary) input_buffer = '' done = False while (not done): it = _MultipartMimeFileLikeObject(wsgi_input, boundary, input_buffer, read_chunk_size) (yield it) done = it.no_more_files input_buffer = it.input_buffer
[ "def", "iter_multipart_mime_documents", "(", "wsgi_input", ",", "boundary", ",", "read_chunk_size", "=", "4096", ")", ":", "boundary", "=", "(", "'--'", "+", "boundary", ")", "blen", "=", "(", "len", "(", "boundary", ")", "+", "2", ")", "try", ":", "got", "=", "wsgi_input", ".", "readline", "(", "blen", ")", "while", "(", "got", "==", "'\\r\\n'", ")", ":", "got", "=", "wsgi_input", ".", "readline", "(", "blen", ")", "except", "(", "IOError", ",", "ValueError", ")", "as", "e", ":", "raise", "swift", ".", "common", ".", "exceptions", ".", "ChunkReadError", "(", "str", "(", "e", ")", ")", "if", "(", "got", ".", "strip", "(", ")", "!=", "boundary", ")", ":", "raise", "swift", ".", "common", ".", "exceptions", ".", "MimeInvalid", "(", "'invalid starting boundary: wanted %r, got %r'", ",", "(", "boundary", ",", "got", ")", ")", "boundary", "=", "(", "'\\r\\n'", "+", "boundary", ")", "input_buffer", "=", "''", "done", "=", "False", "while", "(", "not", "done", ")", ":", "it", "=", "_MultipartMimeFileLikeObject", "(", "wsgi_input", ",", "boundary", ",", "input_buffer", ",", "read_chunk_size", ")", "(", "yield", "it", ")", "done", "=", "it", ".", "no_more_files", "input_buffer", "=", "it", ".", "input_buffer" ]
given a multi-part-mime-encoded input file object and boundary, yield file-like objects for each part .
train
false
4,703
@command('(open|view)\\s*(\\d{1,4})') def open_view_bynum(action, num): srt = sorted(g.userpl) name = srt[(int(num) - 1)] open_save_view(action, name)
[ "@", "command", "(", "'(open|view)\\\\s*(\\\\d{1,4})'", ")", "def", "open_view_bynum", "(", "action", ",", "num", ")", ":", "srt", "=", "sorted", "(", "g", ".", "userpl", ")", "name", "=", "srt", "[", "(", "int", "(", "num", ")", "-", "1", ")", "]", "open_save_view", "(", "action", ",", "name", ")" ]
open or view a saved playlist by number .
train
false
4,704
def is_container_agent_running(node): d = node.run_script('service_running', 'flocker-container-agent') def not_existing(failure): failure.trap(ProcessTerminated) return False d.addCallbacks((lambda result: True), not_existing) return d
[ "def", "is_container_agent_running", "(", "node", ")", ":", "d", "=", "node", ".", "run_script", "(", "'service_running'", ",", "'flocker-container-agent'", ")", "def", "not_existing", "(", "failure", ")", ":", "failure", ".", "trap", "(", "ProcessTerminated", ")", "return", "False", "d", ".", "addCallbacks", "(", "(", "lambda", "result", ":", "True", ")", ",", "not_existing", ")", "return", "d" ]
check if the container agent is running on the specified node .
train
false