Dataset columns:
id_within_dataset: int64 (values 1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: list (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 class)
is_duplicated: bool (2 classes)
53,619
def find_xpath_with_wait(context, id_str, **kwargs):
    return _find_elem_with_wait(context, (By.XPATH, id_str), **kwargs)
Tries to find an element with the given XPath, with an explicit timeout.
split: train | is_duplicated: false
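The snippet assumes a _find_elem_with_wait helper. A minimal sketch of what such a helper could look like, using Selenium's WebDriverWait (the helper body, the context.browser attribute, and the timeout default are assumptions, not part of the dataset):

from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def _find_elem_with_wait(context, locator, timeout_s=10):
    # Wait until the element identified by the (By, value) locator tuple is
    # present in the DOM, or raise TimeoutException after timeout_s seconds.
    return WebDriverWait(context.browser, timeout_s).until(
        EC.presence_of_element_located(locator))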
53,620
def _current_component(view_func, dashboard=None, panel=None):
    @functools.wraps(view_func, assigned=available_attrs(view_func))
    def dec(request, *args, **kwargs):
        if dashboard:
            request.horizon['dashboard'] = dashboard
        if panel:
            request.horizon['panel'] = panel
        return view_func(request, *args, **kwargs)
    return dec
Sets the currently active dashboard and/or panel on the request.
split: train | is_duplicated: true
53,621
def modify_profile(hostname, username, password, profile_type, name, **kwargs):
    bigip_session = _build_session(username, password)
    payload = {}
    payload['name'] = name
    for (key, value) in six.iteritems(kwargs):
        if (not key.startswith('__')):
            if (key not in ['hostname', 'username', 'password', 'profile_type']):
                key = key.replace('_', '-')
                try:
                    payload[key] = _set_value(value)
                except salt.exceptions.CommandExecutionError:
                    return 'Error: Unable to Parse JSON data for parameter: {key}\n{value}'.format(key=key, value=value)
    try:
        response = bigip_session.put(
            (BIG_IP_URL_BASE.format(host=hostname) + '/ltm/profile/{type}/{name}'.format(type=profile_type, name=name)),
            data=json.dumps(payload))
    except requests.exceptions.ConnectionError as e:
        return _load_connection_error(hostname, e)
    return _load_response(response)
A function to connect to a BIG-IP device and modify a profile.
split: train | is_duplicated: true
53,622
def is_string_secure(string):
    if re.match(VALID_CHARS_REGEX, string):
        return True
    else:
        return False
Validates that the string does not contain any characters that are indicative of a security breach.
split: train | is_duplicated: false
53,624
def convertFsDirWavToWav(dirName, Fs, nC):
    types = (((dirName + os.sep) + '*.wav'),)
    filesToProcess = []
    for files in types:
        filesToProcess.extend(glob.glob(files))
    newDir = ((((((dirName + os.sep) + 'Fs') + str(Fs)) + '_') + 'NC') + str(nC))
    if (os.path.exists(newDir) and (newDir != '.')):
        shutil.rmtree(newDir)
    os.makedirs(newDir)
    for f in filesToProcess:
        (_, wavFileName) = ntpath.split(f)
        command = (((((((((('avconv -i "' + f) + '" -ar ') + str(Fs)) + ' -ac ') + str(nC)) + ' "') + newDir) + os.sep) + wavFileName) + '"')
        print command
        os.system(command)
Converts the WAV files stored in a folder to WAV with a different sampling frequency and number of channels.
split: train | is_duplicated: true
53,625
def _read_signify_ed25519_signature(signature_file):
    with open(signature_file) as f:
        sig = binascii.a2b_base64(f.read().splitlines()[1])[10:]
    if (len(sig) != 64):
        raise Exception('bogus Ed25519 signature: raw signature length was {}, but expected 64'.format(len(sig)))
    return sig
Read an Ed25519 signature file created with OpenBSD signify.
split: train | is_duplicated: false
53,627
@must_be_valid_project
@must_be_contributor_or_public
@must_not_be_registration
def togglewatch_post(auth, node, **kwargs):
    user = auth.user
    watch_config = WatchConfig(node=node, digest=request.json.get('digest', False), immediate=request.json.get('immediate', False))
    try:
        if user.is_watching(node):
            user.unwatch(watch_config)
        else:
            user.watch(watch_config)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)
    user.save()
    return {'status': 'success', 'watchCount': node.watches.count(), 'watched': user.is_watching(node)}
View for toggling watch mode for a node.
split: train | is_duplicated: false
53,628
def import_library(taglib_module):
    try:
        mod = import_module(taglib_module)
    except ImportError as e:
        if is_library_missing(taglib_module):
            return None
        else:
            raise InvalidTemplateLibrary(('ImportError raised loading %s: %s' % (taglib_module, e)))
    try:
        return mod.register
    except AttributeError:
        raise InvalidTemplateLibrary(("Template library %s does not have a variable named 'register'" % taglib_module))
Load a template tag library module.
split: train | is_duplicated: false
53,629
def p_namespace_scope(p):
    p[0] = p[1]
Grammar rule: namespace_scope : '*' | identifier.
split: train | is_duplicated: false
53,630
def waist2rayleigh(w, wavelen):
    (w, wavelen) = map(sympify, (w, wavelen))
    return (((w ** 2) * pi) / wavelen)
Calculate the Rayleigh range from the waist of a Gaussian beam.
split: train | is_duplicated: false
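A quick usage sketch (this appears to be SymPy's waist2rayleigh; the symbols here are illustrative):

>>> from sympy import symbols
>>> w, wavelen = symbols('w wavelen', positive=True)
>>> waist2rayleigh(w, wavelen)
pi*w**2/wavelen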
53,632
def reverse_url(handler_name, key_name=None, key_value=None, kwargs=None):
    kwargs_for_reverse = ({key_name: unicode(key_value)} if key_name else None)
    if kwargs:
        kwargs_for_reverse.update(kwargs)
    return reverse(('contentstore.views.' + handler_name), kwargs=kwargs_for_reverse)
Creates the URL for the given handler.
split: train | is_duplicated: false
53,633
def scan_postfix_cleanup_line(date, _, collector):
    collector['activity-by-hour']['smtp-receives'][date.hour] += 1
Scan a postfix cleanup log line and extract interesting data. It is assumed that every postfix/cleanup log line indicates an email that was successfully received by postfix.
split: train | is_duplicated: false
53,634
def test_different_caller():
    assert_signature('[str][0](', 'str', 0)
    assert_signature('[str][0]()', 'str', 0, column=len('[str][0]('))
    assert_signature('(str)(', 'str', 0)
    assert_signature('(str)()', 'str', 0, column=len('(str)('))
It is possible to not use names: the caller can be a subscript or a parenthesized expression.
split: train | is_duplicated: false
53,635
def test_all_fields(script):
    result = script.pip('show', 'pip')
    lines = result.stdout.splitlines()
    expected = set(['Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'License', 'Location', 'Requires'])
    actual = set((re.sub(':.*$', '', line) for line in lines))
    assert (actual == expected)
Test that all the fields are present.
split: train | is_duplicated: false
53,636
def neg_sampling(W_list, b_list, nsamples, beta=1.0, pa_bias=None, marginalize_odd=True, theano_rng=None):
    depth = len(b_list)
    new_nsamples = [nsamples[i] for i in xrange(depth)]
    _sample_even_odd(W_list, b_list, new_nsamples, beta, odd=marginalize_odd)
    _activation_even_odd(W_list, b_list, new_nsamples, beta, odd=(not marginalize_odd))
    new_nsamples[(not marginalize_odd)] += (pa_bias * (1.0 - beta))
    for i in xrange((not marginalize_odd), depth, 2):
        new_nsamples[i] = T.nnet.sigmoid(new_nsamples[i])
        new_nsamples[i] = theano_rng.binomial(size=nsamples[i].get_value().shape, n=1, p=new_nsamples[i], dtype=floatX)
    return new_nsamples
Generate a sample from the intermediate distribution defined at inverse temperature beta.
split: train | is_duplicated: false
53,637
def _pipeline_present_with_definition(name, expected_pipeline_objects, expected_parameter_objects, expected_parameter_values, region, key, keyid, profile):
    result_pipeline_id = __salt__['boto_datapipeline.pipeline_id_from_name'](name, region=region, key=key, keyid=keyid, profile=profile)
    if ('error' in result_pipeline_id):
        return (False, {})
    pipeline_id = result_pipeline_id['result']
    pipeline_definition_result = __salt__['boto_datapipeline.get_pipeline_definition'](pipeline_id, version='active', region=region, key=key, keyid=keyid, profile=profile)
    if ('error' in pipeline_definition_result):
        return (False, {})
    pipeline_definition = _standardize(pipeline_definition_result['result'])
    pipeline_objects = pipeline_definition.get('pipelineObjects')
    parameter_objects = pipeline_definition.get('parameterObjects')
    paramater_values = pipeline_definition.get('parameterValues')
    present = (_recursive_compare(_cleaned(pipeline_objects), _cleaned(expected_pipeline_objects)) and
               _recursive_compare(parameter_objects, expected_parameter_objects) and
               _recursive_compare(paramater_values, expected_parameter_values))
    return (present, pipeline_definition)
Return True if the pipeline exists and the definition matches.
split: train | is_duplicated: true
53,638
@require_admin_context
def instance_type_access_add(context, flavor_id, project_id):
    session = get_session()
    with session.begin():
        instance_type_ref = instance_type_get_by_flavor_id(context, flavor_id, session=session)
        instance_type_id = instance_type_ref['id']
        access_ref = _instance_type_access_query(context, session=session).filter_by(instance_type_id=instance_type_id).filter_by(project_id=project_id).first()
        if access_ref:
            raise exception.FlavorAccessExists(flavor_id=flavor_id, project_id=project_id)
        access_ref = models.InstanceTypeProjects()
        access_ref.update({'instance_type_id': instance_type_id, 'project_id': project_id})
        access_ref.save(session=session)
        return access_ref
Add the given tenant to the flavor access list.
split: train | is_duplicated: false
53,640
def convert_case(s):
    return ''.join([a.title() for a in s.split('_') if a])
Given a string in snake_case, convert it to CamelCase.
split: train | is_duplicated: false
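Illustrative values:

>>> convert_case('foo_bar_baz')
'FooBarBaz'
>>> convert_case('__foo__bar__')   # empty fragments are filtered out by the `if a`
'FooBar'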
53,641
def list_of_array_equal(s, t):
    eq_(len(s), len(t))
    # The original wrapped the assertions in all(...) over a generator; since
    # assert_array_equal() returns None, all() stopped after the first pair.
    # A plain loop runs every comparison.
    for (x, y) in zip(s, t):
        assert_array_equal(x, y)
Compare two lists of ndarrays, s and t, elementwise.
split: train | is_duplicated: false
53,642
def merge_ownership_periods(mappings):
    return valmap(
        lambda v: tuple(
            OwnershipPeriod(a.start, b.start, a.sid, a.value)
            for (a, b) in sliding_window(
                2,
                concatv(sorted(v), [OwnershipPeriod(pd.Timestamp.max.tz_localize('utc'), None, None, None)]),
            )
        ),
        mappings,
    )
Given a dict of mappings where the values are lists of OwnershipPeriod objects, set each period's end to the start of the following period (the last period is closed with a far-future timestamp).
split: train | is_duplicated: true
53,643
def _key_split(matchobj):
    keys = [k.strip() for k in matchobj.group(1).split('+')]
    return ' + '.join([('<span class="key">%s</span>' % key) for key in keys])
Expands a {key a+b+c} syntax into <span class="key">a</span> + <span class="key">b</span> + <span class="key">c</span>.
split: train | is_duplicated: false
53,644
@require_POST
def post_receive_hook_close_submitted(request, local_site_name=None, repository_id=None, hosting_service_id=None, hooks_uuid=None):
    repository = get_repository_for_hook(repository_id, hosting_service_id, local_site_name, hooks_uuid)
    try:
        payload = json.loads(request.body)
    except ValueError as e:
        logging.error(u'The payload is not in JSON format: %s', e, exc_info=1)
        return HttpResponseBadRequest(u'Invalid payload format')
    server_url = get_server_url(request=request)
    review_request_id_to_commits_map = close_review_requests(payload, server_url)
    if review_request_id_to_commits_map:
        close_all_review_requests(review_request_id_to_commits_map, local_site_name, repository, hosting_service_id)
    return HttpResponse()
Closes review requests as submitted automatically after a push.
split: train | is_duplicated: false
53,647
def mixing_dict(xy, normalized=False):
    d = {}
    psum = 0.0
    for (x, y) in xy:
        if (x not in d):
            d[x] = {}
        if (y not in d):
            d[y] = {}
        v = d[x].get(y, 0)
        d[x][y] = (v + 1)
        psum += 1
    if normalized:
        for (k, jdict) in d.items():
            for j in jdict:
                jdict[j] /= psum
    return d
Return a dictionary representation of a mixing matrix.
split: train | is_duplicated: false
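Illustrative input/output (a hypothetical edge list; dict ordering as on modern CPython):

>>> mixing_dict([('a', 'b'), ('a', 'b'), ('b', 'a')])
{'a': {'b': 2}, 'b': {'a': 1}}
>>> mixing_dict([('a', 'b'), ('a', 'b'), ('b', 'a')], normalized=True)
{'a': {'b': 0.6666666666666666}, 'b': {'a': 0.3333333333333333}}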
53,648
def move_by_taskmap(map, **kwargs):
    def task_name_in_map(body, message):
        return map.get(body[u'task'])
    return move(task_name_in_map, **kwargs)
Move tasks by matching from a {task_name: queue} mapping.
split: train | is_duplicated: false
53,650
def convert_time_to_utc(timestr):
    combined = datetime.combine(dt_util.start_of_local_day(), dt_util.parse_time(timestr))
    if (combined < datetime.now()):
        combined = (combined + timedelta(days=1))
    return dt_util.as_timestamp(combined)
Take a string like "08:00:00" and convert it to a Unix timestamp.
split: train | is_duplicated: false
53,651
def share_db():
    client = MongoClient(wiki_settings.SHAREJS_DB_URL)
    return client[wiki_settings.SHAREJS_DB_NAME]
Generate a DB client for the ShareJS DB.
split: train | is_duplicated: false
53,652
def comparison_type(logical_line, noqa):
    match = COMPARE_TYPE_REGEX.search(logical_line)
    if (match and (not noqa)):
        inst = match.group(1)
        if (inst and isidentifier(inst) and (inst not in SINGLETONS)):
            return (yield (match.start(), "E721 do not compare types, use 'isinstance()'"))
Object type comparisons should always use isinstance().
split: train | is_duplicated: true
53,653
def create_patch_ports(source, destination):
    common = common_utils.get_rand_name(max_length=4, prefix='')
    prefix = ('%s-%s-' % (PATCH_PREFIX, common))
    source_name = common_utils.get_rand_device_name(prefix=prefix)
    destination_name = common_utils.get_rand_device_name(prefix=prefix)
    source.add_patch_port(source_name, destination_name)
    destination.add_patch_port(destination_name, source_name)
Hook up two OVS bridges.
split: train | is_duplicated: false
53,655
def get_ring():
    ring_output = check_output([NODE_TOOL, 'ring', KEYSPACE])
    ring = []
    index = 0
    for line in ring_output.splitlines():
        fields = line.split()
        if (len(fields) != 8):
            continue
        ring.append({
            'index': index,
            'ip': fields[0],
            'status': fields[2],
            'state': fields[3],
            'load': load_bytes(float(fields[4]), fields[5]),
            'token': fields[7],
        })
        index += 1
    assert (len(ring) > 0)
    ideal_load = (sum((node['load'] for node in ring)) / len(ring))
    for (index, node) in enumerate(ring):
        try:
            node['skew'] = (abs((node['load'] - ideal_load)) / ideal_load)
        except ZeroDivisionError:
            node['skew'] = 0
        node['diff'] = abs((node['load'] - ring[(index - 1)]['load']))
    return ring
Return the ring status in a structured way.
split: train | is_duplicated: false
53,658
def get_int(int_str, default=_no_default):
    if (default == _no_default):
        return int(int_str)
    else:
        try:
            return int(int_str)
        except ValueError:
            return default
For convenience, a get-like method for taking the int() of a string.
split: train | is_duplicated: false
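Illustrative behaviour:

>>> get_int('42')
42
>>> get_int('forty-two', default=0)
0
>>> get_int('forty-two')            # without a default, the ValueError propagates
Traceback (most recent call last):
  ...
ValueError: invalid literal for int() with base 10: 'forty-two'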
53,660
def test_show_verbose_installer(script, data):
    wheel_file = data.packages.join('simple.dist-0.1-py2.py3-none-any.whl')
    script.pip('install', '--no-index', wheel_file)
    result = script.pip('show', '--verbose', 'simple.dist')
    lines = result.stdout.splitlines()
    assert ('Name: simple.dist' in lines)
    assert ('Installer: pip' in lines)
Test that the installer is shown.
split: train | is_duplicated: false
53,661
def base64_encodestring(instr):
    if six.PY3:
        b = salt.utils.to_bytes(instr)
        b64 = base64.encodebytes(b)
        return salt.utils.to_str(b64)
    return base64.encodestring(instr)
Encode a string as base64 using the "legacy" Python interface.
split: train | is_duplicated: false
53,663
def course_detail(request, username, course_key):
    user = get_effective_user(request.user, username)
    return get_course_overview_with_access(user, get_permission_for_course_about(), course_key)
Return a single course identified by course_key.
split: train | is_duplicated: false
53,664
def extended_linecache_checkcache(filename=None, orig_checkcache=linecache.checkcache):
    cache = linecache.cache
    save = {}
    for filename in cache.keys():
        if ((filename[:1] + filename[(-1):]) == '<>'):
            save[filename] = cache[filename]
    orig_checkcache()
    cache.update(save)
Extend linecache.checkcache so that cache entries whose filenames are wrapped in angle brackets (such as shell pseudo-files) survive the cache check.
split: train | is_duplicated: false
53,665
def pop(queue, quantity=1):
    cmd = 'SELECT name FROM {0}'.format(queue)
    if (quantity != 'all'):
        try:
            quantity = int(quantity)
        except ValueError as exc:
            error_txt = 'Quantity must be an integer or "all".\nError: "{0}".'.format(exc)
            raise SaltInvocationError(error_txt)
        cmd = ''.join([cmd, ' LIMIT {0}'.format(quantity)])
    log.debug('SQL Query: {0}'.format(cmd))
    con = _conn(queue)
    items = []
    with con:
        cur = con.cursor()
        result = cur.execute(cmd).fetchall()
        if (len(result) > 0):
            items = [item[0] for item in result]
            itemlist = '","'.join(items)
            _quote_escape(itemlist)
            del_cmd = 'DELETE FROM {0} WHERE name IN ("{1}")'.format(queue, itemlist)
            log.debug('SQL Query: {0}'.format(del_cmd))
            cur.execute(del_cmd)
        con.commit()
    log.info(items)
    return items
Pop one or more (or all) items from the queue and return them.
split: train | is_duplicated: false
53,666
def test_barn_prefixes():
    assert (u.fbarn is u.femtobarn)
    assert (u.pbarn is u.picobarn)
Regression test for URL.
split: train | is_duplicated: false
53,668
def hex_digest(x):
    assert isinstance(x, np.ndarray)
    rval = hashlib.md5(x.tostring()).hexdigest()
    rval = (((rval + '|strides=[') + ','.join((str(stride) for stride in x.strides))) + ']')
    rval = (((rval + '|shape=[') + ','.join((str(s) for s in x.shape))) + ']')
    return rval
Returns a short string summarizing an ndarray: the MD5 hex digest of its raw data, plus its strides and shape.
split: train | is_duplicated: false
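The output shape, illustrated (the digest below is a placeholder, not a real MD5; strides/shape are for a C-ordered 2x3 float64 array):

>>> import numpy as np
>>> hex_digest(np.zeros((2, 3)))
'<32-char-md5-hex>|strides=[24,8]|shape=[2,3]'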
53,669
def starts_with(text, substring):
    assert text.startswith(substring), ("%r doesn't start with %r" % (text, substring))
Assert that text starts with substring.
split: train | is_duplicated: false
53,671
def entropy_of_byte(packets, position):
    counter = [0 for _ in range(256)]
    for pkt in packets:
        if ((- position) <= len(pkt)):
            counter[ord(pkt[position])] += 1
    entropy = 0
    length = len(packets)
    for count in counter:
        if (count > 0):
            ratio = (float(count) / length)
            entropy -= (ratio * math.log(ratio, 2))
    return entropy
Compute the entropy of a byte at a given offset.
split: train | is_duplicated: false
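Illustrative values (Python 2 byte strings, as the snippet's ord(pkt[position]) implies; negative positions index from the end of each packet):

>>> entropy_of_byte(['\x41\x00', '\x41\x01'], -1)   # two equally likely values: 1 bit
1.0
>>> entropy_of_byte(['\x41\x00', '\x41\x00'], -1)   # constant byte: no entropy
0.0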
53,672
def read_stored_checksum(target, timestamped=True):
    return read_stored_info(target, field='sha1', timestamped=timestamped)
Read the stored checksum.
split: train | is_duplicated: false
53,673
def _enable_libraries(libraries):
    library_dirs = []
    library_pattern = os.path.join(os.path.dirname(os.path.dirname(google.__file__)), _THIRD_PARTY_LIBRARY_FORMAT_STRING)
    for library in libraries:
        library_dir = os.path.abspath((library_pattern % {'name': library.name, 'version': library.version}))
        library_dirs.append(library_dir)
    return library_dirs
Add enabled libraries to the path.
split: train | is_duplicated: false
53,674
@util.positional(2)
def new_webhook_channel(url, token=None, expiration=None, params=None):
    expiration_ms = 0
    if expiration:
        delta = (expiration - EPOCH)
        expiration_ms = ((delta.microseconds / 1000) + ((delta.seconds + ((delta.days * 24) * 3600)) * 1000))
        if (expiration_ms < 0):
            expiration_ms = 0
    return Channel('web_hook', str(uuid.uuid4()), token, url, expiration=expiration_ms, params=params)
Create a new webhook Channel.
split: train | is_duplicated: false
53,677
def mu_law_encode(audio, quantization_channels):
    with tf.name_scope('encode'):
        mu = (quantization_channels - 1)
        magnitude = (tf.log((1 + (mu * tf.abs(audio)))) / tf.log((1.0 + mu)))
        signal = (tf.sign(audio) * magnitude)
        return tf.cast(((((signal + 1) / 2) * mu) + 0.5), tf.int32)
Quantizes waveform amplitudes.
split: train | is_duplicated: false
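For reference, a NumPy sketch of the same mu-law companding formula (a paraphrase for spot-checking, not part of the dataset; log1p(x) == log(1 + x)):

import numpy as np

def mu_law_encode_np(audio, quantization_channels=256):
    # Compress amplitudes in [-1, 1] logarithmically, then quantize to
    # integers in [0, quantization_channels - 1].
    mu = quantization_channels - 1
    magnitude = np.log1p(mu * np.abs(audio)) / np.log1p(mu)
    signal = np.sign(audio) * magnitude
    return ((signal + 1) / 2 * mu + 0.5).astype(np.int32)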
53,678
def location_to_query(location, wildcard=True, tag='i4x'):
    query = location.to_deprecated_son(prefix='_id.', tag=tag)
    if wildcard:
        for (key, value) in query.items():
            if ((value is None) and (key != '_id.revision')):
                del query[key]
    return query
Takes a location and returns a SON object that will query for that location by subfields rather than subdoc.
split: train | is_duplicated: false
53,679
def _dict_to_list_ids(objects):
    list_with_ids = []
    for (key, value) in six.iteritems(objects):
        element = {'id': key}
        element.update(value)
        list_with_ids.append(element)
    return list_with_ids
Convert a dictionary to a list of dictionaries, promoting each key to an 'id' field.
split: train | is_duplicated: true
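Illustrative input/output (iteration order follows the dict):

>>> _dict_to_list_ids({'a': {'x': 1}, 'b': {'x': 2}})
[{'id': 'a', 'x': 1}, {'id': 'b', 'x': 2}]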
53,680
def profile_get(user, default_hidden=True):
    user_profiles = []
    with salt.utils.fopen('/etc/user_attr', 'r') as user_attr:
        for profile in user_attr:
            profile = profile.strip().split(':')
            if (len(profile) != 5):
                continue
            if (profile[0] != user):
                continue
            attrs = {}
            for attr in profile[4].strip().split(';'):
                (attr_key, attr_val) = attr.strip().split('=')
                if (attr_key in ['auths', 'profiles', 'roles']):
                    attrs[attr_key] = attr_val.strip().split(',')
                else:
                    attrs[attr_key] = attr_val
            if ('profiles' in attrs):
                user_profiles.extend(attrs['profiles'])
    if default_hidden:
        for profile in profile_list(default_only=True):
            if (profile in user_profiles):
                user_profiles.remove(profile)
    return list(set(user_profiles))
List profiles for a user. user: string, username. default_hidden: boolean, hide default profiles. CLI example: .
split: train | is_duplicated: true
53,683
@export_as_api
def update_universe(id_or_symbols):
    if isinstance(id_or_symbols, six.string_types):
        id_or_symbols = [id_or_symbols]
    elif isinstance(id_or_symbols, Instrument):
        id_or_symbols = [Instrument.order_book_id]
    elif isinstance(id_or_symbols, Iterable):
        id_or_symbols = [(item.order_book_id if isinstance(item, Instrument) else item) for item in id_or_symbols]
    else:
        raise RuntimeError(_('unknown type'))
    executor = get_strategy_executor()
    executor.current_universe = set(id_or_symbols)
This method takes one id_or_symbol, or a list of them, as argument(s).
split: train | is_duplicated: false
53,684
def beacons(opts, functions, context=None):
    return LazyLoader(
        _module_dirs(opts, 'beacons'),
        opts,
        tag='beacons',
        pack={'__context__': context, '__salt__': functions},
        virtual_funcs=['__validate__'],
    )
Load the beacon modules.
split: train | is_duplicated: true
53,685
@attr('root')
def onlyroot(meth):
    @wraps(meth)
    def test_inner_onlyroot(self, *args, **kwds):
        'Note that this method needs to start with test_ in order for nose\n to run it!'
        if ((os.geteuid() == 0) or (os.getuid() == 0)):
            return meth(self, *args, **kwds)
        else:
            raise SkipTest('This test requires root privileges.')
    test_inner_onlyroot.root = True
    return test_inner_onlyroot
Function to decorate tests that should be called as root.
split: train | is_duplicated: false
53,686
def alter_db(name, character_set=None, collate=None, **connection_args):
    dbc = _connect(**connection_args)
    if (dbc is None):
        return []
    cur = dbc.cursor()
    existing = db_get(name, **connection_args)
    qry = 'ALTER DATABASE {0} CHARACTER SET {1} COLLATE {2};'.format(
        name.replace('%', '\\%').replace('_', '\\_'),
        (character_set or existing.get('character_set')),
        (collate or existing.get('collate')))
    args = {}
    _execute(cur, qry, args)
Modify a database using an "ALTER DATABASE %s CHARACTER SET %s COLLATE %s;" query.
split: train | is_duplicated: true
53,687
def server_cert_absent(name, region=None, key=None, keyid=None, profile=None):
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    exists = __salt__['boto_iam.get_server_certificate'](name, region, key, keyid, profile)
    if (not exists):
        ret['comment'] = 'Certificate {0} does not exist.'.format(name)
        return ret
    if __opts__['test']:
        ret['comment'] = 'Server certificate {0} is set to be deleted.'.format(name)
        ret['result'] = None
        return ret
    deleted = __salt__['boto_iam.delete_server_cert'](name, region, key, keyid, profile)
    if (not deleted):
        ret['result'] = False
        ret['comment'] = 'Certificate {0} failed to be deleted.'.format(name)
        return ret
    ret['comment'] = 'Certificate {0} was deleted.'.format(name)
    ret['changes'] = deleted
    return ret
Deletes a server certificate.
split: train | is_duplicated: true
53,688
def generate_timestamp():
    return unicode_type(int(time.time()))
Get seconds since epoch.
split: train | is_duplicated: false
53,689
def _incs_list_to_string(incs):
    return ' '.join([('-I ' + path) for path in incs])
Convert an incs list to a string, e.g. ["thirdparty"] becomes "-I thirdparty".
split: train | is_duplicated: false
53,690
def _dnsmasq_pid_for(dev):
    pid_file = _dhcp_file(dev, 'pid')
    if os.path.exists(pid_file):
        try:
            with open(pid_file, 'r') as f:
                return int(f.read())
        except (ValueError, IOError):
            return None
Returns the pid of a prior dnsmasq instance for a bridge/device.
split: train | is_duplicated: false
53,691
def ipNum(w, x, y, z):
    return ((((w << 24) | (x << 16)) | (y << 8)) | z)
Generate an unsigned int from the components of an IP address. Returns: w << 24 | x << 16 | y << 8 | z.
split: train | is_duplicated: false
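Illustrative values:

>>> ipNum(192, 168, 0, 1)          # '192.168.0.1'
3232235521
>>> hex(ipNum(10, 0, 0, 1))        # '10.0.0.1'
'0xa000001'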
53,692
def make_instance(klass, spec, base64encode=False):
    return klass().loadd(spec, base64encode)
Constructs a class instance containing the specified information.
split: train | is_duplicated: false
53,693
def moment(a, moment=1, axis=0, nan_policy='propagate'):
    (a, axis) = _chk_asarray(a, axis)
    (contains_nan, nan_policy) = _contains_nan(a, nan_policy)
    if (contains_nan and (nan_policy == 'omit')):
        a = ma.masked_invalid(a)
        return mstats_basic.moment(a, moment, axis)
    if (a.size == 0):
        if np.isscalar(moment):
            return np.nan
        else:
            return (np.ones(np.asarray(moment).shape, dtype=np.float64) * np.nan)
    if (not np.isscalar(moment)):
        mmnt = [_moment(a, i, axis) for i in moment]
        return np.array(mmnt)
    else:
        return _moment(a, moment, axis)
calculates the nth moment about the mean for a sample .
train
false
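A minimal check of the moment computation, assuming the scipy location scipy.stats.moment, which this snippet mirrors; the second central moment equals the (biased, population) variance.

import numpy as np
from scipy.stats import moment

a = np.array([1.0, 2.0, 2.0, 4.0, 5.0])
# order 2 about the mean is the biased variance
assert np.isclose(moment(a, moment=2), np.var(a))
# a list of orders returns an array; order 1 is zero by construction
m = moment(a, moment=[1, 2])
assert np.isclose(m[0], 0.0)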
53,694
def get_mem_info(): if (not sys.platform.startswith('linux')): raise RuntimeError('Memory information implemented only for Linux') info = {} with open('/proc/meminfo', 'r') as f: for line in f: p = line.split() info[p[0].strip(':').lower()] = (float(p[1]) * 1000.0) return info
[ "def", "get_mem_info", "(", ")", ":", "if", "(", "not", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", ")", ":", "raise", "RuntimeError", "(", "'Memory information implemented only for Linux'", ")", "info", "=", "{", "}", "with", "open", "(", "'/proc/meminfo'", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "p", "=", "line", ".", "split", "(", ")", "info", "[", "p", "[", "0", "]", ".", "strip", "(", "':'", ")", ".", "lower", "(", ")", "]", "=", "(", "float", "(", "p", "[", "1", "]", ")", "*", "1000.0", ")", "return", "info" ]
get information about available memory .
train
false
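A minimal usage sketch, assuming the snippet's get_mem_info is in scope and the host is Linux; /proc/meminfo reports kB, so the * 1000.0 scaling puts the values in (decimal) bytes under lowercased keys such as memtotal and memfree.

info = get_mem_info()  # raises RuntimeError on non-Linux platforms
total_gb = info['memtotal'] / 1e9
free_gb = info.get('memfree', 0.0) / 1e9
print('total: %.2f GB, free: %.2f GB' % (total_gb, free_gb))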
53,695
def get_resource_ignore_params(params): ignore_params = [] for param in params: result = jmespath.compile(param.target) current = result.parsed while current['children']: current = current['children'][0] if (current['type'] == 'field'): ignore_params.append(current['value']) return ignore_params
[ "def", "get_resource_ignore_params", "(", "params", ")", ":", "ignore_params", "=", "[", "]", "for", "param", "in", "params", ":", "result", "=", "jmespath", ".", "compile", "(", "param", ".", "target", ")", "current", "=", "result", ".", "parsed", "while", "current", "[", "'children'", "]", ":", "current", "=", "current", "[", "'children'", "]", "[", "0", "]", "if", "(", "current", "[", "'type'", "]", "==", "'field'", ")", ":", "ignore_params", ".", "append", "(", "current", "[", "'value'", "]", ")", "return", "ignore_params" ]
helper method to determine which parameters to ignore for actions . :returns: a list of the parameter names that do not need to be included in a resource's method call for documentation purposes .
train
false
53,696
def get_nominal(attribute): return attribute.split(',')
[ "def", "get_nominal", "(", "attribute", ")", ":", "return", "attribute", ".", "split", "(", "','", ")" ]
if the attribute is nominal , return its comma-separated values as a list .
train
false
53,698
def get_issue(issue_number, repo_name=None, profile='github', output='min'): org_name = _get_config_value(profile, 'org_name') if (repo_name is None): repo_name = _get_config_value(profile, 'repo_name') action = '/'.join(['repos', org_name, repo_name]) command = ('issues/' + str(issue_number)) ret = {} issue_data = _query(profile, action=action, command=command) issue_id = issue_data.get('id') if (output == 'full'): ret[issue_id] = issue_data else: ret[issue_id] = _format_issue(issue_data) return ret
[ "def", "get_issue", "(", "issue_number", ",", "repo_name", "=", "None", ",", "profile", "=", "'github'", ",", "output", "=", "'min'", ")", ":", "org_name", "=", "_get_config_value", "(", "profile", ",", "'org_name'", ")", "if", "(", "repo_name", "is", "None", ")", ":", "repo_name", "=", "_get_config_value", "(", "profile", ",", "'repo_name'", ")", "action", "=", "'/'", ".", "join", "(", "[", "'repos'", ",", "org_name", ",", "repo_name", "]", ")", "command", "=", "(", "'issues/'", "+", "str", "(", "issue_number", ")", ")", "ret", "=", "{", "}", "issue_data", "=", "_query", "(", "profile", ",", "action", "=", "action", ",", "command", "=", "command", ")", "issue_id", "=", "issue_data", ".", "get", "(", "'id'", ")", "if", "(", "output", "==", "'full'", ")", ":", "ret", "[", "issue_id", "]", "=", "issue_data", "else", ":", "ret", "[", "issue_id", "]", "=", "_format_issue", "(", "issue_data", ")", "return", "ret" ]
return information about a single issue in a named repository .
train
true
53,700
def get_course_info_section_module(request, user, course, section_key): usage_key = course.id.make_usage_key('course_info', section_key) field_data_cache = FieldDataCache([], course.id, user) return get_module(user, request, usage_key, field_data_cache, log_if_not_found=False, wrap_xmodule_display=False, static_asset_path=course.static_asset_path, course=course)
[ "def", "get_course_info_section_module", "(", "request", ",", "user", ",", "course", ",", "section_key", ")", ":", "usage_key", "=", "course", ".", "id", ".", "make_usage_key", "(", "'course_info'", ",", "section_key", ")", "field_data_cache", "=", "FieldDataCache", "(", "[", "]", ",", "course", ".", "id", ",", "user", ")", "return", "get_module", "(", "user", ",", "request", ",", "usage_key", ",", "field_data_cache", ",", "log_if_not_found", "=", "False", ",", "wrap_xmodule_display", "=", "False", ",", "static_asset_path", "=", "course", ".", "static_asset_path", ",", "course", "=", "course", ")" ]
this returns the course info module for a given section_key .
train
false
53,701
def force_release(hosts_to_release, username=None): hosts = models.Host.smart_get_bulk(hosts_to_release) if (not hosts): raise Exception('At least one host must be specified') user = get_user(username) if (not user.is_superuser()): raise Exception('Must be super user to force release') acls = models.AclGroup.objects.all() for user_acl in acls: user_acl.hosts.remove(*hosts) user_acl.on_host_membership_change()
[ "def", "force_release", "(", "hosts_to_release", ",", "username", "=", "None", ")", ":", "hosts", "=", "models", ".", "Host", ".", "smart_get_bulk", "(", "hosts_to_release", ")", "if", "(", "not", "hosts", ")", ":", "raise", "Exception", "(", "'At least one host must be specified'", ")", "user", "=", "get_user", "(", "username", ")", "if", "(", "not", "user", ".", "is_superuser", "(", ")", ")", ":", "raise", "Exception", "(", "'Must be super user to force release'", ")", "acls", "=", "models", ".", "AclGroup", ".", "objects", ".", "all", "(", ")", "for", "user_acl", "in", "acls", ":", "user_acl", ".", "hosts", ".", "remove", "(", "*", "hosts", ")", "user_acl", ".", "on_host_membership_change", "(", ")" ]
force release a collection of hosts from a user . this will remove all acls from the hosts .
train
false
53,702
def mkXRDTag(t): return nsTag(XRD_NS_2_0, t)
[ "def", "mkXRDTag", "(", "t", ")", ":", "return", "nsTag", "(", "XRD_NS_2_0", ",", "t", ")" ]
basestring -> basestring . create a tag name in the xrd 2.0 namespace .
train
false
53,704
def collect_error_snapshots(): if frappe.conf.disable_error_snapshot: return try: path = get_error_snapshot_path() if (not os.path.exists(path)): return for fname in os.listdir(path): fullpath = os.path.join(path, fname) try: with open(fullpath, u'rb') as filedata: data = json.load(filedata) except ValueError: os.remove(fullpath) continue for field in [u'locals', u'exception', u'frames']: data[field] = frappe.as_json(data[field]) doc = frappe.new_doc(u'Error Snapshot') doc.update(data) doc.save() frappe.db.commit() os.remove(fullpath) clear_old_snapshots() except Exception as e: make_error_snapshot(e) raise
[ "def", "collect_error_snapshots", "(", ")", ":", "if", "frappe", ".", "conf", ".", "disable_error_snapshot", ":", "return", "try", ":", "path", "=", "get_error_snapshot_path", "(", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "return", "for", "fname", "in", "os", ".", "listdir", "(", "path", ")", ":", "fullpath", "=", "os", ".", "path", ".", "join", "(", "path", ",", "fname", ")", "try", ":", "with", "open", "(", "fullpath", ",", "u'rb'", ")", "as", "filedata", ":", "data", "=", "json", ".", "load", "(", "filedata", ")", "except", "ValueError", ":", "os", ".", "remove", "(", "fullpath", ")", "continue", "for", "field", "in", "[", "u'locals'", ",", "u'exception'", ",", "u'frames'", "]", ":", "data", "[", "field", "]", "=", "frappe", ".", "as_json", "(", "data", "[", "field", "]", ")", "doc", "=", "frappe", ".", "new_doc", "(", "u'Error Snapshot'", ")", "doc", ".", "update", "(", "data", ")", "doc", ".", "save", "(", ")", "frappe", ".", "db", ".", "commit", "(", ")", "os", ".", "remove", "(", "fullpath", ")", "clear_old_snapshots", "(", ")", "except", "Exception", "as", "e", ":", "make_error_snapshot", "(", "e", ")", "raise" ]
scheduled task to collect error snapshots from files and push into error snapshot table .
train
false
53,705
def auto_reconnect_connection(func): @wraps(func) def inner(self, *args, **kwargs): try: return func(self, *args, **kwargs) except Exception as e: if (not can_reconnect(e)): raise self.close(reconnect=True) return func(self, *args, **kwargs) return inner
[ "def", "auto_reconnect_connection", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "inner", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "(", "not", "can_reconnect", "(", "e", ")", ")", ":", "raise", "self", ".", "close", "(", "reconnect", "=", "True", ")", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "inner" ]
attempt to safely reconnect when an error is hit that resembles the bouncer disconnecting the client due to a timeout/etc .
train
false
53,706
def _getExcelCellName(col, row): return ('%s%i' % (get_column_letter((col + 1)), (row + 1)))
[ "def", "_getExcelCellName", "(", "col", ",", "row", ")", ":", "return", "(", "'%s%i'", "%", "(", "get_column_letter", "(", "(", "col", "+", "1", ")", ")", ",", "(", "row", "+", "1", ")", ")", ")" ]
returns the excel cell name for a row and column .
train
false
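Both indices are zero-based, so (0, 0) maps to A1; this sketch assumes get_column_letter from openpyxl.utils, matching the helper the snippet calls.

from openpyxl.utils import get_column_letter

def _getExcelCellName(col, row):
    # shift to 1-based Excel coordinates
    return '%s%i' % (get_column_letter(col + 1), row + 1)

assert _getExcelCellName(0, 0) == 'A1'
assert _getExcelCellName(26, 9) == 'AA10'  # 27th column is AA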
53,707
def client_generator(port=5557, host='localhost', hwm=20): context = zmq.Context() socket = context.socket(zmq.PULL) socket.set_hwm(hwm) socket.connect('tcp://{}:{}'.format(host, port)) logger.info('client started') while True: data = recv_arrays(socket) (yield tuple(data))
[ "def", "client_generator", "(", "port", "=", "5557", ",", "host", "=", "'localhost'", ",", "hwm", "=", "20", ")", ":", "context", "=", "zmq", ".", "Context", "(", ")", "socket", "=", "context", ".", "socket", "(", "zmq", ".", "PULL", ")", "socket", ".", "set_hwm", "(", "hwm", ")", "socket", ".", "connect", "(", "'tcp://{}:{}'", ".", "format", "(", "host", ",", "port", ")", ")", "logger", ".", "info", "(", "'client started'", ")", "while", "True", ":", "data", "=", "recv_arrays", "(", "socket", ")", "(", "yield", "tuple", "(", "data", ")", ")" ]
generator on the client side ; users should extend this generator . parameters : port : int , hwm : int .
train
false
53,708
def dmp_mul_ground(f, c, u, K): if (not u): return dup_mul_ground(f, c, K) v = (u - 1) return [dmp_mul_ground(cf, c, v, K) for cf in f]
[ "def", "dmp_mul_ground", "(", "f", ",", "c", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_mul_ground", "(", "f", ",", "c", ",", "K", ")", "v", "=", "(", "u", "-", "1", ")", "return", "[", "dmp_mul_ground", "(", "cf", ",", "c", ",", "v", ",", "K", ")", "for", "cf", "in", "f", "]" ]
multiply f by a constant value in k[x] .
train
false
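Dense multivariate polynomials are nested coefficient lists, so multiplying by a ground constant scales every leaf; a sketch assuming the sympy locations sympy.polys.densearith and the integer domain ZZ.

from sympy.polys.densearith import dmp_mul_ground
from sympy.polys.domains import ZZ

# a two-variable polynomial in dense recursive form (u = 1)
f = [[ZZ(1), ZZ(2)], [ZZ(3)]]
# every coefficient is doubled, shape is preserved
assert dmp_mul_ground(f, ZZ(2), 1, ZZ) == [[ZZ(2), ZZ(4)], [ZZ(6)]]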
53,709
def substitute_bindings(fstruct, bindings, fs_class=u'default'): if (fs_class == u'default'): fs_class = _default_fs_class(fstruct) fstruct = copy.deepcopy(fstruct) _substitute_bindings(fstruct, bindings, fs_class, set()) return fstruct
[ "def", "substitute_bindings", "(", "fstruct", ",", "bindings", ",", "fs_class", "=", "u'default'", ")", ":", "if", "(", "fs_class", "==", "u'default'", ")", ":", "fs_class", "=", "_default_fs_class", "(", "fstruct", ")", "fstruct", "=", "copy", ".", "deepcopy", "(", "fstruct", ")", "_substitute_bindings", "(", "fstruct", ",", "bindings", ",", "fs_class", ",", "set", "(", ")", ")", "return", "fstruct" ]
return the feature structure that is obtained by replacing each variable bound by bindings with its binding .
train
false
53,710
def _make_req(node, part, method, path, _headers, stype, conn_timeout=5, response_timeout=15): with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, method, path, headers=_headers) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if (not is_success(resp.status)): raise DirectClientException(stype, method, node, part, path, resp) return resp
[ "def", "_make_req", "(", "node", ",", "part", ",", "method", ",", "path", ",", "_headers", ",", "stype", ",", "conn_timeout", "=", "5", ",", "response_timeout", "=", "15", ")", ":", "with", "Timeout", "(", "conn_timeout", ")", ":", "conn", "=", "http_connect", "(", "node", "[", "'ip'", "]", ",", "node", "[", "'port'", "]", ",", "node", "[", "'device'", "]", ",", "part", ",", "method", ",", "path", ",", "headers", "=", "_headers", ")", "with", "Timeout", "(", "response_timeout", ")", ":", "resp", "=", "conn", ".", "getresponse", "(", ")", "resp", ".", "read", "(", ")", "if", "(", "not", "is_success", "(", "resp", ".", "status", ")", ")", ":", "raise", "DirectClientException", "(", "stype", ",", "method", ",", "node", ",", "part", ",", "path", ",", "resp", ")", "return", "resp" ]
make request to backend storage node .
train
false
53,712
def parse_alpha(args): if ((len(args) == 1) and (args[0].type in (u'NUMBER', u'INTEGER'))): return min(1, max(0, args[0].value))
[ "def", "parse_alpha", "(", "args", ")", ":", "if", "(", "(", "len", "(", "args", ")", "==", "1", ")", "and", "(", "args", "[", "0", "]", ".", "type", "in", "(", "u'NUMBER'", ",", "u'INTEGER'", ")", ")", ")", ":", "return", "min", "(", "1", ",", "max", "(", "0", ",", "args", "[", "0", "]", ".", "value", ")", ")" ]
if args is a list of a single integer or number token , return its value clamped to [0 , 1] .
train
false
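The tokens only need .type and .value here, so a namedtuple stands in for the real css token class (a hypothetical stand-in for illustration); values outside [0, 1] are clamped and non-numeric tokens fall through to None.

from collections import namedtuple

Token = namedtuple('Token', ['type', 'value'])

def parse_alpha(args):
    if len(args) == 1 and args[0].type in ('NUMBER', 'INTEGER'):
        return min(1, max(0, args[0].value))

assert parse_alpha([Token('NUMBER', 0.5)]) == 0.5
assert parse_alpha([Token('NUMBER', 3.0)]) == 1       # clamped high
assert parse_alpha([Token('IDENT', 'auto')]) is None  # wrong type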
53,713
def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): fft = (kernel == 'gau') kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: (grid, y) = (kde.support, kde.cdf) else: (grid, y) = (kde.support, kde.density) return (grid, y)
[ "def", "_statsmodels_univariate_kde", "(", "data", ",", "kernel", ",", "bw", ",", "gridsize", ",", "cut", ",", "clip", ",", "cumulative", "=", "False", ")", ":", "fft", "=", "(", "kernel", "==", "'gau'", ")", "kde", "=", "smnp", ".", "KDEUnivariate", "(", "data", ")", "kde", ".", "fit", "(", "kernel", ",", "bw", ",", "fft", ",", "gridsize", "=", "gridsize", ",", "cut", "=", "cut", ",", "clip", "=", "clip", ")", "if", "cumulative", ":", "(", "grid", ",", "y", ")", "=", "(", "kde", ".", "support", ",", "kde", ".", "cdf", ")", "else", ":", "(", "grid", ",", "y", ")", "=", "(", "kde", ".", "support", ",", "kde", ".", "density", ")", "return", "(", "grid", ",", "y", ")" ]
compute a univariate kernel density estimate using statsmodels .
train
false
53,715
def checkCrash(player, upperPipes, lowerPipes): pi = player['index'] player['w'] = IMAGES['player'][0].get_width() player['h'] = IMAGES['player'][0].get_height() if ((player['y'] + player['h']) >= (BASEY - 1)): return True else: playerRect = pygame.Rect(player['x'], player['y'], player['w'], player['h']) for (uPipe, lPipe) in zip(upperPipes, lowerPipes): uPipeRect = pygame.Rect(uPipe['x'], uPipe['y'], PIPE_WIDTH, PIPE_HEIGHT) lPipeRect = pygame.Rect(lPipe['x'], lPipe['y'], PIPE_WIDTH, PIPE_HEIGHT) pHitMask = HITMASKS['player'][pi] uHitmask = HITMASKS['pipe'][0] lHitmask = HITMASKS['pipe'][1] uCollide = pixelCollision(playerRect, uPipeRect, pHitMask, uHitmask) lCollide = pixelCollision(playerRect, lPipeRect, pHitMask, lHitmask) if (uCollide or lCollide): return True return False
[ "def", "checkCrash", "(", "player", ",", "upperPipes", ",", "lowerPipes", ")", ":", "pi", "=", "player", "[", "'index'", "]", "player", "[", "'w'", "]", "=", "IMAGES", "[", "'player'", "]", "[", "0", "]", ".", "get_width", "(", ")", "player", "[", "'h'", "]", "=", "IMAGES", "[", "'player'", "]", "[", "0", "]", ".", "get_height", "(", ")", "if", "(", "(", "player", "[", "'y'", "]", "+", "player", "[", "'h'", "]", ")", ">=", "(", "BASEY", "-", "1", ")", ")", ":", "return", "True", "else", ":", "playerRect", "=", "pygame", ".", "Rect", "(", "player", "[", "'x'", "]", ",", "player", "[", "'y'", "]", ",", "player", "[", "'w'", "]", ",", "player", "[", "'h'", "]", ")", "for", "(", "uPipe", ",", "lPipe", ")", "in", "zip", "(", "upperPipes", ",", "lowerPipes", ")", ":", "uPipeRect", "=", "pygame", ".", "Rect", "(", "uPipe", "[", "'x'", "]", ",", "uPipe", "[", "'y'", "]", ",", "PIPE_WIDTH", ",", "PIPE_HEIGHT", ")", "lPipeRect", "=", "pygame", ".", "Rect", "(", "lPipe", "[", "'x'", "]", ",", "lPipe", "[", "'y'", "]", ",", "PIPE_WIDTH", ",", "PIPE_HEIGHT", ")", "pHitMask", "=", "HITMASKS", "[", "'player'", "]", "[", "pi", "]", "uHitmask", "=", "HITMASKS", "[", "'pipe'", "]", "[", "0", "]", "lHitmask", "=", "HITMASKS", "[", "'pipe'", "]", "[", "1", "]", "uCollide", "=", "pixelCollision", "(", "playerRect", ",", "uPipeRect", ",", "pHitMask", ",", "uHitmask", ")", "lCollide", "=", "pixelCollision", "(", "playerRect", ",", "lPipeRect", ",", "pHitMask", ",", "lHitmask", ")", "if", "(", "uCollide", "or", "lCollide", ")", ":", "return", "True", "return", "False" ]
returns true if player collides with base or pipes .
train
false
53,716
def unpickle(fname): with smart_open(fname, 'rb') as f: if (sys.version_info > (3, 0)): return _pickle.load(f, encoding='latin1') else: return _pickle.loads(f.read())
[ "def", "unpickle", "(", "fname", ")", ":", "with", "smart_open", "(", "fname", ",", "'rb'", ")", "as", "f", ":", "if", "(", "sys", ".", "version_info", ">", "(", "3", ",", "0", ")", ")", ":", "return", "_pickle", ".", "load", "(", "f", ",", "encoding", "=", "'latin1'", ")", "else", ":", "return", "_pickle", ".", "loads", "(", "f", ".", "read", "(", ")", ")" ]
load pickled object from fname .
train
false
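A round-trip sketch of the same logic; plain open stands in for the smart_open helper used above (which also handles compressed and remote paths), and the latin1 encoding branch matters when loading python-2 pickles under python 3.

import os, pickle, sys, tempfile

obj = {'vector': [1.0, 2.0], 'label': 'doc-1'}
fd, fname = tempfile.mkstemp()
os.close(fd)
with open(fname, 'wb') as f:
    pickle.dump(obj, f)

with open(fname, 'rb') as f:
    if sys.version_info > (3, 0):
        restored = pickle.load(f, encoding='latin1')
    else:
        restored = pickle.loads(f.read())
assert restored == obj
os.remove(fname)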
53,718
def _write_with_fallback(s, write, fileobj): if ((IPythonIOStream is not None) and isinstance(fileobj, IPythonIOStream)): write = fileobj.stream.write try: write(s) return write except UnicodeEncodeError: pass enc = locale.getpreferredencoding() try: Writer = codecs.getwriter(enc) except LookupError: Writer = codecs.getwriter(_DEFAULT_ENCODING) f = Writer(fileobj) write = f.write try: write(s) return write except UnicodeEncodeError: Writer = codecs.getwriter(u'latin-1') f = Writer(fileobj) write = f.write write(s) return write
[ "def", "_write_with_fallback", "(", "s", ",", "write", ",", "fileobj", ")", ":", "if", "(", "(", "IPythonIOStream", "is", "not", "None", ")", "and", "isinstance", "(", "fileobj", ",", "IPythonIOStream", ")", ")", ":", "write", "=", "fileobj", ".", "stream", ".", "write", "try", ":", "write", "(", "s", ")", "return", "write", "except", "UnicodeEncodeError", ":", "pass", "enc", "=", "locale", ".", "getpreferredencoding", "(", ")", "try", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "enc", ")", "except", "LookupError", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "_DEFAULT_ENCODING", ")", "f", "=", "Writer", "(", "fileobj", ")", "write", "=", "f", ".", "write", "try", ":", "write", "(", "s", ")", "return", "write", "except", "UnicodeEncodeError", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "u'latin-1'", ")", "f", "=", "Writer", "(", "fileobj", ")", "write", "=", "f", ".", "write", "write", "(", "s", ")", "return", "write" ]
write the supplied string with the given write function like write(s) , falling back to encoding-aware writers when a unicodeencodeerror is raised .
train
true
53,719
def claim_build(registry, xml_parent, data): XML.SubElement(xml_parent, 'hudson.plugins.claim.ClaimPublisher')
[ "def", "claim_build", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.claim.ClaimPublisher'", ")" ]
yaml: claim-build . claim build failures . requires the jenkins claim plugin <claim+plugin> .
train
false
53,722
def create_credential_resolver(session): profile_name = (session.get_config_variable('profile') or 'default') credential_file = session.get_config_variable('credentials_file') config_file = session.get_config_variable('config_file') metadata_timeout = session.get_config_variable('metadata_service_timeout') num_attempts = session.get_config_variable('metadata_service_num_attempts') env_provider = EnvProvider() providers = [env_provider, AssumeRoleProvider(load_config=(lambda : session.full_config), client_creator=session.create_client, cache={}, profile_name=profile_name), SharedCredentialProvider(creds_filename=credential_file, profile_name=profile_name), ConfigProvider(config_filename=config_file, profile_name=profile_name), OriginalEC2Provider(), BotoProvider(), ContainerProvider(), InstanceMetadataProvider(iam_role_fetcher=InstanceMetadataFetcher(timeout=metadata_timeout, num_attempts=num_attempts))] explicit_profile = session.get_config_variable('profile', methods=('instance',)) if (explicit_profile is not None): providers.remove(env_provider) logger.debug('Skipping environment variable credential check because profile name was explicitly set.') resolver = CredentialResolver(providers=providers) return resolver
[ "def", "create_credential_resolver", "(", "session", ")", ":", "profile_name", "=", "(", "session", ".", "get_config_variable", "(", "'profile'", ")", "or", "'default'", ")", "credential_file", "=", "session", ".", "get_config_variable", "(", "'credentials_file'", ")", "config_file", "=", "session", ".", "get_config_variable", "(", "'config_file'", ")", "metadata_timeout", "=", "session", ".", "get_config_variable", "(", "'metadata_service_timeout'", ")", "num_attempts", "=", "session", ".", "get_config_variable", "(", "'metadata_service_num_attempts'", ")", "env_provider", "=", "EnvProvider", "(", ")", "providers", "=", "[", "env_provider", ",", "AssumeRoleProvider", "(", "load_config", "=", "(", "lambda", ":", "session", ".", "full_config", ")", ",", "client_creator", "=", "session", ".", "create_client", ",", "cache", "=", "{", "}", ",", "profile_name", "=", "profile_name", ")", ",", "SharedCredentialProvider", "(", "creds_filename", "=", "credential_file", ",", "profile_name", "=", "profile_name", ")", ",", "ConfigProvider", "(", "config_filename", "=", "config_file", ",", "profile_name", "=", "profile_name", ")", ",", "OriginalEC2Provider", "(", ")", ",", "BotoProvider", "(", ")", ",", "ContainerProvider", "(", ")", ",", "InstanceMetadataProvider", "(", "iam_role_fetcher", "=", "InstanceMetadataFetcher", "(", "timeout", "=", "metadata_timeout", ",", "num_attempts", "=", "num_attempts", ")", ")", "]", "explicit_profile", "=", "session", ".", "get_config_variable", "(", "'profile'", ",", "methods", "=", "(", "'instance'", ",", ")", ")", "if", "(", "explicit_profile", "is", "not", "None", ")", ":", "providers", ".", "remove", "(", "env_provider", ")", "logger", ".", "debug", "(", "'Skipping environment variable credential check because profile name was explicitly set.'", ")", "resolver", "=", "CredentialResolver", "(", "providers", "=", "providers", ")", "return", "resolver" ]
create a default credential resolver .
train
false
53,723
def get_func_full_args(func): sig = inspect.signature(func) args = [] for (arg_name, param) in sig.parameters.items(): name = arg_name if (name == 'self'): continue if (param.kind == inspect.Parameter.VAR_POSITIONAL): name = ('*' + name) elif (param.kind == inspect.Parameter.VAR_KEYWORD): name = ('**' + name) if (param.default != inspect.Parameter.empty): args.append((name, param.default)) else: args.append((name,)) return args
[ "def", "get_func_full_args", "(", "func", ")", ":", "sig", "=", "inspect", ".", "signature", "(", "func", ")", "args", "=", "[", "]", "for", "(", "arg_name", ",", "param", ")", "in", "sig", ".", "parameters", ".", "items", "(", ")", ":", "name", "=", "arg_name", "if", "(", "name", "==", "'self'", ")", ":", "continue", "if", "(", "param", ".", "kind", "==", "inspect", ".", "Parameter", ".", "VAR_POSITIONAL", ")", ":", "name", "=", "(", "'*'", "+", "name", ")", "elif", "(", "param", ".", "kind", "==", "inspect", ".", "Parameter", ".", "VAR_KEYWORD", ")", ":", "name", "=", "(", "'**'", "+", "name", ")", "if", "(", "param", ".", "default", "!=", "inspect", ".", "Parameter", ".", "empty", ")", ":", "args", ".", "append", "(", "(", "name", ",", "param", ".", "default", ")", ")", "else", ":", "args", ".", "append", "(", "(", "name", ",", ")", ")", "return", "args" ]
return a list of tuples , one per argument : (name , default) when the argument has a default , else (name ,) ; *args and **kwargs names keep their prefixes .
train
false
53,724
def build_flow_dict(G, R): flow_dict = {} for u in G: flow_dict[u] = dict(((v, 0) for v in G[u])) flow_dict[u].update(((v, attr['flow']) for (v, attr) in R[u].items() if (attr['flow'] > 0))) return flow_dict
[ "def", "build_flow_dict", "(", "G", ",", "R", ")", ":", "flow_dict", "=", "{", "}", "for", "u", "in", "G", ":", "flow_dict", "[", "u", "]", "=", "dict", "(", "(", "(", "v", ",", "0", ")", "for", "v", "in", "G", "[", "u", "]", ")", ")", "flow_dict", "[", "u", "]", ".", "update", "(", "(", "(", "v", ",", "attr", "[", "'flow'", "]", ")", "for", "(", "v", ",", "attr", ")", "in", "R", "[", "u", "]", ".", "items", "(", ")", "if", "(", "attr", "[", "'flow'", "]", ">", "0", ")", ")", ")", "return", "flow_dict" ]
build a flow dictionary from a residual network .
train
false
53,725
def getfullargspec(func): try: return inspect.getfullargspec(func) except AttributeError: arg_spec = inspect.getargspec(func) import collections tuple_fields = 'args varargs varkw defaults kwonlyargs kwonlydefaults annotations' tuple_type = collections.namedtuple('FullArgSpec', tuple_fields) return tuple_type(args=arg_spec.args, varargs=arg_spec.varargs, varkw=arg_spec.keywords, defaults=arg_spec.defaults, kwonlyargs=[], kwonlydefaults=None, annotations={})
[ "def", "getfullargspec", "(", "func", ")", ":", "try", ":", "return", "inspect", ".", "getfullargspec", "(", "func", ")", "except", "AttributeError", ":", "arg_spec", "=", "inspect", ".", "getargspec", "(", "func", ")", "import", "collections", "tuple_fields", "=", "'args varargs varkw defaults kwonlyargs kwonlydefaults annotations'", "tuple_type", "=", "collections", ".", "namedtuple", "(", "'FullArgSpec'", ",", "tuple_fields", ")", "return", "tuple_type", "(", "args", "=", "arg_spec", ".", "args", ",", "varargs", "=", "arg_spec", ".", "varargs", ",", "varkw", "=", "arg_spec", ".", "keywords", ",", "defaults", "=", "arg_spec", ".", "defaults", ",", "kwonlyargs", "=", "[", "]", ",", "kwonlydefaults", "=", "None", ",", "annotations", "=", "{", "}", ")" ]
compatibility function to provide inspect.getfullargspec on python 2 .
train
false
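On python 3 the shim simply delegates; on python 2 the namedtuple fills the extra fields with empties so callers keep one code path. A usage sketch assuming the snippet's getfullargspec is in scope:

def handler(request, timeout=30, *args, **kwargs):
    pass

spec = getfullargspec(handler)
assert spec.args == ['request', 'timeout']
assert spec.varargs == 'args' and spec.varkw == 'kwargs'
assert spec.defaults == (30,)
assert spec.kwonlyargs == []  # present even under python 2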
53,727
def find_and_create_file_from_metadata(children, source, destination, destination_node, obj): for item in children: if (not item['materialized'].startswith('/')): item['materialized'] = ('/' + item['materialized']) if (item['kind'] == 'folder'): return find_and_create_file_from_metadata(item.get('children', []), source, destination, destination_node, obj) elif ((item['kind'] == 'file') and (item['materialized'].replace(destination['materialized'], source['materialized']) == obj.referent.materialized_path)): data = dict(item) new_file = FileNode.resolve_class(destination['provider'], FileNode.FILE).get_or_create(destination_node, item['path']) if (destination['provider'] != 'osfstorage'): new_file.update(revision=None, data=data) return new_file
[ "def", "find_and_create_file_from_metadata", "(", "children", ",", "source", ",", "destination", ",", "destination_node", ",", "obj", ")", ":", "for", "item", "in", "children", ":", "if", "(", "not", "item", "[", "'materialized'", "]", ".", "startswith", "(", "'/'", ")", ")", ":", "item", "[", "'materialized'", "]", "=", "(", "'/'", "+", "item", "[", "'materialized'", "]", ")", "if", "(", "item", "[", "'kind'", "]", "==", "'folder'", ")", ":", "return", "find_and_create_file_from_metadata", "(", "item", ".", "get", "(", "'children'", ",", "[", "]", ")", ",", "source", ",", "destination", ",", "destination_node", ",", "obj", ")", "elif", "(", "(", "item", "[", "'kind'", "]", "==", "'file'", ")", "and", "(", "item", "[", "'materialized'", "]", ".", "replace", "(", "destination", "[", "'materialized'", "]", ",", "source", "[", "'materialized'", "]", ")", "==", "obj", ".", "referent", ".", "materialized_path", ")", ")", ":", "data", "=", "dict", "(", "item", ")", "new_file", "=", "FileNode", ".", "resolve_class", "(", "destination", "[", "'provider'", "]", ",", "FileNode", ".", "FILE", ")", ".", "get_or_create", "(", "destination_node", ",", "item", "[", "'path'", "]", ")", "if", "(", "destination", "[", "'provider'", "]", "!=", "'osfstorage'", ")", ":", "new_file", ".", "update", "(", "revision", "=", "None", ",", "data", "=", "data", ")", "return", "new_file" ]
given a guid obj , walk the metadata children to find the matching file and create it at the destination .
train
false
53,728
def check_page_faults(con, host, port, warning, critical, perf_data): warning = (warning or 10) critical = (critical or 30) data = get_server_status(con) try: page_faults = float(data['extra_info']['page_faults']) except: return exit_with_general_critical('page_faults unsupported on the underlaying system') (err, delta) = maintain_delta([page_faults], host, port, 'page_faults') if (err == 0): page_faults_ps = (delta[1] / delta[0]) message = ('Page faults : %.2f ps' % page_faults_ps) message += performance_data(perf_data, [(('%.2f' % page_faults_ps), 'page_faults_ps', warning, critical)]) return check_levels(page_faults_ps, warning, critical, message) else: return exit_with_general_warning('problem reading data from temp file')
[ "def", "check_page_faults", "(", "con", ",", "host", ",", "port", ",", "warning", ",", "critical", ",", "perf_data", ")", ":", "warning", "=", "(", "warning", "or", "10", ")", "critical", "=", "(", "critical", "or", "30", ")", "data", "=", "get_server_status", "(", "con", ")", "try", ":", "page_faults", "=", "float", "(", "data", "[", "'extra_info'", "]", "[", "'page_faults'", "]", ")", "except", ":", "return", "exit_with_general_critical", "(", "'page_faults unsupported on the underlaying system'", ")", "(", "err", ",", "delta", ")", "=", "maintain_delta", "(", "[", "page_faults", "]", ",", "host", ",", "port", ",", "'page_faults'", ")", "if", "(", "err", "==", "0", ")", ":", "page_faults_ps", "=", "(", "delta", "[", "1", "]", "/", "delta", "[", "0", "]", ")", "message", "=", "(", "'Page faults : %.2f ps'", "%", "page_faults_ps", ")", "message", "+=", "performance_data", "(", "perf_data", ",", "[", "(", "(", "'%.2f'", "%", "page_faults_ps", ")", ",", "'page_faults_ps'", ",", "warning", ",", "critical", ")", "]", ")", "return", "check_levels", "(", "page_faults_ps", ",", "warning", ",", "critical", ",", "message", ")", "else", ":", "return", "exit_with_general_warning", "(", "'problem reading data from temp file'", ")" ]
a function to get page_faults per second from the system .
train
false
53,729
@pytest.fixture(autouse=True) def mock_inline_css(monkeypatch): from olympia.amo import helpers monkeypatch.setattr(helpers, 'is_external', (lambda css: True))
[ "@", "pytest", ".", "fixture", "(", "autouse", "=", "True", ")", "def", "mock_inline_css", "(", "monkeypatch", ")", ":", "from", "olympia", ".", "amo", "import", "helpers", "monkeypatch", ".", "setattr", "(", "helpers", ",", "'is_external'", ",", "(", "lambda", "css", ":", "True", ")", ")" ]
mock jingo_minify helpers so css is treated as external .
train
false
53,730
def assert_json_response(response, status_code, body, headers=None, body_cmp=operator.eq): headers = dict((headers or {})) headers['Content-Type'] = 'application/json' def json_cmp(response_body, body): return body_cmp(json.loads(response_body.decode('utf-8')), body) assert_response(response, status_code, body, headers, json_cmp)
[ "def", "assert_json_response", "(", "response", ",", "status_code", ",", "body", ",", "headers", "=", "None", ",", "body_cmp", "=", "operator", ".", "eq", ")", ":", "headers", "=", "dict", "(", "(", "headers", "or", "{", "}", ")", ")", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "def", "json_cmp", "(", "response_body", ",", "body", ")", ":", "return", "body_cmp", "(", "json", ".", "loads", "(", "response_body", ".", "decode", "(", "'utf-8'", ")", ")", ",", "body", ")", "assert_response", "(", "response", ",", "status_code", ",", "body", ",", "headers", ",", "json_cmp", ")" ]
assert a json response has the expected status_code , body , and headers .
train
false
53,732
def set_review_unavailable(apps, schema_editor): DocumentSpamAttempt = apps.get_model(u'wiki', u'DocumentSpamAttempt') to_set = DocumentSpamAttempt.objects.filter(data__isnull=True, review=NEEDS_REVIEW) to_set.update(review=REVIEW_UNAVAILABLE)
[ "def", "set_review_unavailable", "(", "apps", ",", "schema_editor", ")", ":", "DocumentSpamAttempt", "=", "apps", ".", "get_model", "(", "u'wiki'", ",", "u'DocumentSpamAttempt'", ")", "to_set", "=", "DocumentSpamAttempt", ".", "objects", ".", "filter", "(", "data__isnull", "=", "True", ",", "review", "=", "NEEDS_REVIEW", ")", "to_set", ".", "update", "(", "review", "=", "REVIEW_UNAVAILABLE", ")" ]
for historic documentspamattempt rows with no data , set the review state to unavailable .
train
false
53,734
@verbose def read_epochs(fname, proj=True, preload=True, verbose=None): return EpochsFIF(fname, proj, False, preload, verbose)
[ "@", "verbose", "def", "read_epochs", "(", "fname", ",", "proj", "=", "True", ",", "preload", "=", "True", ",", "verbose", "=", "None", ")", ":", "return", "EpochsFIF", "(", "fname", ",", "proj", ",", "False", ",", "preload", ",", "verbose", ")" ]
read epochs from a fif file .
train
false
53,735
@not_implemented_for('directed') @not_implemented_for('multigraph') def cycle_basis(G, root=None): gnodes = set(G.nodes()) cycles = [] while gnodes: if (root is None): root = gnodes.pop() stack = [root] pred = {root: root} used = {root: set()} while stack: z = stack.pop() zused = used[z] for nbr in G[z]: if (nbr not in used): pred[nbr] = z stack.append(nbr) used[nbr] = set([z]) elif (nbr == z): cycles.append([z]) elif (nbr not in zused): pn = used[nbr] cycle = [nbr, z] p = pred[z] while (p not in pn): cycle.append(p) p = pred[p] cycle.append(p) cycles.append(cycle) used[nbr].add(z) gnodes -= set(pred) root = None return cycles
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "cycle_basis", "(", "G", ",", "root", "=", "None", ")", ":", "gnodes", "=", "set", "(", "G", ".", "nodes", "(", ")", ")", "cycles", "=", "[", "]", "while", "gnodes", ":", "if", "(", "root", "is", "None", ")", ":", "root", "=", "gnodes", ".", "pop", "(", ")", "stack", "=", "[", "root", "]", "pred", "=", "{", "root", ":", "root", "}", "used", "=", "{", "root", ":", "set", "(", ")", "}", "while", "stack", ":", "z", "=", "stack", ".", "pop", "(", ")", "zused", "=", "used", "[", "z", "]", "for", "nbr", "in", "G", "[", "z", "]", ":", "if", "(", "nbr", "not", "in", "used", ")", ":", "pred", "[", "nbr", "]", "=", "z", "stack", ".", "append", "(", "nbr", ")", "used", "[", "nbr", "]", "=", "set", "(", "[", "z", "]", ")", "elif", "(", "nbr", "==", "z", ")", ":", "cycles", ".", "append", "(", "[", "z", "]", ")", "elif", "(", "nbr", "not", "in", "zused", ")", ":", "pn", "=", "used", "[", "nbr", "]", "cycle", "=", "[", "nbr", ",", "z", "]", "p", "=", "pred", "[", "z", "]", "while", "(", "p", "not", "in", "pn", ")", ":", "cycle", ".", "append", "(", "p", ")", "p", "=", "pred", "[", "p", "]", "cycle", ".", "append", "(", "p", ")", "cycles", ".", "append", "(", "cycle", ")", "used", "[", "nbr", "]", ".", "add", "(", "z", ")", "gnodes", "-=", "set", "(", "pred", ")", "root", "=", "None", "return", "cycles" ]
returns a list of cycles which form a basis for cycles of g .
train
false
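networkx ships this as nx.cycle_basis; for two triangles sharing an edge the basis has exactly E - V + 1 = 2 cycles, and any other cycle in the graph is a symmetric difference of basis members.

import networkx as nx

G = nx.Graph()
G.add_edges_from([(0, 1), (1, 2), (2, 0),  # triangle A
                  (2, 3), (3, 0)])         # triangle B shares edge (0, 2)
basis = nx.cycle_basis(G)
assert len(basis) == 2  # cycle rank: 5 edges - 4 nodes + 1 component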
53,736
def get_create_test_view_sql(): dir_path = os.path.dirname(os.path.abspath(__file__)) sql_path = os.path.join(dir_path, 'sql', 'tko-test-view-2.sql') return open(sql_path).read()
[ "def", "get_create_test_view_sql", "(", ")", ":", "dir_path", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "sql_path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "'sql'", ",", "'tko-test-view-2.sql'", ")", "return", "open", "(", "sql_path", ")", ".", "read", "(", ")" ]
returns the sql code that creates the test view .
train
false
53,737
def _parse_relators(rels): return rels
[ "def", "_parse_relators", "(", "rels", ")", ":", "return", "rels" ]
parse the passed relators .
train
false
53,738
def shebang_matches(text, regex): index = text.find('\n') if (index >= 0): first_line = text[:index].lower() else: first_line = text.lower() if first_line.startswith('#!'): try: found = [x for x in split_path_re.split(first_line[2:].strip()) if (x and (not x.startswith('-')))][(-1)] except IndexError: return False regex = re.compile(('^%s(\\.(exe|cmd|bat|bin))?$' % regex), re.IGNORECASE) if (regex.search(found) is not None): return True return False
[ "def", "shebang_matches", "(", "text", ",", "regex", ")", ":", "index", "=", "text", ".", "find", "(", "'\\n'", ")", "if", "(", "index", ">=", "0", ")", ":", "first_line", "=", "text", "[", ":", "index", "]", ".", "lower", "(", ")", "else", ":", "first_line", "=", "text", ".", "lower", "(", ")", "if", "first_line", ".", "startswith", "(", "'#!'", ")", ":", "try", ":", "found", "=", "[", "x", "for", "x", "in", "split_path_re", ".", "split", "(", "first_line", "[", "2", ":", "]", ".", "strip", "(", ")", ")", "if", "(", "x", "and", "(", "not", "x", ".", "startswith", "(", "'-'", ")", ")", ")", "]", "[", "(", "-", "1", ")", "]", "except", "IndexError", ":", "return", "False", "regex", "=", "re", ".", "compile", "(", "(", "'^%s(\\\\.(exe|cmd|bat|bin))?$'", "%", "regex", ")", ",", "re", ".", "IGNORECASE", ")", "if", "(", "regex", ".", "search", "(", "found", ")", "is", "not", "None", ")", ":", "return", "True", "return", "False" ]
check if the given regular expression matches the last part of the shebang if one exists .
train
true
53,739
def theq(a, b): astr = theano.printing.debugprint(a, file='str') bstr = theano.printing.debugprint(b, file='str') if (not (astr == bstr)): print () print astr print bstr return (astr == bstr)
[ "def", "theq", "(", "a", ",", "b", ")", ":", "astr", "=", "theano", ".", "printing", ".", "debugprint", "(", "a", ",", "file", "=", "'str'", ")", "bstr", "=", "theano", ".", "printing", ".", "debugprint", "(", "b", ",", "file", "=", "'str'", ")", "if", "(", "not", "(", "astr", "==", "bstr", ")", ")", ":", "print", "(", ")", "print", "astr", "print", "bstr", "return", "(", "astr", "==", "bstr", ")" ]
theano graph equality , compared via debugprint output .
train
false
53,740
def PrintUsageExit(code): print (sys.modules['__main__'].__doc__ % sys.argv[0]) sys.stdout.flush() sys.stderr.flush() sys.exit(code)
[ "def", "PrintUsageExit", "(", "code", ")", ":", "print", "(", "sys", ".", "modules", "[", "'__main__'", "]", ".", "__doc__", "%", "sys", ".", "argv", "[", "0", "]", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "sys", ".", "exit", "(", "code", ")" ]
prints usage information and exits with a status code .
train
false
53,741
def process_mistral_config(config_path): assert config_path.startswith('/tmp') if (not os.path.isfile(config_path)): return config = ConfigParser() config.read(config_path) for (section, options) in MISTRAL_CONF_OPTIONS_TO_REMOVE.items(): for option in options: if config.has_option(section, option): config.set(section, option, REMOVED_VALUE_NAME) with open(config_path, 'w') as fp: config.write(fp)
[ "def", "process_mistral_config", "(", "config_path", ")", ":", "assert", "config_path", ".", "startswith", "(", "'/tmp'", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "config_path", ")", ")", ":", "return", "config", "=", "ConfigParser", "(", ")", "config", ".", "read", "(", "config_path", ")", "for", "(", "section", ",", "options", ")", "in", "MISTRAL_CONF_OPTIONS_TO_REMOVE", ".", "items", "(", ")", ":", "for", "option", "in", "options", ":", "if", "config", ".", "has_option", "(", "section", ",", "option", ")", ":", "config", ".", "set", "(", "section", ",", "option", ",", "REMOVED_VALUE_NAME", ")", "with", "open", "(", "config_path", ",", "'w'", ")", "as", "fp", ":", "config", ".", "write", "(", "fp", ")" ]
remove sensitive data from the mistral config .
train
false
53,743
def download_youtube_subs(youtube_id, video_descriptor, settings): i18n = video_descriptor.runtime.service(video_descriptor, 'i18n') _ = i18n.ugettext subs = get_transcripts_from_youtube(youtube_id, settings, i18n) save_subs_to_store(subs, youtube_id, video_descriptor) log.info('Transcripts for youtube_id %s for 1.0 speed are downloaded and saved.', youtube_id)
[ "def", "download_youtube_subs", "(", "youtube_id", ",", "video_descriptor", ",", "settings", ")", ":", "i18n", "=", "video_descriptor", ".", "runtime", ".", "service", "(", "video_descriptor", ",", "'i18n'", ")", "_", "=", "i18n", ".", "ugettext", "subs", "=", "get_transcripts_from_youtube", "(", "youtube_id", ",", "settings", ",", "i18n", ")", "save_subs_to_store", "(", "subs", ",", "youtube_id", ",", "video_descriptor", ")", "log", ".", "info", "(", "'Transcripts for youtube_id %s for 1.0 speed are downloaded and saved.'", ",", "youtube_id", ")" ]
download transcripts from youtube and save them to assets .
train
false
53,745
def _machinectl(cmd, output_loglevel='debug', ignore_retcode=False, use_vt=False): prefix = 'machinectl --no-legend --no-pager' return __salt__['cmd.run_all']('{0} {1}'.format(prefix, cmd), output_loglevel=output_loglevel, ignore_retcode=ignore_retcode, use_vt=use_vt)
[ "def", "_machinectl", "(", "cmd", ",", "output_loglevel", "=", "'debug'", ",", "ignore_retcode", "=", "False", ",", "use_vt", "=", "False", ")", ":", "prefix", "=", "'machinectl --no-legend --no-pager'", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "'{0} {1}'", ".", "format", "(", "prefix", ",", "cmd", ")", ",", "output_loglevel", "=", "output_loglevel", ",", "ignore_retcode", "=", "ignore_retcode", ",", "use_vt", "=", "use_vt", ")" ]
helper function to run machinectl .
train
true
53,746
def scott_bin_width(data, return_bins=False): data = np.asarray(data) if (data.ndim != 1): raise ValueError(u'data should be one-dimensional') n = data.size sigma = np.std(data) dx = ((3.5 * sigma) / (n ** (1 / 3))) if return_bins: Nbins = np.ceil(((data.max() - data.min()) / dx)) Nbins = max(1, Nbins) bins = (data.min() + (dx * np.arange((Nbins + 1)))) return (dx, bins) else: return dx
[ "def", "scott_bin_width", "(", "data", ",", "return_bins", "=", "False", ")", ":", "data", "=", "np", ".", "asarray", "(", "data", ")", "if", "(", "data", ".", "ndim", "!=", "1", ")", ":", "raise", "ValueError", "(", "u'data should be one-dimensional'", ")", "n", "=", "data", ".", "size", "sigma", "=", "np", ".", "std", "(", "data", ")", "dx", "=", "(", "(", "3.5", "*", "sigma", ")", "/", "(", "n", "**", "(", "1", "/", "3", ")", ")", ")", "if", "return_bins", ":", "Nbins", "=", "np", ".", "ceil", "(", "(", "(", "data", ".", "max", "(", ")", "-", "data", ".", "min", "(", ")", ")", "/", "dx", ")", ")", "Nbins", "=", "max", "(", "1", ",", "Nbins", ")", "bins", "=", "(", "data", ".", "min", "(", ")", "+", "(", "dx", "*", "np", ".", "arange", "(", "(", "Nbins", "+", "1", ")", ")", ")", ")", "return", "(", "dx", ",", "bins", ")", "else", ":", "return", "dx" ]
return the optimal histogram bin width using scotts rule . scotts rule is a normal reference rule : it minimizes the integrated mean squared error in the bin approximation under the assumption that the data is approximately gaussian .
train
false
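astropy ships this as astropy.stats.scott_bin_width; for a standard normal sample of n = 1000 points the formula gives dx = 3.5 * sigma / n**(1/3), roughly 0.35.

import numpy as np
from astropy.stats import scott_bin_width

rng = np.random.RandomState(0)
data = rng.standard_normal(1000)

dx, bins = scott_bin_width(data, return_bins=True)
assert 0.25 < dx < 0.45  # sigma is close to 1 here
assert bins[0] <= data.min() <= data.max() <= bins[-1]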
53,747
def _quoteattr(data, entities={}): entities['\n'] = '&#10;' entities['\r'] = '&#12;' data = _escape(data, entities) if ('"' in data): if ("'" in data): data = ('"%s"' % data.replace('"', '&quot;')) else: data = ("'%s'" % data) else: data = ('"%s"' % data) return data
[ "def", "_quoteattr", "(", "data", ",", "entities", "=", "{", "}", ")", ":", "entities", "[", "'\\n'", "]", "=", "'&#10;'", "entities", "[", "'\\r'", "]", "=", "'&#12;'", "data", "=", "_escape", "(", "data", ",", "entities", ")", "if", "(", "'\"'", "in", "data", ")", ":", "if", "(", "\"'\"", "in", "data", ")", ":", "data", "=", "(", "'\"%s\"'", "%", "data", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", ")", "else", ":", "data", "=", "(", "\"'%s'\"", "%", "data", ")", "else", ":", "data", "=", "(", "'\"%s\"'", "%", "data", ")", "return", "data" ]
escape and quote an attribute value .
train
false
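The standard library's xml.sax.saxutils.quoteattr applies the same quoting rules, so it works as a stand-in for checking the behavior sketched above:

from xml.sax.saxutils import quoteattr

assert quoteattr('plain') == '"plain"'
# a value containing double quotes flips to single-quote delimiters
assert quoteattr('say "hi"') == '\'say "hi"\''
# a value containing both kinds escapes the double quotes instead
assert quoteattr('both "x" and \'y\'') == '"both &quot;x&quot; and \'y\'"'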
53,748
def default_key_func(key, key_prefix, version): return ('%s:%s:%s' % (key_prefix, version, key))
[ "def", "default_key_func", "(", "key", ",", "key_prefix", ",", "version", ")", ":", "return", "(", "'%s:%s:%s'", "%", "(", "key_prefix", ",", "version", ",", "key", ")", ")" ]
default function to generate keys .
train
false
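django's cache layer calls this to namespace raw keys; a quick check, assuming the snippet's default_key_func is in scope:

assert default_key_func('user:42', 'myapp', 1) == 'myapp:1:user:42'
# bumping the version invalidates old entries without touching the backend
assert default_key_func('user:42', 'myapp', 2) == 'myapp:2:user:42'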
53,749
def send_message(to, text, sender=None): if sender: msg = OutboxMessage.objects.create(sender=sender, message=text) msg.to.add(*to) for user in to: im = InboxMessage.objects.create(sender=sender, to=user, message=text) if Setting.get_for_user(user, 'email_private_messages'): email_private_message(inbox_message_id=im.id) message_sent.send(sender=InboxMessage, to=to, text=text, msg_sender=sender)
[ "def", "send_message", "(", "to", ",", "text", ",", "sender", "=", "None", ")", ":", "if", "sender", ":", "msg", "=", "OutboxMessage", ".", "objects", ".", "create", "(", "sender", "=", "sender", ",", "message", "=", "text", ")", "msg", ".", "to", ".", "add", "(", "*", "to", ")", "for", "user", "in", "to", ":", "im", "=", "InboxMessage", ".", "objects", ".", "create", "(", "sender", "=", "sender", ",", "to", "=", "user", ",", "message", "=", "text", ")", "if", "Setting", ".", "get_for_user", "(", "user", ",", "'email_private_messages'", ")", ":", "email_private_message", "(", "inbox_message_id", "=", "im", ".", "id", ")", "message_sent", ".", "send", "(", "sender", "=", "InboxMessage", ",", "to", "=", "to", ",", "text", "=", "text", ",", "msg_sender", "=", "sender", ")" ]
send a private message .
train
false