id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
46,935
def fftn(x, shape=None, axes=None, overwrite_x=False):
    """Return the multidimensional discrete Fourier transform of ``x``.

    Thin wrapper that forwards all arguments to ``_raw_fftn_dispatch`` with
    a direction flag of 1 (forward transform).
    """
    direction = 1  # forward transform
    return _raw_fftn_dispatch(x, shape, axes, overwrite_x, direction)
[ "def", "fftn", "(", "x", ",", "shape", "=", "None", ",", "axes", "=", "None", ",", "overwrite_x", "=", "False", ")", ":", "return", "_raw_fftn_dispatch", "(", "x", ",", "shape", ",", "axes", ",", "overwrite_x", ",", "1", ")" ]
return multidimensional discrete fourier transform .
train
false
46,936
def ustrftime(somedate, fmt='%Y-%m-%d'):
    """Unicode-capable strftime with a pre-1900 fallback (Py2/Py3 shim).

    On Python >= 3.3 this is plain ``strftime``. On older interpreters the
    Python 2 path decodes the strftime result with the LC_TIME locale
    encoding, and when strftime raises ValueError (dates before 1900 on old
    Pythons) the numeric fields are formatted by hand — only %Y %m %d %H %M
    %S are supported in that fallback.
    """
    if (sys.version_info >= (3, 3)):
        return somedate.strftime(fmt)
    else:
        try:
            if (sys.version_info < (3, 0)):
                # Python 2: strftime returns bytes; decode them using the
                # locale's LC_TIME encoding (default 'ascii').
                encoding = (getlocale(LC_TIME)[1] or 'ascii')
                return unicode(somedate.strftime(str(fmt)), encoding)
            else:
                return somedate.strftime(fmt)
        except ValueError:
            # Only swallow the error for pre-1900 dates; anything else is a
            # genuine formatting problem and is re-raised.
            if (somedate.year >= 1900):
                raise
            fields = {'Y': somedate.year, 'm': somedate.month, 'd': somedate.day}
            if isinstance(somedate, datetime):
                fields.update({'H': somedate.hour, 'M': somedate.minute, 'S': somedate.second})
            # Rewrite e.g. '%Y' into '%(Y)02d' so the fields dict can fill it.
            fmt = re.sub('%([YmdHMS])', '%(\\1)02d', fmt)
            return (unicode(fmt) % fields)
[ "def", "ustrftime", "(", "somedate", ",", "fmt", "=", "'%Y-%m-%d'", ")", ":", "if", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "3", ")", ")", ":", "return", "somedate", ".", "strftime", "(", "fmt", ")", "else", ":", "try", ":", "if", "(", "sys", ".", "version_info", "<", "(", "3", ",", "0", ")", ")", ":", "encoding", "=", "(", "getlocale", "(", "LC_TIME", ")", "[", "1", "]", "or", "'ascii'", ")", "return", "unicode", "(", "somedate", ".", "strftime", "(", "str", "(", "fmt", ")", ")", ",", "encoding", ")", "else", ":", "return", "somedate", ".", "strftime", "(", "fmt", ")", "except", "ValueError", ":", "if", "(", "somedate", ".", "year", ">=", "1900", ")", ":", "raise", "fields", "=", "{", "'Y'", ":", "somedate", ".", "year", ",", "'m'", ":", "somedate", ".", "month", ",", "'d'", ":", "somedate", ".", "day", "}", "if", "isinstance", "(", "somedate", ",", "datetime", ")", ":", "fields", ".", "update", "(", "{", "'H'", ":", "somedate", ".", "hour", ",", "'M'", ":", "somedate", ".", "minute", ",", "'S'", ":", "somedate", ".", "second", "}", ")", "fmt", "=", "re", ".", "sub", "(", "'%([YmdHMS])'", ",", "'%(\\\\1)02d'", ",", "fmt", ")", "return", "(", "unicode", "(", "fmt", ")", "%", "fields", ")" ]
like strftime .
train
false
46,937
def getRoundZAxisByPlaneAngle(planeAngle, vector3):
    """Return ``vector3`` rotated about the Z axis by ``planeAngle``.

    The (x, y) components are multiplied by the complex number
    ``planeAngle`` (real = cos, imag = sin); z is left unchanged.
    """
    rotated_x = vector3.x * planeAngle.real - vector3.y * planeAngle.imag
    rotated_y = vector3.x * planeAngle.imag + vector3.y * planeAngle.real
    return Vector3(rotated_x, rotated_y, vector3.z)
[ "def", "getRoundZAxisByPlaneAngle", "(", "planeAngle", ",", "vector3", ")", ":", "return", "Vector3", "(", "(", "(", "vector3", ".", "x", "*", "planeAngle", ".", "real", ")", "-", "(", "vector3", ".", "y", "*", "planeAngle", ".", "imag", ")", ")", ",", "(", "(", "vector3", ".", "x", "*", "planeAngle", ".", "imag", ")", "+", "(", "vector3", ".", "y", "*", "planeAngle", ".", "real", ")", ")", ",", "vector3", ".", "z", ")" ]
get vector3 rotated by a plane angle .
train
false
46,938
def ack_type(capabilities):
    """Extract the ack type from a capabilities list.

    Most specific capability wins: multi_ack_detailed, then multi_ack,
    falling back to single ack.
    """
    for capability, ack in (('multi_ack_detailed', MULTI_ACK_DETAILED),
                            ('multi_ack', MULTI_ACK)):
        if capability in capabilities:
            return ack
    return SINGLE_ACK
[ "def", "ack_type", "(", "capabilities", ")", ":", "if", "(", "'multi_ack_detailed'", "in", "capabilities", ")", ":", "return", "MULTI_ACK_DETAILED", "elif", "(", "'multi_ack'", "in", "capabilities", ")", ":", "return", "MULTI_ACK", "return", "SINGLE_ACK" ]
extract the ack type from a capabilities list .
train
false
46,939
def write_index_dict(f, entries):
    """Write an index file from a mapping of name -> entry fields.

    Rows are emitted in sorted-name order; each row is the name followed by
    the entry's fields, flattened into a single tuple.
    """
    rows = [(name,) + tuple(entries[name]) for name in sorted(entries)]
    write_index(f, rows)
[ "def", "write_index_dict", "(", "f", ",", "entries", ")", ":", "entries_list", "=", "[", "]", "for", "name", "in", "sorted", "(", "entries", ")", ":", "entries_list", ".", "append", "(", "(", "(", "name", ",", ")", "+", "tuple", "(", "entries", "[", "name", "]", ")", ")", ")", "write_index", "(", "f", ",", "entries_list", ")" ]
write an index file based on the contents of a dictionary .
train
false
46,940
def get_collection_summaries_matching_ids(collection_ids):
    """Given a list of collection ids, return their summaries.

    The result is positionally aligned with ``collection_ids``; ids whose
    model lookup comes back falsy yield None.
    """
    models = collection_models.CollectionSummaryModel.get_multi(collection_ids)
    return [None if not model else get_collection_summary_from_model(model)
            for model in models]
[ "def", "get_collection_summaries_matching_ids", "(", "collection_ids", ")", ":", "return", "[", "(", "get_collection_summary_from_model", "(", "model", ")", "if", "model", "else", "None", ")", "for", "model", "in", "collection_models", ".", "CollectionSummaryModel", ".", "get_multi", "(", "collection_ids", ")", "]" ]
given a list of collection ids .
train
false
46,941
def _activation_summary(x):
    """Create histogram and sparsity summaries for an activation tensor.

    The tower prefix (e.g. 'tower_0/') is stripped from the op name so that
    summaries from all towers share one name.
    """
    tower_prefix = '%s_[0-9]*/' % TOWER_NAME
    tensor_name = re.sub(tower_prefix, '', x.op.name)
    tf.histogram_summary(tensor_name + '/activations', x)
    tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
[ "def", "_activation_summary", "(", "x", ")", ":", "tensor_name", "=", "re", ".", "sub", "(", "(", "'%s_[0-9]*/'", "%", "TOWER_NAME", ")", ",", "''", ",", "x", ".", "op", ".", "name", ")", "tf", ".", "histogram_summary", "(", "(", "tensor_name", "+", "'/activations'", ")", ",", "x", ")", "tf", ".", "scalar_summary", "(", "(", "tensor_name", "+", "'/sparsity'", ")", ",", "tf", ".", "nn", ".", "zero_fraction", "(", "x", ")", ")" ]
helper to create summaries for activations .
train
false
46,942
def render_config(config, template_renderer, _path=u''):
    """Recursively render any string/Template fields of a config structure.

    Lists and dicts are walked and rebuilt; strings are passed through
    ``template_renderer``. On a rendering error the failing path within the
    structure is attached to the exception as ``config_path``. Non-string,
    non-container values are returned unchanged.
    """
    if isinstance(config, (str, Template)):
        try:
            return template_renderer(config)
        except Exception as e:
            # Record where in the structure the failure happened.
            e.config_path = _path
            raise
    if isinstance(config, list):
        prefix = _path + u'/' if _path else _path
        return [render_config(item, template_renderer, _path=prefix + str(index))
                for index, item in enumerate(config)]
    if isinstance(config, dict):
        prefix = _path + u'/' if _path else _path
        return {key: render_config(value, template_renderer, _path=prefix + key)
                for key, value in config.items()}
    return config
[ "def", "render_config", "(", "config", ",", "template_renderer", ",", "_path", "=", "u''", ")", ":", "if", "isinstance", "(", "config", ",", "(", "str", ",", "Template", ")", ")", ":", "try", ":", "return", "template_renderer", "(", "config", ")", "except", "Exception", "as", "e", ":", "e", ".", "config_path", "=", "_path", "raise", "elif", "isinstance", "(", "config", ",", "list", ")", ":", "if", "_path", ":", "_path", "+=", "u'/'", "return", "[", "render_config", "(", "v", ",", "template_renderer", ",", "_path", "=", "(", "_path", "+", "str", "(", "i", ")", ")", ")", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "config", ")", "]", "elif", "isinstance", "(", "config", ",", "dict", ")", ":", "if", "_path", ":", "_path", "+=", "u'/'", "return", "{", "k", ":", "render_config", "(", "v", ",", "template_renderer", ",", "_path", "=", "(", "_path", "+", "k", ")", ")", "for", "(", "k", ",", "v", ")", "in", "config", ".", "items", "(", ")", "}", "else", ":", "return", "config" ]
recurse through config data structures attempting to render any string fields against a given context .
train
false
46,943
def sh_legendre(n, monic=False):
    """Shifted Legendre polynomial, orthogonal over [0, 1] with weight 1.

    Parameters
    ----------
    n : int
        Degree of the polynomial; must be nonnegative.
    monic : bool, optional
        If True, scale the leading coefficient to 1.

    Returns
    -------
    orthopoly1d
        The degree-``n`` shifted Legendre polynomial.

    Raises
    ------
    ValueError
        If ``n`` is negative.
    """
    if (n < 0):
        raise ValueError('n must be nonnegative.')
    # Constant weight function w(x) = 1, written so it broadcasts over arrays.
    wfunc = (lambda x: ((0.0 * x) + 1.0))
    if (n == 0):
        # Degree 0 has no roots/weights; norm and leading coefficient are 1.
        return orthopoly1d([], [], 1.0, 1.0, wfunc, (0, 1), monic, (lambda x: eval_sh_legendre(n, x)))
    (x, w, mu0) = roots_sh_legendre(n, mu=True)
    # Squared norm h_n = 1/(2n+1); leading coefficient k_n = (2n)!/(n!)^2.
    hn = (1.0 / ((2 * n) + 1.0))
    kn = (_gam(((2 * n) + 1)) / (_gam((n + 1)) ** 2))
    p = orthopoly1d(x, w, hn, kn, wfunc, limits=(0, 1), monic=monic, eval_func=(lambda x: eval_sh_legendre(n, x)))
    return p
[ "def", "sh_legendre", "(", "n", ",", "monic", "=", "False", ")", ":", "if", "(", "n", "<", "0", ")", ":", "raise", "ValueError", "(", "'n must be nonnegative.'", ")", "wfunc", "=", "(", "lambda", "x", ":", "(", "(", "0.0", "*", "x", ")", "+", "1.0", ")", ")", "if", "(", "n", "==", "0", ")", ":", "return", "orthopoly1d", "(", "[", "]", ",", "[", "]", ",", "1.0", ",", "1.0", ",", "wfunc", ",", "(", "0", ",", "1", ")", ",", "monic", ",", "(", "lambda", "x", ":", "eval_sh_legendre", "(", "n", ",", "x", ")", ")", ")", "(", "x", ",", "w", ",", "mu0", ")", "=", "roots_sh_legendre", "(", "n", ",", "mu", "=", "True", ")", "hn", "=", "(", "1.0", "/", "(", "(", "2", "*", "n", ")", "+", "1.0", ")", ")", "kn", "=", "(", "_gam", "(", "(", "(", "2", "*", "n", ")", "+", "1", ")", ")", "/", "(", "_gam", "(", "(", "n", "+", "1", ")", ")", "**", "2", ")", ")", "p", "=", "orthopoly1d", "(", "x", ",", "w", ",", "hn", ",", "kn", ",", "wfunc", ",", "limits", "=", "(", "0", ",", "1", ")", ",", "monic", "=", "monic", ",", "eval_func", "=", "(", "lambda", "x", ":", "eval_sh_legendre", "(", "n", ",", "x", ")", ")", ")", "return", "p" ]
shifted legendre polynomial .
train
false
46,944
def _passed(argspec, positional, keyword): result = {} unpassed = (len(argspec.args) - len(positional)) if (argspec.keywords is not None): kwargs = result[argspec.keywords] = {} if (unpassed < 0): if (argspec.varargs is None): raise TypeError('Too many arguments.') else: result[argspec.varargs] = positional[len(argspec.args):] for (name, value) in zip(argspec.args, positional): result[name] = value for (name, value) in keyword.items(): if (name in argspec.args): if (name in result): raise TypeError('Already passed.') result[name] = value elif (argspec.keywords is not None): kwargs[name] = value else: raise TypeError('no such param') return result
[ "def", "_passed", "(", "argspec", ",", "positional", ",", "keyword", ")", ":", "result", "=", "{", "}", "unpassed", "=", "(", "len", "(", "argspec", ".", "args", ")", "-", "len", "(", "positional", ")", ")", "if", "(", "argspec", ".", "keywords", "is", "not", "None", ")", ":", "kwargs", "=", "result", "[", "argspec", ".", "keywords", "]", "=", "{", "}", "if", "(", "unpassed", "<", "0", ")", ":", "if", "(", "argspec", ".", "varargs", "is", "None", ")", ":", "raise", "TypeError", "(", "'Too many arguments.'", ")", "else", ":", "result", "[", "argspec", ".", "varargs", "]", "=", "positional", "[", "len", "(", "argspec", ".", "args", ")", ":", "]", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "argspec", ".", "args", ",", "positional", ")", ":", "result", "[", "name", "]", "=", "value", "for", "(", "name", ",", "value", ")", "in", "keyword", ".", "items", "(", ")", ":", "if", "(", "name", "in", "argspec", ".", "args", ")", ":", "if", "(", "name", "in", "result", ")", ":", "raise", "TypeError", "(", "'Already passed.'", ")", "result", "[", "name", "]", "=", "value", "elif", "(", "argspec", ".", "keywords", "is", "not", "None", ")", ":", "kwargs", "[", "name", "]", "=", "value", "else", ":", "raise", "TypeError", "(", "'no such param'", ")", "return", "result" ]
take an i{inspect .
train
false
46,945
def construct_doc2author(corpus, author2doc):
    """Make a mapping from document ids to lists of author ids.

    Inverts ``author2doc`` (author -> doc ids) over the documents of
    ``corpus``; authors appear in the order they occur in ``author2doc``.
    """
    doc2author = {}
    for doc_id, _ in enumerate(corpus):
        doc2author[doc_id] = [author for author, doc_ids in author2doc.items()
                              if doc_id in doc_ids]
    return doc2author
[ "def", "construct_doc2author", "(", "corpus", ",", "author2doc", ")", ":", "doc2author", "=", "{", "}", "for", "(", "d", ",", "_", ")", "in", "enumerate", "(", "corpus", ")", ":", "author_ids", "=", "[", "]", "for", "(", "a", ",", "a_doc_ids", ")", "in", "author2doc", ".", "items", "(", ")", ":", "if", "(", "d", "in", "a_doc_ids", ")", ":", "author_ids", ".", "append", "(", "a", ")", "doc2author", "[", "d", "]", "=", "author_ids", "return", "doc2author" ]
make a mapping from document ids to author ids .
train
false
46,946
def index_alt():
    """Module homepage for non-admin users when no CMS content is found;
    redirects to the person controller.
    """
    s3_redirect_default(URL(f='person'))
[ "def", "index_alt", "(", ")", ":", "s3_redirect_default", "(", "URL", "(", "f", "=", "'person'", ")", ")" ]
module homepage for non-admin users when no cms content found .
train
false
46,947
def hub_matrix(G, nodelist=None):
    """Return the HITS hub matrix of ``G`` (A @ A.T for adjacency A)."""
    adjacency = nx.to_numpy_matrix(G, nodelist=nodelist)
    return adjacency * adjacency.T
[ "def", "hub_matrix", "(", "G", ",", "nodelist", "=", "None", ")", ":", "M", "=", "nx", ".", "to_numpy_matrix", "(", "G", ",", "nodelist", "=", "nodelist", ")", "return", "(", "M", "*", "M", ".", "T", ")" ]
return the hits hub matrix .
train
false
46,949
def getctime(filename):
    """Return the metadata change time (``st_ctime``) of *filename*."""
    stat_result = os.stat(filename)
    return stat_result.st_ctime
[ "def", "getctime", "(", "filename", ")", ":", "return", "os", ".", "stat", "(", "filename", ")", ".", "st_ctime" ]
return the metadata change time of a file .
train
false
46,950
def EntityGroupKind(key):
    """Given an entity primary key (Reference proto), return the kind of
    its entity-group root — the first element of the key path.
    """
    return key.path().element(0).type()
[ "def", "EntityGroupKind", "(", "key", ")", ":", "return", "key", ".", "path", "(", ")", ".", "element", "(", "0", ")", ".", "type", "(", ")" ]
given entity primary key as reference proto .
train
false
46,952
def ReferenceToKeyValue(key, id_resolver=None):
    """Convert a datastore key into a comparable, hashable tuple value.

    Accepts a v3 ``entity_pb.Reference``, a v4 ``entity_v4_pb.Key``, a v1
    ``googledatastore.Key`` (when Cloud Datastore support is enabled), or an
    ``entity_pb.PropertyValue_ReferenceValue``; v1/v4 keys are converted to
    v3 references first.

    Returns:
      A tuple (kReferenceValueGroup, app, namespace, kind1, name-or-id1,
      kind2, name-or-id2, ...).

    Raises:
      datastore_errors.BadArgumentError: if ``key`` is none of the
        supported types.
    """
    # Normalize v1 (Cloud Datastore) keys to v3 references.
    if (datastore_pbs._CLOUD_DATASTORE_ENABLED and isinstance(key, googledatastore.Key)):
        v1_key = key
        key = entity_pb.Reference()
        datastore_pbs.get_entity_converter(id_resolver).v1_to_v3_reference(v1_key, key)
    elif isinstance(key, entity_v4_pb.Key):
        # Normalize v4 keys to v3 references.
        v4_key = key
        key = entity_pb.Reference()
        datastore_pbs.get_entity_converter().v4_to_v3_reference(v4_key, key)
    if isinstance(key, entity_pb.Reference):
        element_list = key.path().element_list()
    elif isinstance(key, entity_pb.PropertyValue_ReferenceValue):
        element_list = key.pathelement_list()
    else:
        raise datastore_errors.BadArgumentError(('key arg expected to be entity_pb.Reference or googledatastore.Key (%r)' % (key,)))
    result = [entity_pb.PropertyValue.kReferenceValueGroup, key.app(), key.name_space()]
    for element in element_list:
        # Each path element contributes its kind plus either name or id.
        result.append(element.type())
        if element.has_name():
            result.append(element.name())
        else:
            result.append(element.id())
    return tuple(result)
[ "def", "ReferenceToKeyValue", "(", "key", ",", "id_resolver", "=", "None", ")", ":", "if", "(", "datastore_pbs", ".", "_CLOUD_DATASTORE_ENABLED", "and", "isinstance", "(", "key", ",", "googledatastore", ".", "Key", ")", ")", ":", "v1_key", "=", "key", "key", "=", "entity_pb", ".", "Reference", "(", ")", "datastore_pbs", ".", "get_entity_converter", "(", "id_resolver", ")", ".", "v1_to_v3_reference", "(", "v1_key", ",", "key", ")", "elif", "isinstance", "(", "key", ",", "entity_v4_pb", ".", "Key", ")", ":", "v4_key", "=", "key", "key", "=", "entity_pb", ".", "Reference", "(", ")", "datastore_pbs", ".", "get_entity_converter", "(", ")", ".", "v4_to_v3_reference", "(", "v4_key", ",", "key", ")", "if", "isinstance", "(", "key", ",", "entity_pb", ".", "Reference", ")", ":", "element_list", "=", "key", ".", "path", "(", ")", ".", "element_list", "(", ")", "elif", "isinstance", "(", "key", ",", "entity_pb", ".", "PropertyValue_ReferenceValue", ")", ":", "element_list", "=", "key", ".", "pathelement_list", "(", ")", "else", ":", "raise", "datastore_errors", ".", "BadArgumentError", "(", "(", "'key arg expected to be entity_pb.Reference or googledatastore.Key (%r)'", "%", "(", "key", ",", ")", ")", ")", "result", "=", "[", "entity_pb", ".", "PropertyValue", ".", "kReferenceValueGroup", ",", "key", ".", "app", "(", ")", ",", "key", ".", "name_space", "(", ")", "]", "for", "element", "in", "element_list", ":", "result", ".", "append", "(", "element", ".", "type", "(", ")", ")", "if", "element", ".", "has_name", "(", ")", ":", "result", ".", "append", "(", "element", ".", "name", "(", ")", ")", "else", ":", "result", ".", "append", "(", "element", ".", "id", "(", ")", ")", "return", "tuple", "(", "result", ")" ]
converts a key into a comparable hashable "key" value .
train
false
46,953
def _applySerialisedSettings(serialised, predicate):
    # Apply '\x08'-separated key=value pairs to known settings; only
    # settings present in settingsDictionary for which predicate(setting)
    # is true are updated.
    for option in serialised.split('\x08'):
        if len(option) > 0:
            key, value = option.split('=', 1)
            if key in settingsDictionary:
                setting = settingsDictionary[key]
                if predicate(setting):
                    setting.setValue(value)

def setProfileFromString(options):
    """Parse an encoded string containing all profile settings and apply it.

    The string is base64-encoded, zlib-compressed data: a profile-settings
    section and an alteration-settings section separated by '\\x0c', each
    section being '\\x08'-separated key=value pairs. Profile pairs update
    settings whose ``isProfile()`` is true; alteration pairs update settings
    whose ``isAlteration()`` is true. Unknown keys are ignored.
    """
    options = base64.b64decode(options)
    options = zlib.decompress(options)
    profileOpts, alt = options.split('\x0c', 1)
    global settingsDictionary
    # The two sections share identical parsing; only the predicate differs.
    _applySerialisedSettings(profileOpts, lambda setting: setting.isProfile())
    _applySerialisedSettings(alt, lambda setting: setting.isAlteration())
[ "def", "setProfileFromString", "(", "options", ")", ":", "options", "=", "base64", ".", "b64decode", "(", "options", ")", "options", "=", "zlib", ".", "decompress", "(", "options", ")", "(", "profileOpts", ",", "alt", ")", "=", "options", ".", "split", "(", "'\\x0c'", ",", "1", ")", "global", "settingsDictionary", "for", "option", "in", "profileOpts", ".", "split", "(", "'\\x08'", ")", ":", "if", "(", "len", "(", "option", ")", ">", "0", ")", ":", "(", "key", ",", "value", ")", "=", "option", ".", "split", "(", "'='", ",", "1", ")", "if", "(", "key", "in", "settingsDictionary", ")", ":", "if", "settingsDictionary", "[", "key", "]", ".", "isProfile", "(", ")", ":", "settingsDictionary", "[", "key", "]", ".", "setValue", "(", "value", ")", "for", "option", "in", "alt", ".", "split", "(", "'\\x08'", ")", ":", "if", "(", "len", "(", "option", ")", ">", "0", ")", ":", "(", "key", ",", "value", ")", "=", "option", ".", "split", "(", "'='", ",", "1", ")", "if", "(", "key", "in", "settingsDictionary", ")", ":", "if", "settingsDictionary", "[", "key", "]", ".", "isAlteration", "(", ")", ":", "settingsDictionary", "[", "key", "]", ".", "setValue", "(", "value", ")" ]
parse an encoded string which has all the profile settings stored inside of it .
train
false
46,954
@app.route('/scans/<int:scan_id>/kb/', methods=['GET'])
@requires_auth
def list_kb(scan_id):
    """List vulnerabilities stored in the KB for one scan, as JSON.

    Findings are filtered against the request's query parameters via
    ``matches_filter``; the response body is ``{'items': [...]}`` with one
    JSON object per matching finding. Aborts with 404 when the scan id is
    unknown.
    """
    scan_info = get_scan_info_from_id(scan_id)
    if (scan_info is None):
        abort(404, 'Scan not found')
    data = []
    for (finding_id, finding) in enumerate(kb.kb.get_all_findings()):
        # Only include findings that pass the request's filter parameters.
        if matches_filter(finding, request):
            data.append(finding_to_json(finding, scan_id, finding_id))
    return jsonify({'items': data})
[ "@", "app", ".", "route", "(", "'/scans/<int:scan_id>/kb/'", ",", "methods", "=", "[", "'GET'", "]", ")", "@", "requires_auth", "def", "list_kb", "(", "scan_id", ")", ":", "scan_info", "=", "get_scan_info_from_id", "(", "scan_id", ")", "if", "(", "scan_info", "is", "None", ")", ":", "abort", "(", "404", ",", "'Scan not found'", ")", "data", "=", "[", "]", "for", "(", "finding_id", ",", "finding", ")", "in", "enumerate", "(", "kb", ".", "kb", ".", "get_all_findings", "(", ")", ")", ":", "if", "matches_filter", "(", "finding", ",", "request", ")", ":", "data", ".", "append", "(", "finding_to_json", "(", "finding", ",", "scan_id", ",", "finding_id", ")", ")", "return", "jsonify", "(", "{", "'items'", ":", "data", "}", ")" ]
list vulnerabilities stored in the kb filters: * /scans/0/kb/?name= returns only vulnerabilities which contain the specified string in the vulnerability name .
train
false
46,955
def expand_abbreviations(template, abbreviations):
    """Expand abbreviations in a template name.

    An exact match in ``abbreviations`` wins; otherwise a 'prefix:rest'
    template whose prefix is known is expanded by formatting the rest into
    the abbreviation. Unknown templates pass through unchanged.
    """
    try:
        return abbreviations[template]
    except KeyError:
        pass
    prefix, _, rest = template.partition(u':')
    if prefix in abbreviations:
        return abbreviations[prefix].format(rest)
    return template
[ "def", "expand_abbreviations", "(", "template", ",", "abbreviations", ")", ":", "if", "(", "template", "in", "abbreviations", ")", ":", "return", "abbreviations", "[", "template", "]", "(", "prefix", ",", "sep", ",", "rest", ")", "=", "template", ".", "partition", "(", "u':'", ")", "if", "(", "prefix", "in", "abbreviations", ")", ":", "return", "abbreviations", "[", "prefix", "]", ".", "format", "(", "rest", ")", "return", "template" ]
expand abbreviations in a template name .
train
true
46,957
def decode64chops(string):
    """Split a comma-separated base64 string and decode each chunk to an int
    via ``str642int``, returning the list of decoded values.
    """
    return [str642int(chip) for chip in string.split(',')]
[ "def", "decode64chops", "(", "string", ")", ":", "chips", "=", "string", ".", "split", "(", "','", ")", "chops", "=", "[", "]", "for", "string", "in", "chips", ":", "chops", ".", "append", "(", "str642int", "(", "string", ")", ")", "return", "chops" ]
base64decodes and makes a .
train
false
46,958
def randomize_unitofwork():
    """Use random-ordering sets within the unit of work to surface
    order-dependence bugs in unit-of-work sorting.
    """
    from sqlalchemy.orm import unitofwork, session, mapper, dependency
    from sqlalchemy.util import topological
    from sqlalchemy.testing.util import RandomSet
    # Same assignment order as the original chained statement.
    for module in (topological, unitofwork, session, mapper, dependency):
        module.set = RandomSet
[ "def", "randomize_unitofwork", "(", ")", ":", "from", "sqlalchemy", ".", "orm", "import", "unitofwork", ",", "session", ",", "mapper", ",", "dependency", "from", "sqlalchemy", ".", "util", "import", "topological", "from", "sqlalchemy", ".", "testing", ".", "util", "import", "RandomSet", "topological", ".", "set", "=", "unitofwork", ".", "set", "=", "session", ".", "set", "=", "mapper", ".", "set", "=", "dependency", ".", "set", "=", "RandomSet" ]
use random-ordering sets within the unit of work in order to detect unit of work sorting issues .
train
false
46,959
def get_stream_enc(stream, default=None):
    """Return the given stream's encoding, or *default* when the stream has
    no ``encoding`` attribute or its value is falsy.
    """
    encoding = getattr(stream, 'encoding', None)
    return encoding if encoding else default
[ "def", "get_stream_enc", "(", "stream", ",", "default", "=", "None", ")", ":", "if", "(", "(", "not", "hasattr", "(", "stream", ",", "'encoding'", ")", ")", "or", "(", "not", "stream", ".", "encoding", ")", ")", ":", "return", "default", "else", ":", "return", "stream", ".", "encoding" ]
return the given streams encoding or a default .
train
true
46,961
def combination_matches(combination, match_combinations):
    """Check whether *combination* matches any of the given combination globs.

    A glob matches when every key it shares with *combination* has an equal
    value; keys absent from a glob match anything.
    """
    return any(
        all(glob.get(key, value) == value for key, value in combination.items())
        for glob in match_combinations
    )
[ "def", "combination_matches", "(", "combination", ",", "match_combinations", ")", ":", "for", "cmatch", "in", "match_combinations", ":", "for", "(", "key", ",", "val", ")", "in", "combination", ".", "items", "(", ")", ":", "if", "(", "cmatch", ".", "get", "(", "key", ",", "val", ")", "!=", "val", ")", ":", "break", "else", ":", "return", "True", "return", "False" ]
checks if the given combination is matches for any of the given combination globs .
train
false
46,965
def set_script_prefix(prefix):
    """Set the script prefix for the current thread, ensuring it ends with '/'."""
    _prefixes.value = prefix if prefix.endswith('/') else prefix + '/'
[ "def", "set_script_prefix", "(", "prefix", ")", ":", "if", "(", "not", "prefix", ".", "endswith", "(", "'/'", ")", ")", ":", "prefix", "+=", "'/'", "_prefixes", ".", "value", "=", "prefix" ]
set the script prefix for the current thread .
train
false
46,966
def hoffman_singleton_graph():
    """Return the Hoffman-Singleton graph.

    Built from five pentagons and five pentagrams: pentagon i is a 5-cycle
    (j ~ j±1 mod 5), pentagram i connects j ~ j±2 mod 5, and pentagon vertex
    (i, j) joins pentagram vertex (k, i*k + j mod 5); labels are then
    relabelled to integers.
    """
    graph = nx.Graph()
    for i in range(5):
        for j in range(5):
            pentagon_node = ('pentagon', i, j)
            pentagram_node = ('pentagram', i, j)
            graph.add_edge(pentagon_node, ('pentagon', i, (j - 1) % 5))
            graph.add_edge(pentagon_node, ('pentagon', i, (j + 1) % 5))
            graph.add_edge(pentagram_node, ('pentagram', i, (j - 2) % 5))
            graph.add_edge(pentagram_node, ('pentagram', i, (j + 2) % 5))
            for k in range(5):
                graph.add_edge(pentagon_node, ('pentagram', k, (i * k + j) % 5))
    graph = nx.convert_node_labels_to_integers(graph)
    graph.name = 'Hoffman-Singleton Graph'
    return graph
[ "def", "hoffman_singleton_graph", "(", ")", ":", "G", "=", "nx", ".", "Graph", "(", ")", "for", "i", "in", "range", "(", "5", ")", ":", "for", "j", "in", "range", "(", "5", ")", ":", "G", ".", "add_edge", "(", "(", "'pentagon'", ",", "i", ",", "j", ")", ",", "(", "'pentagon'", ",", "i", ",", "(", "(", "j", "-", "1", ")", "%", "5", ")", ")", ")", "G", ".", "add_edge", "(", "(", "'pentagon'", ",", "i", ",", "j", ")", ",", "(", "'pentagon'", ",", "i", ",", "(", "(", "j", "+", "1", ")", "%", "5", ")", ")", ")", "G", ".", "add_edge", "(", "(", "'pentagram'", ",", "i", ",", "j", ")", ",", "(", "'pentagram'", ",", "i", ",", "(", "(", "j", "-", "2", ")", "%", "5", ")", ")", ")", "G", ".", "add_edge", "(", "(", "'pentagram'", ",", "i", ",", "j", ")", ",", "(", "'pentagram'", ",", "i", ",", "(", "(", "j", "+", "2", ")", "%", "5", ")", ")", ")", "for", "k", "in", "range", "(", "5", ")", ":", "G", ".", "add_edge", "(", "(", "'pentagon'", ",", "i", ",", "j", ")", ",", "(", "'pentagram'", ",", "k", ",", "(", "(", "(", "i", "*", "k", ")", "+", "j", ")", "%", "5", ")", ")", ")", "G", "=", "nx", ".", "convert_node_labels_to_integers", "(", "G", ")", "G", ".", "name", "=", "'Hoffman-Singleton Graph'", "return", "G" ]
return the hoffman-singleton graph .
train
false
46,967
def lineno():
    """Return the current line number at the call site."""
    caller = inspect.currentframe().f_back
    return caller.f_lineno
[ "def", "lineno", "(", ")", ":", "return", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_lineno" ]
returns the current line number in our program .
train
false
46,968
@api_versions.wraps('2.11')
@utils.arg('host', metavar='<hostname>', help=_('Name of host.'))
@utils.arg('binary', metavar='<binary>', help=_('Service binary.'))
@utils.arg('--unset', dest='force_down', help=_('Unset the force state down of service.'), action='store_false', default=True)
def do_service_force_down(cs, args):
    """Force a service to the "down" state (or unset it with --unset).

    Calls the services API's force_down endpoint with the given host and
    binary, then prints the resulting host/binary/forced-down row.
    """
    result = cs.services.force_down(args.host, args.binary, args.force_down)
    utils.print_list([result], ['Host', 'Binary', 'Forced down'])
[ "@", "api_versions", ".", "wraps", "(", "'2.11'", ")", "@", "utils", ".", "arg", "(", "'host'", ",", "metavar", "=", "'<hostname>'", ",", "help", "=", "_", "(", "'Name of host.'", ")", ")", "@", "utils", ".", "arg", "(", "'binary'", ",", "metavar", "=", "'<binary>'", ",", "help", "=", "_", "(", "'Service binary.'", ")", ")", "@", "utils", ".", "arg", "(", "'--unset'", ",", "dest", "=", "'force_down'", ",", "help", "=", "_", "(", "'Unset the force state down of service.'", ")", ",", "action", "=", "'store_false'", ",", "default", "=", "True", ")", "def", "do_service_force_down", "(", "cs", ",", "args", ")", ":", "result", "=", "cs", ".", "services", ".", "force_down", "(", "args", ".", "host", ",", "args", ".", "binary", ",", "args", ".", "force_down", ")", "utils", ".", "print_list", "(", "[", "result", "]", ",", "[", "'Host'", ",", "'Binary'", ",", "'Forced down'", "]", ")" ]
force service to down .
train
false
46,969
def _import_key_der(key_data, passphrase, params):
    """Import a DSA key from DER data, trying each supported container.

    Each decoder is attempted in turn (OpenSSL private, SubjectPublicKeyInfo,
    X.509 certificate, PKCS#8); a ValueError from a decoder means "not this
    format", so the next one is tried. Raises ValueError when none match.
    """
    for decode in (_import_openssl_private,
                   _import_subjectPublicKeyInfo,
                   _import_x509_cert,
                   _import_pkcs8):
        try:
            return decode(key_data, passphrase, params)
        except ValueError:
            continue
    raise ValueError('DSA key format is not supported')
[ "def", "_import_key_der", "(", "key_data", ",", "passphrase", ",", "params", ")", ":", "decodings", "=", "(", "_import_openssl_private", ",", "_import_subjectPublicKeyInfo", ",", "_import_x509_cert", ",", "_import_pkcs8", ")", "for", "decoding", "in", "decodings", ":", "try", ":", "return", "decoding", "(", "key_data", ",", "passphrase", ",", "params", ")", "except", "ValueError", ":", "pass", "raise", "ValueError", "(", "'DSA key format is not supported'", ")" ]
import a dsa key .
train
false
46,970
def filetype_id_to_string(filetype='a'):
    """Translate a SELinux filetype single-letter id to a human-readable name.

    The id is validated first; ids missing from the lookup table map to
    'error'.
    """
    _validate_filetype(filetype)
    return _SELINUX_FILETYPES.get(filetype, 'error')
[ "def", "filetype_id_to_string", "(", "filetype", "=", "'a'", ")", ":", "_validate_filetype", "(", "filetype", ")", "return", "_SELINUX_FILETYPES", ".", "get", "(", "filetype", ",", "'error'", ")" ]
translates selinux filetype single-letter representation to a more human-readable version .
train
false
46,971
def register_save_all(id, driver):
    """Register an image function that saves all frames of a multiframe format.

    The format id is stored upper-cased so lookups are case-insensitive.
    """
    SAVE_ALL[id.upper()] = driver
[ "def", "register_save_all", "(", "id", ",", "driver", ")", ":", "SAVE_ALL", "[", "id", ".", "upper", "(", ")", "]", "=", "driver" ]
registers an image function to save all the frames of a multiframe format .
train
false
46,972
def make_overload_template(func, overload_func, jit_options):
    """Make a template class for function *func* overloaded by *overload_func*."""
    display_name = getattr(func, '__name__', str(func))
    base = _OverloadFunctionTemplate
    class_dict = dict(
        key=func,
        _overload_func=staticmethod(overload_func),
        _impl_cache={},
        _compiled_overloads={},
        _jit_options=jit_options,
    )
    return type(base)('OverloadTemplate_%s' % (display_name,), (base,), class_dict)
[ "def", "make_overload_template", "(", "func", ",", "overload_func", ",", "jit_options", ")", ":", "func_name", "=", "getattr", "(", "func", ",", "'__name__'", ",", "str", "(", "func", ")", ")", "name", "=", "(", "'OverloadTemplate_%s'", "%", "(", "func_name", ",", ")", ")", "base", "=", "_OverloadFunctionTemplate", "dct", "=", "dict", "(", "key", "=", "func", ",", "_overload_func", "=", "staticmethod", "(", "overload_func", ")", ",", "_impl_cache", "=", "{", "}", ",", "_compiled_overloads", "=", "{", "}", ",", "_jit_options", "=", "jit_options", ")", "return", "type", "(", "base", ")", "(", "name", ",", "(", "base", ",", ")", ",", "dct", ")" ]
make a template class for function *func* overloaded by *overload_func* .
train
false
46,973
def rectangle(width, height, dtype=np.uint8):
    """Return a width-by-height array of ones (a filled rectangle footprint)."""
    shape = (width, height)
    return np.ones(shape, dtype=dtype)
[ "def", "rectangle", "(", "width", ",", "height", ",", "dtype", "=", "np", ".", "uint8", ")", ":", "return", "np", ".", "ones", "(", "(", "width", ",", "height", ")", ",", "dtype", "=", "dtype", ")" ]
draw a rectangle with corners at the provided upper-left and lower-right coordinates .
train
false
46,974
def add_courseware_context(content_list, course, user, id_map=None):
    """Decorate each content dict with courseware metadata (url and title).

    Uses the cached discussion id map when *id_map* is not supplied. Content
    whose commentable_id is absent from the map is left untouched; matched
    content is mutated in place.
    """
    if (id_map is None):
        id_map = get_cached_discussion_id_map(course, [content['commentable_id'] for content in content_list], user)
    for content in content_list:
        commentable_id = content['commentable_id']
        if (commentable_id in id_map):
            location = id_map[commentable_id]['location'].to_deprecated_string()
            title = id_map[commentable_id]['title']
            # Deep-link straight to the courseware location of the thread.
            url = reverse('jump_to', kwargs={'course_id': course.id.to_deprecated_string(), 'location': location})
            content.update({'courseware_url': url, 'courseware_title': title})
[ "def", "add_courseware_context", "(", "content_list", ",", "course", ",", "user", ",", "id_map", "=", "None", ")", ":", "if", "(", "id_map", "is", "None", ")", ":", "id_map", "=", "get_cached_discussion_id_map", "(", "course", ",", "[", "content", "[", "'commentable_id'", "]", "for", "content", "in", "content_list", "]", ",", "user", ")", "for", "content", "in", "content_list", ":", "commentable_id", "=", "content", "[", "'commentable_id'", "]", "if", "(", "commentable_id", "in", "id_map", ")", ":", "location", "=", "id_map", "[", "commentable_id", "]", "[", "'location'", "]", ".", "to_deprecated_string", "(", ")", "title", "=", "id_map", "[", "commentable_id", "]", "[", "'title'", "]", "url", "=", "reverse", "(", "'jump_to'", ",", "kwargs", "=", "{", "'course_id'", ":", "course", ".", "id", ".", "to_deprecated_string", "(", ")", ",", "'location'", ":", "location", "}", ")", "content", ".", "update", "(", "{", "'courseware_url'", ":", "url", ",", "'courseware_title'", ":", "title", "}", ")" ]
decorates content_list with courseware metadata using the discussion id map cache if available .
train
false
46,975
def tukeylambda_variance(lam): lam = np.asarray(lam) shp = lam.shape lam = np.atleast_1d(lam).astype(np.float64) threshold = 0.075 low_mask = (lam < (-0.5)) neghalf_mask = (lam == (-0.5)) small_mask = (np.abs(lam) < threshold) reg_mask = (~ ((low_mask | neghalf_mask) | small_mask)) small = lam[small_mask] reg = lam[reg_mask] v = np.empty_like(lam) v[low_mask] = np.nan v[neghalf_mask] = np.inf if (small.size > 0): v[small_mask] = (_tukeylambda_var_p(small) / _tukeylambda_var_q(small)) if (reg.size > 0): v[reg_mask] = ((2.0 / (reg ** 2)) * ((1.0 / (1.0 + (2 * reg))) - beta((reg + 1), (reg + 1)))) v.shape = shp return v
[ "def", "tukeylambda_variance", "(", "lam", ")", ":", "lam", "=", "np", ".", "asarray", "(", "lam", ")", "shp", "=", "lam", ".", "shape", "lam", "=", "np", ".", "atleast_1d", "(", "lam", ")", ".", "astype", "(", "np", ".", "float64", ")", "threshold", "=", "0.075", "low_mask", "=", "(", "lam", "<", "(", "-", "0.5", ")", ")", "neghalf_mask", "=", "(", "lam", "==", "(", "-", "0.5", ")", ")", "small_mask", "=", "(", "np", ".", "abs", "(", "lam", ")", "<", "threshold", ")", "reg_mask", "=", "(", "~", "(", "(", "low_mask", "|", "neghalf_mask", ")", "|", "small_mask", ")", ")", "small", "=", "lam", "[", "small_mask", "]", "reg", "=", "lam", "[", "reg_mask", "]", "v", "=", "np", ".", "empty_like", "(", "lam", ")", "v", "[", "low_mask", "]", "=", "np", ".", "nan", "v", "[", "neghalf_mask", "]", "=", "np", ".", "inf", "if", "(", "small", ".", "size", ">", "0", ")", ":", "v", "[", "small_mask", "]", "=", "(", "_tukeylambda_var_p", "(", "small", ")", "/", "_tukeylambda_var_q", "(", "small", ")", ")", "if", "(", "reg", ".", "size", ">", "0", ")", ":", "v", "[", "reg_mask", "]", "=", "(", "(", "2.0", "/", "(", "reg", "**", "2", ")", ")", "*", "(", "(", "1.0", "/", "(", "1.0", "+", "(", "2", "*", "reg", ")", ")", ")", "-", "beta", "(", "(", "reg", "+", "1", ")", ",", "(", "reg", "+", "1", ")", ")", ")", ")", "v", ".", "shape", "=", "shp", "return", "v" ]
variance of the tukey lambda distribution .
train
false
46,976
def humanize_seconds(secs, prefix=''): for (unit, divider, formatter) in TIME_UNITS: if (secs >= divider): w = (secs / divider) punit = (((w > 1) and (unit + 's')) or unit) return ('%s%s %s' % (prefix, formatter(w), punit)) return 'now'
[ "def", "humanize_seconds", "(", "secs", ",", "prefix", "=", "''", ")", ":", "for", "(", "unit", ",", "divider", ",", "formatter", ")", "in", "TIME_UNITS", ":", "if", "(", "secs", ">=", "divider", ")", ":", "w", "=", "(", "secs", "/", "divider", ")", "punit", "=", "(", "(", "(", "w", ">", "1", ")", "and", "(", "unit", "+", "'s'", ")", ")", "or", "unit", ")", "return", "(", "'%s%s %s'", "%", "(", "prefix", ",", "formatter", "(", "w", ")", ",", "punit", ")", ")", "return", "'now'" ]
show seconds in human form .
train
false
46,977
def test_iht_fit_sample(): iht = InstanceHardnessThreshold(ESTIMATOR, random_state=RND_SEED) (X_resampled, y_resampled) = iht.fit_sample(X, Y) X_gt = np.array([[(-0.3879569), 0.6894251], [(-0.09322739), 1.28177189], [(-0.77740357), 0.74097941], [0.91542919, (-0.65453327)], [(-0.43877303), 1.07366684], [(-0.85795321), 0.82980738], [(-0.18430329), 0.52328473], [(-0.65571327), 0.42412021], [(-0.28305528), 0.30284991], [1.06446472, (-1.09279772)], [0.30543283, (-0.02589502)], [(-0.00717161), 0.00318087]]) y_gt = np.array([0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0]) assert_array_equal(X_resampled, X_gt) assert_array_equal(y_resampled, y_gt)
[ "def", "test_iht_fit_sample", "(", ")", ":", "iht", "=", "InstanceHardnessThreshold", "(", "ESTIMATOR", ",", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ")", "=", "iht", ".", "fit_sample", "(", "X", ",", "Y", ")", "X_gt", "=", "np", ".", "array", "(", "[", "[", "(", "-", "0.3879569", ")", ",", "0.6894251", "]", ",", "[", "(", "-", "0.09322739", ")", ",", "1.28177189", "]", ",", "[", "(", "-", "0.77740357", ")", ",", "0.74097941", "]", ",", "[", "0.91542919", ",", "(", "-", "0.65453327", ")", "]", ",", "[", "(", "-", "0.43877303", ")", ",", "1.07366684", "]", ",", "[", "(", "-", "0.85795321", ")", ",", "0.82980738", "]", ",", "[", "(", "-", "0.18430329", ")", ",", "0.52328473", "]", ",", "[", "(", "-", "0.65571327", ")", ",", "0.42412021", "]", ",", "[", "(", "-", "0.28305528", ")", ",", "0.30284991", "]", ",", "[", "1.06446472", ",", "(", "-", "1.09279772", ")", "]", ",", "[", "0.30543283", ",", "(", "-", "0.02589502", ")", "]", ",", "[", "(", "-", "0.00717161", ")", ",", "0.00318087", "]", "]", ")", "y_gt", "=", "np", ".", "array", "(", "[", "0", ",", "1", ",", "1", ",", "0", ",", "1", ",", "1", ",", "1", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", "]", ")", "assert_array_equal", "(", "X_resampled", ",", "X_gt", ")", "assert_array_equal", "(", "y_resampled", ",", "y_gt", ")" ]
test the fit sample routine .
train
false
46,979
def simple_message(msg, parent=None, title=None): dialog = gtk.MessageDialog(parent=None, type=gtk.MESSAGE_INFO, buttons=gtk.BUTTONS_OK, message_format=msg) if (parent is not None): dialog.set_transient_for(parent) if (title is not None): dialog.set_title(title) dialog.show() dialog.run() dialog.destroy() return None
[ "def", "simple_message", "(", "msg", ",", "parent", "=", "None", ",", "title", "=", "None", ")", ":", "dialog", "=", "gtk", ".", "MessageDialog", "(", "parent", "=", "None", ",", "type", "=", "gtk", ".", "MESSAGE_INFO", ",", "buttons", "=", "gtk", ".", "BUTTONS_OK", ",", "message_format", "=", "msg", ")", "if", "(", "parent", "is", "not", "None", ")", ":", "dialog", ".", "set_transient_for", "(", "parent", ")", "if", "(", "title", "is", "not", "None", ")", ":", "dialog", ".", "set_title", "(", "title", ")", "dialog", ".", "show", "(", ")", "dialog", ".", "run", "(", ")", "dialog", ".", "destroy", "(", ")", "return", "None" ]
create a simple message dialog with string msg .
train
false
46,980
def _GetTime(time_string): (hourstr, minutestr) = time_string.split(':') return datetime.time(int(hourstr), int(minutestr))
[ "def", "_GetTime", "(", "time_string", ")", ":", "(", "hourstr", ",", "minutestr", ")", "=", "time_string", ".", "split", "(", "':'", ")", "return", "datetime", ".", "time", "(", "int", "(", "hourstr", ")", ",", "int", "(", "minutestr", ")", ")" ]
converts a string to a datetime .
train
false
46,981
def robots(): if os.path.exists(os.path.join(settings.STATIC_FOLDER, 'robots.local.txt')): robots_file = 'robots.local.txt' else: robots_file = 'robots.txt' return send_from_directory(settings.STATIC_FOLDER, robots_file, mimetype='text/plain')
[ "def", "robots", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "settings", ".", "STATIC_FOLDER", ",", "'robots.local.txt'", ")", ")", ":", "robots_file", "=", "'robots.local.txt'", "else", ":", "robots_file", "=", "'robots.txt'", "return", "send_from_directory", "(", "settings", ".", "STATIC_FOLDER", ",", "robots_file", ",", "mimetype", "=", "'text/plain'", ")" ]
serves the robots .
train
false
46,982
def precision_score(y_real, y_pred): (p, _, _) = precision_recall_fscore(y_real, y_pred) return np.average(p)
[ "def", "precision_score", "(", "y_real", ",", "y_pred", ")", ":", "(", "p", ",", "_", ",", "_", ")", "=", "precision_recall_fscore", "(", "y_real", ",", "y_pred", ")", "return", "np", ".", "average", "(", "p", ")" ]
compute the precision the precision is the ratio :math:tp / where tp is the number of true positives and fp the number of false positives .
train
false
46,983
def check_date(option, opt, value): try: return DateTime.strptime(value, '%Y/%m/%d') except DateTime.Error: raise OptionValueError(('expected format of %s is yyyy/mm/dd' % opt))
[ "def", "check_date", "(", "option", ",", "opt", ",", "value", ")", ":", "try", ":", "return", "DateTime", ".", "strptime", "(", "value", ",", "'%Y/%m/%d'", ")", "except", "DateTime", ".", "Error", ":", "raise", "OptionValueError", "(", "(", "'expected format of %s is yyyy/mm/dd'", "%", "opt", ")", ")" ]
check a file value return the filepath .
train
false
46,985
def get_pid(pidfile): pid = None if os.path.exists(pidfile): with open(pidfile, 'r') as f: pid = f.read() return pid
[ "def", "get_pid", "(", "pidfile", ")", ":", "pid", "=", "None", "if", "os", ".", "path", ".", "exists", "(", "pidfile", ")", ":", "with", "open", "(", "pidfile", ",", "'r'", ")", "as", "f", ":", "pid", "=", "f", ".", "read", "(", ")", "return", "pid" ]
get the pid by trying to access an pid file .
train
false
46,986
def console_auth_token_get_valid(context, token_hash, instance_uuid): return IMPL.console_auth_token_get_valid(context, token_hash, instance_uuid)
[ "def", "console_auth_token_get_valid", "(", "context", ",", "token_hash", ",", "instance_uuid", ")", ":", "return", "IMPL", ".", "console_auth_token_get_valid", "(", "context", ",", "token_hash", ",", "instance_uuid", ")" ]
get a valid console authorization by token_hash and instance_uuid .
train
false
46,987
@register.assignment_tag def assignment_one_param(arg): return ('assignment_one_param - Expected result: %s' % arg)
[ "@", "register", ".", "assignment_tag", "def", "assignment_one_param", "(", "arg", ")", ":", "return", "(", "'assignment_one_param - Expected result: %s'", "%", "arg", ")" ]
expected assignment_one_param __doc__ .
train
false
46,988
def ban_list(): ret = _run_varnishadm('ban.list') if ret['retcode']: return False else: return ret['stdout'].split('\n')[1:]
[ "def", "ban_list", "(", ")", ":", "ret", "=", "_run_varnishadm", "(", "'ban.list'", ")", "if", "ret", "[", "'retcode'", "]", ":", "return", "False", "else", ":", "return", "ret", "[", "'stdout'", "]", ".", "split", "(", "'\\n'", ")", "[", "1", ":", "]" ]
list varnish cache current bans cli example: .
train
false
46,989
def update_all_languages(application_path): path = pjoin(application_path, 'languages/') for language in oslistdir(path): if regex_langfile.match(language): findT(application_path, language[:(-3)])
[ "def", "update_all_languages", "(", "application_path", ")", ":", "path", "=", "pjoin", "(", "application_path", ",", "'languages/'", ")", "for", "language", "in", "oslistdir", "(", "path", ")", ":", "if", "regex_langfile", ".", "match", "(", "language", ")", ":", "findT", "(", "application_path", ",", "language", "[", ":", "(", "-", "3", ")", "]", ")" ]
note: must be run by the admin app .
train
false
46,990
def get_nova_objects(): all_classes = base.NovaObjectRegistry.obj_classes() nova_classes = {} for name in all_classes: objclasses = all_classes[name] if (objclasses[0].OBJ_PROJECT_NAMESPACE != base.NovaObject.OBJ_PROJECT_NAMESPACE): continue nova_classes[name] = objclasses return nova_classes
[ "def", "get_nova_objects", "(", ")", ":", "all_classes", "=", "base", ".", "NovaObjectRegistry", ".", "obj_classes", "(", ")", "nova_classes", "=", "{", "}", "for", "name", "in", "all_classes", ":", "objclasses", "=", "all_classes", "[", "name", "]", "if", "(", "objclasses", "[", "0", "]", ".", "OBJ_PROJECT_NAMESPACE", "!=", "base", ".", "NovaObject", ".", "OBJ_PROJECT_NAMESPACE", ")", ":", "continue", "nova_classes", "[", "name", "]", "=", "objclasses", "return", "nova_classes" ]
get nova versioned objects this returns a dict of versioned objects which are in the nova project namespace only .
train
false
46,992
@frappe.whitelist() def get_fee_schedule(program, student_category=None): fs = frappe.get_list(u'Program Fee', fields=[u'academic_term', u'fee_structure', u'due_date', u'amount'], filters={u'parent': program, u'student_category': student_category}, order_by=u'idx') return fs
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_fee_schedule", "(", "program", ",", "student_category", "=", "None", ")", ":", "fs", "=", "frappe", ".", "get_list", "(", "u'Program Fee'", ",", "fields", "=", "[", "u'academic_term'", ",", "u'fee_structure'", ",", "u'due_date'", ",", "u'amount'", "]", ",", "filters", "=", "{", "u'parent'", ":", "program", ",", "u'student_category'", ":", "student_category", "}", ",", "order_by", "=", "u'idx'", ")", "return", "fs" ]
returns fee schedule .
train
false
46,994
def test_command_line_options_override_env_vars(script, virtualenv): script.environ['PIP_INDEX_URL'] = 'https://b.pypi.python.org/simple/' result = script.pip('install', '-vvv', 'INITools', expect_error=True) assert ('Getting page https://b.pypi.python.org/simple/initools' in result.stdout) virtualenv.clear() result = script.pip('install', '-vvv', '--index-url', 'https://download.zope.org/ppix', 'INITools', expect_error=True) assert ('b.pypi.python.org' not in result.stdout) assert ('Getting page https://download.zope.org/ppix' in result.stdout)
[ "def", "test_command_line_options_override_env_vars", "(", "script", ",", "virtualenv", ")", ":", "script", ".", "environ", "[", "'PIP_INDEX_URL'", "]", "=", "'https://b.pypi.python.org/simple/'", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-vvv'", ",", "'INITools'", ",", "expect_error", "=", "True", ")", "assert", "(", "'Getting page https://b.pypi.python.org/simple/initools'", "in", "result", ".", "stdout", ")", "virtualenv", ".", "clear", "(", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-vvv'", ",", "'--index-url'", ",", "'https://download.zope.org/ppix'", ",", "'INITools'", ",", "expect_error", "=", "True", ")", "assert", "(", "'b.pypi.python.org'", "not", "in", "result", ".", "stdout", ")", "assert", "(", "'Getting page https://download.zope.org/ppix'", "in", "result", ".", "stdout", ")" ]
test that command line options override environmental variables .
train
false
46,996
def patternbroadcast(x, broadcastable): rval = Rebroadcast(*[(i, broadcastable[i]) for i in xrange(len(broadcastable))])(x) return theano.tensor.opt.apply_rebroadcast_opt(rval)
[ "def", "patternbroadcast", "(", "x", ",", "broadcastable", ")", ":", "rval", "=", "Rebroadcast", "(", "*", "[", "(", "i", ",", "broadcastable", "[", "i", "]", ")", "for", "i", "in", "xrange", "(", "len", "(", "broadcastable", ")", ")", "]", ")", "(", "x", ")", "return", "theano", ".", "tensor", ".", "opt", ".", "apply_rebroadcast_opt", "(", "rval", ")" ]
make the input adopt a specific broadcasting pattern .
train
false
46,997
def available_community_plugins(): return _availables_plugins(resources.PLUGINS_COMMUNITY)
[ "def", "available_community_plugins", "(", ")", ":", "return", "_availables_plugins", "(", "resources", ".", "PLUGINS_COMMUNITY", ")" ]
returns a dict with community availables plugins in ninja-ide web page .
train
false
46,998
def uninstall_repl_displayhook(): global _IP_REGISTERED global _INSTALL_FIG_OBSERVER if _IP_REGISTERED: from IPython import get_ipython ip = get_ipython() try: ip.events.unregister(u'post_execute', _IP_REGISTERED) except AttributeError: raise NotImplementedError(u'Can not unregister events in IPython < 2.0') _IP_REGISTERED = None if _INSTALL_FIG_OBSERVER: _INSTALL_FIG_OBSERVER = False
[ "def", "uninstall_repl_displayhook", "(", ")", ":", "global", "_IP_REGISTERED", "global", "_INSTALL_FIG_OBSERVER", "if", "_IP_REGISTERED", ":", "from", "IPython", "import", "get_ipython", "ip", "=", "get_ipython", "(", ")", "try", ":", "ip", ".", "events", ".", "unregister", "(", "u'post_execute'", ",", "_IP_REGISTERED", ")", "except", "AttributeError", ":", "raise", "NotImplementedError", "(", "u'Can not unregister events in IPython < 2.0'", ")", "_IP_REGISTERED", "=", "None", "if", "_INSTALL_FIG_OBSERVER", ":", "_INSTALL_FIG_OBSERVER", "=", "False" ]
uninstalls the matplotlib display hook .
train
false
46,999
def get_resource_events(request, after_commit=False): by_resource = request.bound_data.get('resource_events', {}) events = [] for (action, payload, impacted, request) in by_resource.values(): if after_commit: if (action == ACTIONS.READ): event_cls = AfterResourceRead else: event_cls = AfterResourceChanged elif (action == ACTIONS.READ): event_cls = ResourceRead else: event_cls = ResourceChanged event = event_cls(payload, impacted, request) events.append(event) return events
[ "def", "get_resource_events", "(", "request", ",", "after_commit", "=", "False", ")", ":", "by_resource", "=", "request", ".", "bound_data", ".", "get", "(", "'resource_events'", ",", "{", "}", ")", "events", "=", "[", "]", "for", "(", "action", ",", "payload", ",", "impacted", ",", "request", ")", "in", "by_resource", ".", "values", "(", ")", ":", "if", "after_commit", ":", "if", "(", "action", "==", "ACTIONS", ".", "READ", ")", ":", "event_cls", "=", "AfterResourceRead", "else", ":", "event_cls", "=", "AfterResourceChanged", "elif", "(", "action", "==", "ACTIONS", ".", "READ", ")", ":", "event_cls", "=", "ResourceRead", "else", ":", "event_cls", "=", "ResourceChanged", "event", "=", "event_cls", "(", "payload", ",", "impacted", ",", "request", ")", "events", ".", "append", "(", "event", ")", "return", "events" ]
request helper to return the list of events triggered on resources .
train
false
47,000
def _atleast_nd_transform(min_ndim, axes): assert (min_ndim == len(axes)) def transform(context, builder, arr, arrty, retty): for i in range(min_ndim): ndim = (i + 1) if (arrty.ndim < ndim): axis = cgutils.intp_t(axes[i]) newarrty = arrty.copy(ndim=(arrty.ndim + 1)) arr = expand_dims(context, builder, typing.signature(newarrty, arrty), (arr,), axis) arrty = newarrty return arr return transform
[ "def", "_atleast_nd_transform", "(", "min_ndim", ",", "axes", ")", ":", "assert", "(", "min_ndim", "==", "len", "(", "axes", ")", ")", "def", "transform", "(", "context", ",", "builder", ",", "arr", ",", "arrty", ",", "retty", ")", ":", "for", "i", "in", "range", "(", "min_ndim", ")", ":", "ndim", "=", "(", "i", "+", "1", ")", "if", "(", "arrty", ".", "ndim", "<", "ndim", ")", ":", "axis", "=", "cgutils", ".", "intp_t", "(", "axes", "[", "i", "]", ")", "newarrty", "=", "arrty", ".", "copy", "(", "ndim", "=", "(", "arrty", ".", "ndim", "+", "1", ")", ")", "arr", "=", "expand_dims", "(", "context", ",", "builder", ",", "typing", ".", "signature", "(", "newarrty", ",", "arrty", ")", ",", "(", "arr", ",", ")", ",", "axis", ")", "arrty", "=", "newarrty", "return", "arr", "return", "transform" ]
return a callback successively inserting 1-sized dimensions at the following axes .
train
false
47,001
def standardize(layer, offset, scale, shared_axes='auto'): layer = BiasLayer(layer, (- offset), shared_axes) layer.params[layer.b].remove('trainable') layer = ScaleLayer(layer, (floatX(1.0) / scale), shared_axes) layer.params[layer.scales].remove('trainable') return layer
[ "def", "standardize", "(", "layer", ",", "offset", ",", "scale", ",", "shared_axes", "=", "'auto'", ")", ":", "layer", "=", "BiasLayer", "(", "layer", ",", "(", "-", "offset", ")", ",", "shared_axes", ")", "layer", ".", "params", "[", "layer", ".", "b", "]", ".", "remove", "(", "'trainable'", ")", "layer", "=", "ScaleLayer", "(", "layer", ",", "(", "floatX", "(", "1.0", ")", "/", "scale", ")", ",", "shared_axes", ")", "layer", ".", "params", "[", "layer", ".", "scales", "]", ".", "remove", "(", "'trainable'", ")", "return", "layer" ]
convenience function for standardizing inputs by applying a fixed offset and scale .
train
false
47,002
def _format_auth_line(key, enc, comment, options): line = '' if options: line += '{0} '.format(','.join(options)) line += '{0} {1} {2}\n'.format(enc, key, comment) return line
[ "def", "_format_auth_line", "(", "key", ",", "enc", ",", "comment", ",", "options", ")", ":", "line", "=", "''", "if", "options", ":", "line", "+=", "'{0} '", ".", "format", "(", "','", ".", "join", "(", "options", ")", ")", "line", "+=", "'{0} {1} {2}\\n'", ".", "format", "(", "enc", ",", "key", ",", "comment", ")", "return", "line" ]
properly format user input .
train
true
47,003
def GetFlow(state=None): return client.OAuth2WebServerFlow(client_id=CLIENT_ID, client_secret=CLIENT_SECRET, scope='https://www.googleapis.com/auth/sqlservice', user_agent=USER_AGENT, state=state)
[ "def", "GetFlow", "(", "state", "=", "None", ")", ":", "return", "client", ".", "OAuth2WebServerFlow", "(", "client_id", "=", "CLIENT_ID", ",", "client_secret", "=", "CLIENT_SECRET", ",", "scope", "=", "'https://www.googleapis.com/auth/sqlservice'", ",", "user_agent", "=", "USER_AGENT", ",", "state", "=", "state", ")" ]
get a client .
train
false
47,004
def _validate_family(family): if ((sys.platform != 'win32') and (family == 'AF_PIPE')): raise ValueError(('Family %s is not recognized.' % family)) if ((sys.platform == 'win32') and (family == 'AF_UNIX')): if (not hasattr(socket, family)): raise ValueError(('Family %s is not recognized.' % family))
[ "def", "_validate_family", "(", "family", ")", ":", "if", "(", "(", "sys", ".", "platform", "!=", "'win32'", ")", "and", "(", "family", "==", "'AF_PIPE'", ")", ")", ":", "raise", "ValueError", "(", "(", "'Family %s is not recognized.'", "%", "family", ")", ")", "if", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "and", "(", "family", "==", "'AF_UNIX'", ")", ")", ":", "if", "(", "not", "hasattr", "(", "socket", ",", "family", ")", ")", ":", "raise", "ValueError", "(", "(", "'Family %s is not recognized.'", "%", "family", ")", ")" ]
checks if the family is valid for the current environment .
train
false
47,005
def parse_tree(text, strict=False): count = 0 l = len(text) while (count < l): mode_end = text.index(' ', count) mode_text = text[count:mode_end] if (strict and mode_text.startswith('0')): raise ObjectFormatException(("Invalid mode '%s'" % mode_text)) try: mode = int(mode_text, 8) except ValueError: raise ObjectFormatException(("Invalid mode '%s'" % mode_text)) name_end = text.index('\x00', mode_end) name = text[(mode_end + 1):name_end] count = (name_end + 21) sha = text[(name_end + 1):count] if (len(sha) != 20): raise ObjectFormatException('Sha has invalid length') hexsha = sha_to_hex(sha) (yield (name, mode, hexsha))
[ "def", "parse_tree", "(", "text", ",", "strict", "=", "False", ")", ":", "count", "=", "0", "l", "=", "len", "(", "text", ")", "while", "(", "count", "<", "l", ")", ":", "mode_end", "=", "text", ".", "index", "(", "' '", ",", "count", ")", "mode_text", "=", "text", "[", "count", ":", "mode_end", "]", "if", "(", "strict", "and", "mode_text", ".", "startswith", "(", "'0'", ")", ")", ":", "raise", "ObjectFormatException", "(", "(", "\"Invalid mode '%s'\"", "%", "mode_text", ")", ")", "try", ":", "mode", "=", "int", "(", "mode_text", ",", "8", ")", "except", "ValueError", ":", "raise", "ObjectFormatException", "(", "(", "\"Invalid mode '%s'\"", "%", "mode_text", ")", ")", "name_end", "=", "text", ".", "index", "(", "'\\x00'", ",", "mode_end", ")", "name", "=", "text", "[", "(", "mode_end", "+", "1", ")", ":", "name_end", "]", "count", "=", "(", "name_end", "+", "21", ")", "sha", "=", "text", "[", "(", "name_end", "+", "1", ")", ":", "count", "]", "if", "(", "len", "(", "sha", ")", "!=", "20", ")", ":", "raise", "ObjectFormatException", "(", "'Sha has invalid length'", ")", "hexsha", "=", "sha_to_hex", "(", "sha", ")", "(", "yield", "(", "name", ",", "mode", ",", "hexsha", ")", ")" ]
parse a tree text .
train
false
47,006
def enable_if(cond, true_value, false_value=None): if cond: ret = true_value else: ret = false_value if (ret is None): ret = [] return ret
[ "def", "enable_if", "(", "cond", ",", "true_value", ",", "false_value", "=", "None", ")", ":", "if", "cond", ":", "ret", "=", "true_value", "else", ":", "ret", "=", "false_value", "if", "(", "ret", "is", "None", ")", ":", "ret", "=", "[", "]", "return", "ret" ]
a global function can be called in build to filter srcs/deps by target .
train
false
47,007
def bucket(series, k, by=None): if (by is None): by = series else: by = by.reindex(series.index) split = _split_quantile(by, k) mat = (np.empty((len(series), k), dtype=float) * np.NaN) for (i, v) in enumerate(split): mat[:, i][v] = series.take(v) return DataFrame(mat, index=series.index, columns=(np.arange(k) + 1))
[ "def", "bucket", "(", "series", ",", "k", ",", "by", "=", "None", ")", ":", "if", "(", "by", "is", "None", ")", ":", "by", "=", "series", "else", ":", "by", "=", "by", ".", "reindex", "(", "series", ".", "index", ")", "split", "=", "_split_quantile", "(", "by", ",", "k", ")", "mat", "=", "(", "np", ".", "empty", "(", "(", "len", "(", "series", ")", ",", "k", ")", ",", "dtype", "=", "float", ")", "*", "np", ".", "NaN", ")", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "split", ")", ":", "mat", "[", ":", ",", "i", "]", "[", "v", "]", "=", "series", ".", "take", "(", "v", ")", "return", "DataFrame", "(", "mat", ",", "index", "=", "series", ".", "index", ",", "columns", "=", "(", "np", ".", "arange", "(", "k", ")", "+", "1", ")", ")" ]
produce dataframe representing quantiles of a series parameters series : series k : int number of quantiles by : series or same-length array bucket by value returns dataframe .
train
false
47,009
def verifyPassword(password, hash): try: (digestname, iterations, salt, pwHash) = hash.split(Delimiter) except ValueError: raise ValueError(u"Expected hash encoded password in format 'digestmod{0}iterations{0}salt{0}hashed_password".format(Delimiter)) if (digestname not in Hashes.keys()): raise ValueError(u"Unsupported hash algorithm '{0}' for hash encoded password '{1}'.".format(digestname, hash)) iterations = int(iterations) salt = base64.b64decode(salt.encode(u'ascii')) pwHash = base64.b64decode(pwHash.encode(u'ascii')) password = password.encode(u'utf-8') return (pwHash == pbkdf2(password, salt, iterations, Hashes[digestname]))
[ "def", "verifyPassword", "(", "password", ",", "hash", ")", ":", "try", ":", "(", "digestname", ",", "iterations", ",", "salt", ",", "pwHash", ")", "=", "hash", ".", "split", "(", "Delimiter", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "u\"Expected hash encoded password in format 'digestmod{0}iterations{0}salt{0}hashed_password\"", ".", "format", "(", "Delimiter", ")", ")", "if", "(", "digestname", "not", "in", "Hashes", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "u\"Unsupported hash algorithm '{0}' for hash encoded password '{1}'.\"", ".", "format", "(", "digestname", ",", "hash", ")", ")", "iterations", "=", "int", "(", "iterations", ")", "salt", "=", "base64", ".", "b64decode", "(", "salt", ".", "encode", "(", "u'ascii'", ")", ")", "pwHash", "=", "base64", ".", "b64decode", "(", "pwHash", ".", "encode", "(", "u'ascii'", ")", ")", "password", "=", "password", ".", "encode", "(", "u'utf-8'", ")", "return", "(", "pwHash", "==", "pbkdf2", "(", "password", ",", "salt", ",", "iterations", ",", "Hashes", "[", "digestname", "]", ")", ")" ]
module function to verify a password against a hash encoded password .
train
false
47,010
def Rfc822MsgFromString(xml_string): return atom.CreateClassFromXMLString(Rfc822Msg, xml_string)
[ "def", "Rfc822MsgFromString", "(", "xml_string", ")", ":", "return", "atom", ".", "CreateClassFromXMLString", "(", "Rfc822Msg", ",", "xml_string", ")" ]
parse in the rrc822 message from the xml definition .
train
false
47,012
def crc64(s): crcl = 0 crch = 0 for c in s: shr = ((crch & 255) << 24) temp1h = (crch >> 8) temp1l = ((crcl >> 8) | shr) idx = ((crcl ^ ord(c)) & 255) crch = (temp1h ^ _table_h[idx]) crcl = temp1l return ('CRC-%08X%08X' % (crch, crcl))
[ "def", "crc64", "(", "s", ")", ":", "crcl", "=", "0", "crch", "=", "0", "for", "c", "in", "s", ":", "shr", "=", "(", "(", "crch", "&", "255", ")", "<<", "24", ")", "temp1h", "=", "(", "crch", ">>", "8", ")", "temp1l", "=", "(", "(", "crcl", ">>", "8", ")", "|", "shr", ")", "idx", "=", "(", "(", "crcl", "^", "ord", "(", "c", ")", ")", "&", "255", ")", "crch", "=", "(", "temp1h", "^", "_table_h", "[", "idx", "]", ")", "crcl", "=", "temp1l", "return", "(", "'CRC-%08X%08X'", "%", "(", "crch", ",", "crcl", ")", ")" ]
returns the crc64 checksum for a sequence .
train
false
47,013
@core_helper def has_more_facets(facet, limit=None, exclude_active=False): facets = [] for facet_item in c.search_facets.get(facet)['items']: if (not len(facet_item['name'].strip())): continue if (not ((facet, facet_item['name']) in request.params.items())): facets.append(dict(active=False, **facet_item)) elif (not exclude_active): facets.append(dict(active=True, **facet_item)) if (c.search_facets_limits and (limit is None)): limit = c.search_facets_limits.get(facet) if ((limit is not None) and (len(facets) > limit)): return True return False
[ "@", "core_helper", "def", "has_more_facets", "(", "facet", ",", "limit", "=", "None", ",", "exclude_active", "=", "False", ")", ":", "facets", "=", "[", "]", "for", "facet_item", "in", "c", ".", "search_facets", ".", "get", "(", "facet", ")", "[", "'items'", "]", ":", "if", "(", "not", "len", "(", "facet_item", "[", "'name'", "]", ".", "strip", "(", ")", ")", ")", ":", "continue", "if", "(", "not", "(", "(", "facet", ",", "facet_item", "[", "'name'", "]", ")", "in", "request", ".", "params", ".", "items", "(", ")", ")", ")", ":", "facets", ".", "append", "(", "dict", "(", "active", "=", "False", ",", "**", "facet_item", ")", ")", "elif", "(", "not", "exclude_active", ")", ":", "facets", ".", "append", "(", "dict", "(", "active", "=", "True", ",", "**", "facet_item", ")", ")", "if", "(", "c", ".", "search_facets_limits", "and", "(", "limit", "is", "None", ")", ")", ":", "limit", "=", "c", ".", "search_facets_limits", ".", "get", "(", "facet", ")", "if", "(", "(", "limit", "is", "not", "None", ")", "and", "(", "len", "(", "facets", ")", ">", "limit", ")", ")", ":", "return", "True", "return", "False" ]
returns true if there are more facet items for the given facet than the limit .
train
false
47,016
def all_cgroup_delete(): try: utils.run('cgclear', ignore_status=False) except error.CmdError as detail: logging.warn('cgclear: Fail to clear all cgroups, some specific system cgroups might exist and affect further testing.')
[ "def", "all_cgroup_delete", "(", ")", ":", "try", ":", "utils", ".", "run", "(", "'cgclear'", ",", "ignore_status", "=", "False", ")", "except", "error", ".", "CmdError", "as", "detail", ":", "logging", ".", "warn", "(", "'cgclear: Fail to clear all cgroups, some specific system cgroups might exist and affect further testing.'", ")" ]
clear all cgroups in system .
train
false
47,018
def _check_termination(factors, n, limitp1, use_trial, use_rho, use_pm1, verbose): if verbose: print('Check for termination') p = perfect_power(n, factor=False) if (p is not False): (base, exp) = p if limitp1: limit = (limitp1 - 1) else: limit = limitp1 facs = factorint(base, limit, use_trial, use_rho, use_pm1, verbose=False) for (b, e) in facs.items(): if verbose: print((factor_msg % (b, e))) factors[b] = (exp * e) raise StopIteration if isprime(n): factors[int(n)] = 1 raise StopIteration if (n == 1): raise StopIteration
[ "def", "_check_termination", "(", "factors", ",", "n", ",", "limitp1", ",", "use_trial", ",", "use_rho", ",", "use_pm1", ",", "verbose", ")", ":", "if", "verbose", ":", "print", "(", "'Check for termination'", ")", "p", "=", "perfect_power", "(", "n", ",", "factor", "=", "False", ")", "if", "(", "p", "is", "not", "False", ")", ":", "(", "base", ",", "exp", ")", "=", "p", "if", "limitp1", ":", "limit", "=", "(", "limitp1", "-", "1", ")", "else", ":", "limit", "=", "limitp1", "facs", "=", "factorint", "(", "base", ",", "limit", ",", "use_trial", ",", "use_rho", ",", "use_pm1", ",", "verbose", "=", "False", ")", "for", "(", "b", ",", "e", ")", "in", "facs", ".", "items", "(", ")", ":", "if", "verbose", ":", "print", "(", "(", "factor_msg", "%", "(", "b", ",", "e", ")", ")", ")", "factors", "[", "b", "]", "=", "(", "exp", "*", "e", ")", "raise", "StopIteration", "if", "isprime", "(", "n", ")", ":", "factors", "[", "int", "(", "n", ")", "]", "=", "1", "raise", "StopIteration", "if", "(", "n", "==", "1", ")", ":", "raise", "StopIteration" ]
helper function for integer factorization .
train
false
47,020
def _group_matching(tlist, cls): opens = [] tidx_offset = 0 for (idx, token) in enumerate(list(tlist)): tidx = (idx - tidx_offset) if token.is_whitespace(): continue if (token.is_group() and (not isinstance(token, cls))): _group_matching(token, cls) continue if token.match(*cls.M_OPEN): opens.append(tidx) elif token.match(*cls.M_CLOSE): try: open_idx = opens.pop() except IndexError: continue close_idx = tidx tlist.group_tokens(cls, open_idx, close_idx) tidx_offset += (close_idx - open_idx)
[ "def", "_group_matching", "(", "tlist", ",", "cls", ")", ":", "opens", "=", "[", "]", "tidx_offset", "=", "0", "for", "(", "idx", ",", "token", ")", "in", "enumerate", "(", "list", "(", "tlist", ")", ")", ":", "tidx", "=", "(", "idx", "-", "tidx_offset", ")", "if", "token", ".", "is_whitespace", "(", ")", ":", "continue", "if", "(", "token", ".", "is_group", "(", ")", "and", "(", "not", "isinstance", "(", "token", ",", "cls", ")", ")", ")", ":", "_group_matching", "(", "token", ",", "cls", ")", "continue", "if", "token", ".", "match", "(", "*", "cls", ".", "M_OPEN", ")", ":", "opens", ".", "append", "(", "tidx", ")", "elif", "token", ".", "match", "(", "*", "cls", ".", "M_CLOSE", ")", ":", "try", ":", "open_idx", "=", "opens", ".", "pop", "(", ")", "except", "IndexError", ":", "continue", "close_idx", "=", "tidx", "tlist", ".", "group_tokens", "(", "cls", ",", "open_idx", ",", "close_idx", ")", "tidx_offset", "+=", "(", "close_idx", "-", "open_idx", ")" ]
groups tokens that have beginning and end .
train
true
47,022
def test_follow_import_incomplete(): datetime = check_follow_definition_types('import itertool') assert (datetime == ['module']) itert = jedi.Script('from itertools import ').completions() definitions = [d for d in itert if (d.name == 'chain')] assert (len(definitions) == 1) assert ([d.type for d in definitions[0].follow_definition()] == ['class']) datetime = check_follow_definition_types('from datetime import datetim') assert (set(datetime) == set(['class', 'instance'])) ospath = check_follow_definition_types('from os.path import abspat') assert (ospath == ['function']) alias = check_follow_definition_types('import io as abcd; abcd') assert (alias == ['module'])
[ "def", "test_follow_import_incomplete", "(", ")", ":", "datetime", "=", "check_follow_definition_types", "(", "'import itertool'", ")", "assert", "(", "datetime", "==", "[", "'module'", "]", ")", "itert", "=", "jedi", ".", "Script", "(", "'from itertools import '", ")", ".", "completions", "(", ")", "definitions", "=", "[", "d", "for", "d", "in", "itert", "if", "(", "d", ".", "name", "==", "'chain'", ")", "]", "assert", "(", "len", "(", "definitions", ")", "==", "1", ")", "assert", "(", "[", "d", ".", "type", "for", "d", "in", "definitions", "[", "0", "]", ".", "follow_definition", "(", ")", "]", "==", "[", "'class'", "]", ")", "datetime", "=", "check_follow_definition_types", "(", "'from datetime import datetim'", ")", "assert", "(", "set", "(", "datetime", ")", "==", "set", "(", "[", "'class'", ",", "'instance'", "]", ")", ")", "ospath", "=", "check_follow_definition_types", "(", "'from os.path import abspat'", ")", "assert", "(", "ospath", "==", "[", "'function'", "]", ")", "alias", "=", "check_follow_definition_types", "(", "'import io as abcd; abcd'", ")", "assert", "(", "alias", "==", "[", "'module'", "]", ")" ]
completion on incomplete imports should always take the full completion to do any evaluation .
train
false
47,023
def compile_all_projects(projects, extra_args=''): failed_clients = [] for (project, clients) in enumerate_projects().iteritems(): for client in clients: project_client = ('%s.%s' % (project, client)) if (not compile_and_install_client(project_client, extra_args)): failed_clients.append(project_client) return failed_clients
[ "def", "compile_all_projects", "(", "projects", ",", "extra_args", "=", "''", ")", ":", "failed_clients", "=", "[", "]", "for", "(", "project", ",", "clients", ")", "in", "enumerate_projects", "(", ")", ".", "iteritems", "(", ")", ":", "for", "client", "in", "clients", ":", "project_client", "=", "(", "'%s.%s'", "%", "(", "project", ",", "client", ")", ")", "if", "(", "not", "compile_and_install_client", "(", "project_client", ",", "extra_args", ")", ")", ":", "failed_clients", ".", "append", "(", "project_client", ")", "return", "failed_clients" ]
compile all projects available as defined by enumerate_projects .
train
false
47,025
@require_context def volume_attachment_get_all_by_project(context, project_id, filters=None, marker=None, limit=None, offset=None, sort_keys=None, sort_dirs=None): authorize_project_context(context, project_id) if (not filters): filters = {} else: filters = filters.copy() filters['project_id'] = project_id return _attachment_get_all(context, filters, marker, limit, offset, sort_keys, sort_dirs)
[ "@", "require_context", "def", "volume_attachment_get_all_by_project", "(", "context", ",", "project_id", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "offset", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ")", ":", "authorize_project_context", "(", "context", ",", "project_id", ")", "if", "(", "not", "filters", ")", ":", "filters", "=", "{", "}", "else", ":", "filters", "=", "filters", ".", "copy", "(", ")", "filters", "[", "'project_id'", "]", "=", "project_id", "return", "_attachment_get_all", "(", "context", ",", "filters", ",", "marker", ",", "limit", ",", "offset", ",", "sort_keys", ",", "sort_dirs", ")" ]
retrieve all attachment records for specific project .
train
false
47,026
def test_single_scenario_single_scenario(): feature = Feature.from_string(FEATURE11) first_scenario = feature.scenarios[0] assert that(first_scenario.tags).deep_equals(['many', 'other', 'basic', 'tags', 'here', ':)'])
[ "def", "test_single_scenario_single_scenario", "(", ")", ":", "feature", "=", "Feature", ".", "from_string", "(", "FEATURE11", ")", "first_scenario", "=", "feature", ".", "scenarios", "[", "0", "]", "assert", "that", "(", "first_scenario", ".", "tags", ")", ".", "deep_equals", "(", "[", "'many'", ",", "'other'", ",", "'basic'", ",", "'tags'", ",", "'here'", ",", "':)'", "]", ")" ]
features should have at least the first scenario parsed with tags .
train
false
47,028
def gen_mac(prefix='AC:DE:48'): return '{0}:{1:02X}:{2:02X}:{3:02X}'.format(prefix, random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
[ "def", "gen_mac", "(", "prefix", "=", "'AC:DE:48'", ")", ":", "return", "'{0}:{1:02X}:{2:02X}:{3:02X}'", ".", "format", "(", "prefix", ",", "random", ".", "randint", "(", "0", ",", "255", ")", ",", "random", ".", "randint", "(", "0", ",", "255", ")", ",", "random", ".", "randint", "(", "0", ",", "255", ")", ")" ]
generates a mac address with the defined oui prefix .
train
true
47,029
def assertRequestGenerationFailed(self, deferred, reasonTypes): return assertWrapperExceptionTypes(self, deferred, RequestGenerationFailed, reasonTypes)
[ "def", "assertRequestGenerationFailed", "(", "self", ",", "deferred", ",", "reasonTypes", ")", ":", "return", "assertWrapperExceptionTypes", "(", "self", ",", "deferred", ",", "RequestGenerationFailed", ",", "reasonTypes", ")" ]
a simple helper to invoke l{assertwrapperexceptiontypes} with a c{maintype} of l{requestgenerationfailed} .
train
false
47,030
def _read_channel(fid): out = {'chan_label': read_str(fid, 16), 'chan_no': read_int16(fid), 'attributes': read_int16(fid), 'scale': read_float(fid), 'yaxis_label': read_str(fid, 16), 'valid_min_max': read_int16(fid)} fid.seek(6, 1) out.update({'ymin': read_double(fid), 'ymax': read_double(fid), 'index': read_int32(fid), 'checksum': read_int32(fid), 'off_flag': read_str(fid, 16), 'offset': read_float(fid)}) fid.seek(12, 1) return out
[ "def", "_read_channel", "(", "fid", ")", ":", "out", "=", "{", "'chan_label'", ":", "read_str", "(", "fid", ",", "16", ")", ",", "'chan_no'", ":", "read_int16", "(", "fid", ")", ",", "'attributes'", ":", "read_int16", "(", "fid", ")", ",", "'scale'", ":", "read_float", "(", "fid", ")", ",", "'yaxis_label'", ":", "read_str", "(", "fid", ",", "16", ")", ",", "'valid_min_max'", ":", "read_int16", "(", "fid", ")", "}", "fid", ".", "seek", "(", "6", ",", "1", ")", "out", ".", "update", "(", "{", "'ymin'", ":", "read_double", "(", "fid", ")", ",", "'ymax'", ":", "read_double", "(", "fid", ")", ",", "'index'", ":", "read_int32", "(", "fid", ")", ",", "'checksum'", ":", "read_int32", "(", "fid", ")", ",", "'off_flag'", ":", "read_str", "(", "fid", ",", "16", ")", ",", "'offset'", ":", "read_float", "(", "fid", ")", "}", ")", "fid", ".", "seek", "(", "12", ",", "1", ")", "return", "out" ]
read bti pdf channel .
train
false
47,031
def _instantiate_backend_from_name(name, options): try: parts = name.split('.') module_name = '.'.join(parts[:(-1)]) class_name = parts[(-1)] except IndexError: raise ValueError(('Invalid event track backend %s' % name)) try: module = import_module(module_name) cls = getattr(module, class_name) if ((not inspect.isclass(cls)) or (not issubclass(cls, BaseBackend))): raise TypeError except (ValueError, AttributeError, TypeError, ImportError): raise ValueError(('Cannot find event track backend %s' % name)) backend = cls(**options) return backend
[ "def", "_instantiate_backend_from_name", "(", "name", ",", "options", ")", ":", "try", ":", "parts", "=", "name", ".", "split", "(", "'.'", ")", "module_name", "=", "'.'", ".", "join", "(", "parts", "[", ":", "(", "-", "1", ")", "]", ")", "class_name", "=", "parts", "[", "(", "-", "1", ")", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "(", "'Invalid event track backend %s'", "%", "name", ")", ")", "try", ":", "module", "=", "import_module", "(", "module_name", ")", "cls", "=", "getattr", "(", "module", ",", "class_name", ")", "if", "(", "(", "not", "inspect", ".", "isclass", "(", "cls", ")", ")", "or", "(", "not", "issubclass", "(", "cls", ",", "BaseBackend", ")", ")", ")", ":", "raise", "TypeError", "except", "(", "ValueError", ",", "AttributeError", ",", "TypeError", ",", "ImportError", ")", ":", "raise", "ValueError", "(", "(", "'Cannot find event track backend %s'", "%", "name", ")", ")", "backend", "=", "cls", "(", "**", "options", ")", "return", "backend" ]
instantiate an event tracker backend from the full module path to the backend class .
train
false
47,032
def convert_post_to_VERB(request, verb): if (request.method == verb): if hasattr(request, u'_post'): del request._post del request._files try: request.method = u'POST' request._load_post_and_files() request.method = verb except AttributeError: request.META[u'REQUEST_METHOD'] = u'POST' request._load_post_and_files() request.META[u'REQUEST_METHOD'] = verb setattr(request, verb, request.POST) return request
[ "def", "convert_post_to_VERB", "(", "request", ",", "verb", ")", ":", "if", "(", "request", ".", "method", "==", "verb", ")", ":", "if", "hasattr", "(", "request", ",", "u'_post'", ")", ":", "del", "request", ".", "_post", "del", "request", ".", "_files", "try", ":", "request", ".", "method", "=", "u'POST'", "request", ".", "_load_post_and_files", "(", ")", "request", ".", "method", "=", "verb", "except", "AttributeError", ":", "request", ".", "META", "[", "u'REQUEST_METHOD'", "]", "=", "u'POST'", "request", ".", "_load_post_and_files", "(", ")", "request", ".", "META", "[", "u'REQUEST_METHOD'", "]", "=", "verb", "setattr", "(", "request", ",", "verb", ",", "request", ".", "POST", ")", "return", "request" ]
force django to process the verb .
train
false
47,033
def _triangle_neighbors(tris, npts): verts = tris.ravel() counts = np.bincount(verts, minlength=npts) reord = np.argsort(verts) tri_idx = np.unravel_index(reord, (len(tris), 3))[0] idx = np.cumsum(np.r_[(0, counts)]) neighbor_tri = [np.sort(tri_idx[v1:v2]) for (v1, v2) in zip(idx[:(-1)], idx[1:])] return neighbor_tri
[ "def", "_triangle_neighbors", "(", "tris", ",", "npts", ")", ":", "verts", "=", "tris", ".", "ravel", "(", ")", "counts", "=", "np", ".", "bincount", "(", "verts", ",", "minlength", "=", "npts", ")", "reord", "=", "np", ".", "argsort", "(", "verts", ")", "tri_idx", "=", "np", ".", "unravel_index", "(", "reord", ",", "(", "len", "(", "tris", ")", ",", "3", ")", ")", "[", "0", "]", "idx", "=", "np", ".", "cumsum", "(", "np", ".", "r_", "[", "(", "0", ",", "counts", ")", "]", ")", "neighbor_tri", "=", "[", "np", ".", "sort", "(", "tri_idx", "[", "v1", ":", "v2", "]", ")", "for", "(", "v1", ",", "v2", ")", "in", "zip", "(", "idx", "[", ":", "(", "-", "1", ")", "]", ",", "idx", "[", "1", ":", "]", ")", "]", "return", "neighbor_tri" ]
efficiently compute vertex neighboring triangles .
train
false
47,034
def get_index_by_name(table, name): for index in table.indexes: if (index.name == name): return index
[ "def", "get_index_by_name", "(", "table", ",", "name", ")", ":", "for", "index", "in", "table", ".", "indexes", ":", "if", "(", "index", ".", "name", "==", "name", ")", ":", "return", "index" ]
find declaratively defined index from table by name .
train
false
47,036
def plot_mts(Y, names=None, index=None): import matplotlib.pyplot as plt k = Y.shape[1] (rows, cols) = (k, 1) plt.figure(figsize=(10, 10)) for j in range(k): ts = Y[:, j] ax = plt.subplot(rows, cols, (j + 1)) if (index is not None): ax.plot(index, ts) else: ax.plot(ts) if (names is not None): ax.set_title(names[j])
[ "def", "plot_mts", "(", "Y", ",", "names", "=", "None", ",", "index", "=", "None", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "k", "=", "Y", ".", "shape", "[", "1", "]", "(", "rows", ",", "cols", ")", "=", "(", "k", ",", "1", ")", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "10", ")", ")", "for", "j", "in", "range", "(", "k", ")", ":", "ts", "=", "Y", "[", ":", ",", "j", "]", "ax", "=", "plt", ".", "subplot", "(", "rows", ",", "cols", ",", "(", "j", "+", "1", ")", ")", "if", "(", "index", "is", "not", "None", ")", ":", "ax", ".", "plot", "(", "index", ",", "ts", ")", "else", ":", "ax", ".", "plot", "(", "ts", ")", "if", "(", "names", "is", "not", "None", ")", ":", "ax", ".", "set_title", "(", "names", "[", "j", "]", ")" ]
plot multiple time series .
train
false
47,037
def test_num_reads(): reads_count = 0 rparser = ReadParser(utils.get_test_data('100-reads.fq.gz')) for _ in rparser: reads_count += 1 assert (reads_count == 100) assert (rparser.num_reads == 100)
[ "def", "test_num_reads", "(", ")", ":", "reads_count", "=", "0", "rparser", "=", "ReadParser", "(", "utils", ".", "get_test_data", "(", "'100-reads.fq.gz'", ")", ")", "for", "_", "in", "rparser", ":", "reads_count", "+=", "1", "assert", "(", "reads_count", "==", "100", ")", "assert", "(", "rparser", ".", "num_reads", "==", "100", ")" ]
test readparser .
train
false
47,038
def getStepKey(x, y): return (x, y)
[ "def", "getStepKey", "(", "x", ",", "y", ")", ":", "return", "(", "x", ",", "y", ")" ]
get step key for x and y .
train
false
47,039
def logistic_log_gradient_i(x_i, y_i, beta): return [logistic_log_partial_ij(x_i, y_i, beta, j) for (j, _) in enumerate(beta)]
[ "def", "logistic_log_gradient_i", "(", "x_i", ",", "y_i", ",", "beta", ")", ":", "return", "[", "logistic_log_partial_ij", "(", "x_i", ",", "y_i", ",", "beta", ",", "j", ")", "for", "(", "j", ",", "_", ")", "in", "enumerate", "(", "beta", ")", "]" ]
the gradient of the log likelihood corresponding to the i-th data point .
train
false
47,040
def invalidate_zone_stack_cache(document, async=False): pks = ([document.pk] + [parent.pk for parent in document.get_topic_parents()]) job = DocumentZoneStackJob() if async: invalidator = job.invalidate else: invalidator = job.refresh for pk in pks: invalidator(pk)
[ "def", "invalidate_zone_stack_cache", "(", "document", ",", "async", "=", "False", ")", ":", "pks", "=", "(", "[", "document", ".", "pk", "]", "+", "[", "parent", ".", "pk", "for", "parent", "in", "document", ".", "get_topic_parents", "(", ")", "]", ")", "job", "=", "DocumentZoneStackJob", "(", ")", "if", "async", ":", "invalidator", "=", "job", ".", "invalidate", "else", ":", "invalidator", "=", "job", ".", "refresh", "for", "pk", "in", "pks", ":", "invalidator", "(", "pk", ")" ]
reset the cache for the zone stack for all of the documents in the document tree branch .
train
false
47,041
def __FindSupportedVersion(protocol, server, port, path, preferredApiVersions, sslContext): serviceVersionDescription = __GetServiceVersionDescription(protocol, server, port, path, sslContext) if (serviceVersionDescription is None): return None if (not isinstance(preferredApiVersions, list)): preferredApiVersions = [preferredApiVersions] for desiredVersion in preferredApiVersions: if __VersionIsSupported(desiredVersion, serviceVersionDescription): return desiredVersion return None
[ "def", "__FindSupportedVersion", "(", "protocol", ",", "server", ",", "port", ",", "path", ",", "preferredApiVersions", ",", "sslContext", ")", ":", "serviceVersionDescription", "=", "__GetServiceVersionDescription", "(", "protocol", ",", "server", ",", "port", ",", "path", ",", "sslContext", ")", "if", "(", "serviceVersionDescription", "is", "None", ")", ":", "return", "None", "if", "(", "not", "isinstance", "(", "preferredApiVersions", ",", "list", ")", ")", ":", "preferredApiVersions", "=", "[", "preferredApiVersions", "]", "for", "desiredVersion", "in", "preferredApiVersions", ":", "if", "__VersionIsSupported", "(", "desiredVersion", ",", "serviceVersionDescription", ")", ":", "return", "desiredVersion", "return", "None" ]
private method that returns the most preferred api version supported by the specified server .
train
true
47,044
def concatenate_join_units(join_units, concat_axis, copy): if ((concat_axis == 0) and (len(join_units) > 1)): raise AssertionError('Concatenating join units along axis0') (empty_dtype, upcasted_na) = get_empty_dtype_and_na(join_units) to_concat = [ju.get_reindexed_values(empty_dtype=empty_dtype, upcasted_na=upcasted_na) for ju in join_units] if (len(to_concat) == 1): concat_values = to_concat[0] if (copy and (concat_values.base is not None)): concat_values = concat_values.copy() else: concat_values = _concat._concat_compat(to_concat, axis=concat_axis) return concat_values
[ "def", "concatenate_join_units", "(", "join_units", ",", "concat_axis", ",", "copy", ")", ":", "if", "(", "(", "concat_axis", "==", "0", ")", "and", "(", "len", "(", "join_units", ")", ">", "1", ")", ")", ":", "raise", "AssertionError", "(", "'Concatenating join units along axis0'", ")", "(", "empty_dtype", ",", "upcasted_na", ")", "=", "get_empty_dtype_and_na", "(", "join_units", ")", "to_concat", "=", "[", "ju", ".", "get_reindexed_values", "(", "empty_dtype", "=", "empty_dtype", ",", "upcasted_na", "=", "upcasted_na", ")", "for", "ju", "in", "join_units", "]", "if", "(", "len", "(", "to_concat", ")", "==", "1", ")", ":", "concat_values", "=", "to_concat", "[", "0", "]", "if", "(", "copy", "and", "(", "concat_values", ".", "base", "is", "not", "None", ")", ")", ":", "concat_values", "=", "concat_values", ".", "copy", "(", ")", "else", ":", "concat_values", "=", "_concat", ".", "_concat_compat", "(", "to_concat", ",", "axis", "=", "concat_axis", ")", "return", "concat_values" ]
concatenate values from several join units along selected axis .
train
true
47,045
@treeio_login_required def account_view(request, response_format='html'): profile = request.user.profile try: contacts = profile.contact_set.exclude(trash=True) except: contacts = [] return render_to_response('account/account_view', {'profile': profile, 'contacts': contacts}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "account_view", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "profile", "=", "request", ".", "user", ".", "profile", "try", ":", "contacts", "=", "profile", ".", "contact_set", ".", "exclude", "(", "trash", "=", "True", ")", "except", ":", "contacts", "=", "[", "]", "return", "render_to_response", "(", "'account/account_view'", ",", "{", "'profile'", ":", "profile", ",", "'contacts'", ":", "contacts", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
single transaction view page .
train
false
47,046
def assign_keywords(resource): for i in range(0, randint(0, 5)): resource.keywords.add(('keyword_%s' % randint(0, n_keywords)))
[ "def", "assign_keywords", "(", "resource", ")", ":", "for", "i", "in", "range", "(", "0", ",", "randint", "(", "0", ",", "5", ")", ")", ":", "resource", ".", "keywords", ".", "add", "(", "(", "'keyword_%s'", "%", "randint", "(", "0", ",", "n_keywords", ")", ")", ")" ]
assigns up to 5 keywords to resource .
train
false
47,047
@register.inclusion_tag('angular/angular_templates.html', takes_context=True) def angular_templates(context): template_paths = context['HORIZON_CONFIG']['external_templates'] all_theme_static_files = context['HORIZON_CONFIG']['theme_static_files'] this_theme_static_files = all_theme_static_files[context['THEME']] template_overrides = this_theme_static_files['template_overrides'] angular_templates = {} for relative_path in template_paths: template_static_path = (context['STATIC_URL'] + relative_path) if (relative_path in template_overrides): relative_path = template_overrides[relative_path] result = [] for finder in finders.get_finders(): result.extend(finder.find(relative_path, True)) path = result[(-1)] try: with open(path) as template_file: angular_templates[template_static_path] = template_file.read() except (OSError, IOError): pass templates = [(key, value) for (key, value) in angular_templates.items()] templates.sort(key=(lambda item: item[0])) return {'angular_templates': templates}
[ "@", "register", ".", "inclusion_tag", "(", "'angular/angular_templates.html'", ",", "takes_context", "=", "True", ")", "def", "angular_templates", "(", "context", ")", ":", "template_paths", "=", "context", "[", "'HORIZON_CONFIG'", "]", "[", "'external_templates'", "]", "all_theme_static_files", "=", "context", "[", "'HORIZON_CONFIG'", "]", "[", "'theme_static_files'", "]", "this_theme_static_files", "=", "all_theme_static_files", "[", "context", "[", "'THEME'", "]", "]", "template_overrides", "=", "this_theme_static_files", "[", "'template_overrides'", "]", "angular_templates", "=", "{", "}", "for", "relative_path", "in", "template_paths", ":", "template_static_path", "=", "(", "context", "[", "'STATIC_URL'", "]", "+", "relative_path", ")", "if", "(", "relative_path", "in", "template_overrides", ")", ":", "relative_path", "=", "template_overrides", "[", "relative_path", "]", "result", "=", "[", "]", "for", "finder", "in", "finders", ".", "get_finders", "(", ")", ":", "result", ".", "extend", "(", "finder", ".", "find", "(", "relative_path", ",", "True", ")", ")", "path", "=", "result", "[", "(", "-", "1", ")", "]", "try", ":", "with", "open", "(", "path", ")", "as", "template_file", ":", "angular_templates", "[", "template_static_path", "]", "=", "template_file", ".", "read", "(", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "pass", "templates", "=", "[", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "angular_templates", ".", "items", "(", ")", "]", "templates", ".", "sort", "(", "key", "=", "(", "lambda", "item", ":", "item", "[", "0", "]", ")", ")", "return", "{", "'angular_templates'", ":", "templates", "}" ]
for all static html templates .
train
true
47,048
@register.inclusion_tag(u'admin/subnav_item.html', takes_context=True) def admin_subnav(context, url_name, name, icon=u''): request = context.get(u'request') url = local_site_reverse(url_name, request=request) return RequestContext(request, {u'url': url, u'name': name, u'current': (url == request.path), u'icon': icon})
[ "@", "register", ".", "inclusion_tag", "(", "u'admin/subnav_item.html'", ",", "takes_context", "=", "True", ")", "def", "admin_subnav", "(", "context", ",", "url_name", ",", "name", ",", "icon", "=", "u''", ")", ":", "request", "=", "context", ".", "get", "(", "u'request'", ")", "url", "=", "local_site_reverse", "(", "url_name", ",", "request", "=", "request", ")", "return", "RequestContext", "(", "request", ",", "{", "u'url'", ":", "url", ",", "u'name'", ":", "name", ",", "u'current'", ":", "(", "url", "==", "request", ".", "path", ")", ",", "u'icon'", ":", "icon", "}", ")" ]
return an <li> containing a link to the desired setting tab .
train
false
47,049
@env.catch_exceptions def completions(): (row, col) = env.cursor if env.var('a:findstart', True): count = 0 for char in reversed(env.current.line[:col]): if (not re.match('[\\w\\d]', char)): break count += 1 env.debug('Complete find start', (col - count)) return env.stop((col - count)) base = env.var('a:base') (source, offset) = env.get_offset_params((row, col), base) proposals = get_proporsals(source, offset, base) return env.stop(proposals)
[ "@", "env", ".", "catch_exceptions", "def", "completions", "(", ")", ":", "(", "row", ",", "col", ")", "=", "env", ".", "cursor", "if", "env", ".", "var", "(", "'a:findstart'", ",", "True", ")", ":", "count", "=", "0", "for", "char", "in", "reversed", "(", "env", ".", "current", ".", "line", "[", ":", "col", "]", ")", ":", "if", "(", "not", "re", ".", "match", "(", "'[\\\\w\\\\d]'", ",", "char", ")", ")", ":", "break", "count", "+=", "1", "env", ".", "debug", "(", "'Complete find start'", ",", "(", "col", "-", "count", ")", ")", "return", "env", ".", "stop", "(", "(", "col", "-", "count", ")", ")", "base", "=", "env", ".", "var", "(", "'a:base'", ")", "(", "source", ",", "offset", ")", "=", "env", ".", "get_offset_params", "(", "(", "row", ",", "col", ")", ",", "base", ")", "proposals", "=", "get_proporsals", "(", "source", ",", "offset", ",", "base", ")", "return", "env", ".", "stop", "(", "proposals", ")" ]
search completions .
train
false
47,050
def libvlc_media_list_player_set_media_list(p_mlp, p_mlist): f = (_Cfunctions.get('libvlc_media_list_player_set_media_list', None) or _Cfunction('libvlc_media_list_player_set_media_list', ((1,), (1,)), None, None, MediaListPlayer, MediaList)) return f(p_mlp, p_mlist)
[ "def", "libvlc_media_list_player_set_media_list", "(", "p_mlp", ",", "p_mlist", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_set_media_list'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_set_media_list'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaListPlayer", ",", "MediaList", ")", ")", "return", "f", "(", "p_mlp", ",", "p_mlist", ")" ]
set the media list associated with the player .
train
true
47,051
def load_metadefs(): return get_backend().db_load_metadefs(engine=db_api.get_engine(), metadata_path=None, merge=False, prefer_new=False, overwrite=False)
[ "def", "load_metadefs", "(", ")", ":", "return", "get_backend", "(", ")", ".", "db_load_metadefs", "(", "engine", "=", "db_api", ".", "get_engine", "(", ")", ",", "metadata_path", "=", "None", ",", "merge", "=", "False", ",", "prefer_new", "=", "False", ",", "overwrite", "=", "False", ")" ]
read metadefinition files and insert data into the database .
train
false
47,052
def _syscmd_file(target, default=''): if (sys.platform in ('dos', 'win32', 'win16', 'os2')): return default target = _follow_symlinks(target) try: f = os.popen(('file "%s" 2> /dev/null' % target)) except (AttributeError, os.error): return default output = string.strip(f.read()) rc = f.close() if ((not output) or rc): return default else: return output
[ "def", "_syscmd_file", "(", "target", ",", "default", "=", "''", ")", ":", "if", "(", "sys", ".", "platform", "in", "(", "'dos'", ",", "'win32'", ",", "'win16'", ",", "'os2'", ")", ")", ":", "return", "default", "target", "=", "_follow_symlinks", "(", "target", ")", "try", ":", "f", "=", "os", ".", "popen", "(", "(", "'file \"%s\" 2> /dev/null'", "%", "target", ")", ")", "except", "(", "AttributeError", ",", "os", ".", "error", ")", ":", "return", "default", "output", "=", "string", ".", "strip", "(", "f", ".", "read", "(", ")", ")", "rc", "=", "f", ".", "close", "(", ")", "if", "(", "(", "not", "output", ")", "or", "rc", ")", ":", "return", "default", "else", ":", "return", "output" ]
interface to the systems file command .
train
false
47,053
def test_finalize(): assert ('' == render('{{ x }}', {'x': None}))
[ "def", "test_finalize", "(", ")", ":", "assert", "(", "''", "==", "render", "(", "'{{ x }}'", ",", "{", "'x'", ":", "None", "}", ")", ")" ]
we want none to show up as .
train
false
47,054
def _mkdirp(d): try: os.makedirs(d) except OSError as e: if (e.errno != errno.EEXIST): raise
[ "def", "_mkdirp", "(", "d", ")", ":", "try", ":", "os", ".", "makedirs", "(", "d", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "!=", "errno", ".", "EEXIST", ")", ":", "raise" ]
ensure directory d exists no guarantee that the directory is writable .
train
false
47,056
def click_studio_help(page): help_link_selector = '.cta-show-sock' EmptyPromise((lambda : page.q(css=help_link_selector).visible), 'Help link visible').fulfill() page.q(css=help_link_selector).click() EmptyPromise((lambda : (page.q(css='.support .list-actions a').results[0].text != '')), 'Support section opened').fulfill()
[ "def", "click_studio_help", "(", "page", ")", ":", "help_link_selector", "=", "'.cta-show-sock'", "EmptyPromise", "(", "(", "lambda", ":", "page", ".", "q", "(", "css", "=", "help_link_selector", ")", ".", "visible", ")", ",", "'Help link visible'", ")", ".", "fulfill", "(", ")", "page", ".", "q", "(", "css", "=", "help_link_selector", ")", ".", "click", "(", ")", "EmptyPromise", "(", "(", "lambda", ":", "(", "page", ".", "q", "(", "css", "=", "'.support .list-actions a'", ")", ".", "results", "[", "0", "]", ".", "text", "!=", "''", ")", ")", ",", "'Support section opened'", ")", ".", "fulfill", "(", ")" ]
click the studio help link in the page footer .
train
false
47,057
def idPage(request): return direct_to_template(request, 'server/idPage.html', {'server_url': getViewURL(request, endpoint)})
[ "def", "idPage", "(", "request", ")", ":", "return", "direct_to_template", "(", "request", ",", "'server/idPage.html'", ",", "{", "'server_url'", ":", "getViewURL", "(", "request", ",", "endpoint", ")", "}", ")" ]
serve the identity page for openid urls .
train
false
47,059
@cronjobs.register def dump_user_installs_cron(): chunk_size = 100 user_ids = set(Installed.objects.filter(user__enable_recommendations=True).values_list('user', flat=True)) path_to_cleanup = os.path.join(settings.DUMPED_USERS_PATH, 'users') task_log.info('Cleaning up path {0}'.format(path_to_cleanup)) try: for (dirpath, dirnames, filenames) in walk_storage(path_to_cleanup, storage=private_storage): for filename in filenames: private_storage.delete(os.path.join(dirpath, filename)) except OSError: pass grouping = [] for chunk in chunked(user_ids, chunk_size): grouping.append(dump_user_installs.subtask(args=[chunk])) post = zip_users.subtask(immutable=True) ts = chord(grouping, post) ts.apply_async()
[ "@", "cronjobs", ".", "register", "def", "dump_user_installs_cron", "(", ")", ":", "chunk_size", "=", "100", "user_ids", "=", "set", "(", "Installed", ".", "objects", ".", "filter", "(", "user__enable_recommendations", "=", "True", ")", ".", "values_list", "(", "'user'", ",", "flat", "=", "True", ")", ")", "path_to_cleanup", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "DUMPED_USERS_PATH", ",", "'users'", ")", "task_log", ".", "info", "(", "'Cleaning up path {0}'", ".", "format", "(", "path_to_cleanup", ")", ")", "try", ":", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "walk_storage", "(", "path_to_cleanup", ",", "storage", "=", "private_storage", ")", ":", "for", "filename", "in", "filenames", ":", "private_storage", ".", "delete", "(", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filename", ")", ")", "except", "OSError", ":", "pass", "grouping", "=", "[", "]", "for", "chunk", "in", "chunked", "(", "user_ids", ",", "chunk_size", ")", ":", "grouping", ".", "append", "(", "dump_user_installs", ".", "subtask", "(", "args", "=", "[", "chunk", "]", ")", ")", "post", "=", "zip_users", ".", "subtask", "(", "immutable", "=", "True", ")", "ts", "=", "chord", "(", "grouping", ",", "post", ")", "ts", ".", "apply_async", "(", ")" ]
sets up tasks to do user install dumps .
train
false
47,060
def enforce_types(key, val): non_string_params = {'ssl_verify': bool, 'insecure_auth': bool, 'env_whitelist': 'stringlist', 'env_blacklist': 'stringlist', 'gitfs_env_whitelist': 'stringlist', 'gitfs_env_blacklist': 'stringlist'} if (key not in non_string_params): return six.text_type(val) else: expected = non_string_params[key] if (expected is bool): return val elif (expected == 'stringlist'): if (not isinstance(val, (six.string_types, list))): val = six.text_type(val) if isinstance(val, six.string_types): return [x.strip() for x in val.split(',')] return [six.text_type(x) for x in val]
[ "def", "enforce_types", "(", "key", ",", "val", ")", ":", "non_string_params", "=", "{", "'ssl_verify'", ":", "bool", ",", "'insecure_auth'", ":", "bool", ",", "'env_whitelist'", ":", "'stringlist'", ",", "'env_blacklist'", ":", "'stringlist'", ",", "'gitfs_env_whitelist'", ":", "'stringlist'", ",", "'gitfs_env_blacklist'", ":", "'stringlist'", "}", "if", "(", "key", "not", "in", "non_string_params", ")", ":", "return", "six", ".", "text_type", "(", "val", ")", "else", ":", "expected", "=", "non_string_params", "[", "key", "]", "if", "(", "expected", "is", "bool", ")", ":", "return", "val", "elif", "(", "expected", "==", "'stringlist'", ")", ":", "if", "(", "not", "isinstance", "(", "val", ",", "(", "six", ".", "string_types", ",", "list", ")", ")", ")", ":", "val", "=", "six", ".", "text_type", "(", "val", ")", "if", "isinstance", "(", "val", ",", "six", ".", "string_types", ")", ":", "return", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "val", ".", "split", "(", "','", ")", "]", "return", "[", "six", ".", "text_type", "(", "x", ")", "for", "x", "in", "val", "]" ]
force params to be strings unless they should remain a different type .
train
false