id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
35,933
def detail_route(methods=None, **kwargs):
    """Mark a method on a viewset so it is routed for detail requests.

    The decorated function gains three attributes read by the router:
    ``bind_to_methods`` (HTTP methods, defaulting to ``['get']``),
    ``detail`` (always True) and ``kwargs`` (extra route options).
    """
    bound_methods = [u'get'] if methods is None else methods

    def decorator(func):
        func.bind_to_methods = bound_methods
        func.detail = True
        func.kwargs = kwargs
        return func

    return decorator
[ "def", "detail_route", "(", "methods", "=", "None", ",", "**", "kwargs", ")", ":", "methods", "=", "(", "[", "u'get'", "]", "if", "(", "methods", "is", "None", ")", "else", "methods", ")", "def", "decorator", "(", "func", ")", ":", "func", ".", "bind_to_methods", "=", "methods", "func", ".", "detail", "=", "True", "func", ".", "kwargs", "=", "kwargs", "return", "func", "return", "decorator" ]
used to mark a method on a viewset that should be routed for detail requests .
train
true
35,934
def set_xattr(path, key, value):
    """Set the value of the specified extended attribute on *path*.

    The key is namespaced via _make_namespaced_xattr_key and the value is
    stringified before being stored.
    """
    xattr.setxattr(path, _make_namespaced_xattr_key(key), str(value))
[ "def", "set_xattr", "(", "path", ",", "key", ",", "value", ")", ":", "namespaced_key", "=", "_make_namespaced_xattr_key", "(", "key", ")", "xattr", ".", "setxattr", "(", "path", ",", "namespaced_key", ",", "str", "(", "value", ")", ")" ]
set the value of a specified xattr .
train
false
35,936
@builtin(u'Swap the case of text (ignore tags)', swapcase, apply_func_to_html_text)
def replace_swapcase_ignore_tags(match, number, file_name, metadata, dictionaries,
                                 data, functions, *args, **kwargs):
    """Swap the case of the matched text while leaving HTML tags untouched."""
    return apply_func_to_html_text(match, swapcase)
[ "@", "builtin", "(", "u'Swap the case of text (ignore tags)'", ",", "swapcase", ",", "apply_func_to_html_text", ")", "def", "replace_swapcase_ignore_tags", "(", "match", ",", "number", ",", "file_name", ",", "metadata", ",", "dictionaries", ",", "data", ",", "functions", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "apply_func_to_html_text", "(", "match", ",", "swapcase", ")" ]
swap the case of the matched text .
train
false
35,937
def CDL2CROWS(barDs, count):
    """Two Crows candlestick pattern: delegate to TA-Lib's CDL2CROWS via the shared OHLC helper."""
    return call_talib_with_ohlc(barDs, count, talib.CDL2CROWS)
[ "def", "CDL2CROWS", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDL2CROWS", ")" ]
two crows .
train
false
35,938
def delete_asset(course_key, asset_key):
    """Delete the asset identified by *asset_key* in the course *course_key*.

    The content (and its thumbnail, if any) is first saved into the
    'trashcan' contentstore before being removed from the main store and
    the cache.

    Raises:
        AssetNotFoundException: if the asset does not exist.
    """
    try:
        content = contentstore().find(asset_key)
    except NotFoundError:
        raise AssetNotFoundException
    # Keep a recoverable copy before deleting anything.
    contentstore('trashcan').save(content)
    if content.thumbnail_location is not None:
        thumbnail_location = course_key.make_asset_key('thumbnail', asset_key.name)
        try:
            thumbnail_content = contentstore().find(thumbnail_location)
            contentstore('trashcan').save(thumbnail_content)
            contentstore().delete(thumbnail_content.get_id())
            del_cached_content(thumbnail_location)
        except Exception:
            # Thumbnail removal is best-effort; the asset itself is still deleted.
            logging.warning('Could not delete thumbnail: %s', thumbnail_location)
    contentstore().delete(content.get_id())
    del_cached_content(content.location)
[ "def", "delete_asset", "(", "course_key", ",", "asset_key", ")", ":", "try", ":", "content", "=", "contentstore", "(", ")", ".", "find", "(", "asset_key", ")", "except", "NotFoundError", ":", "raise", "AssetNotFoundException", "contentstore", "(", "'trashcan'", ")", ".", "save", "(", "content", ")", "if", "(", "content", ".", "thumbnail_location", "is", "not", "None", ")", ":", "thumbnail_location", "=", "course_key", ".", "make_asset_key", "(", "'thumbnail'", ",", "asset_key", ".", "name", ")", "try", ":", "thumbnail_content", "=", "contentstore", "(", ")", ".", "find", "(", "thumbnail_location", ")", "contentstore", "(", "'trashcan'", ")", ".", "save", "(", "thumbnail_content", ")", "contentstore", "(", ")", ".", "delete", "(", "thumbnail_content", ".", "get_id", "(", ")", ")", "del_cached_content", "(", "thumbnail_location", ")", "except", "Exception", ":", "logging", ".", "warning", "(", "'Could not delete thumbnail: %s'", ",", "thumbnail_location", ")", "contentstore", "(", ")", ".", "delete", "(", "content", ".", "get_id", "(", ")", ")", "del_cached_content", "(", "content", ".", "location", ")" ]
deletes asset represented by given asset_key in the course represented by given course_key .
train
false
35,941
def test_split_pootle_path():
    """Tests pootle paths are properly split."""
    cases = [
        ('', (None, None, '', '')),
        ('/projects/', (None, None, '', '')),
        ('/projects/tutorial/', (None, 'tutorial', '', '')),
        ('/pt/tutorial/tutorial.po', ('pt', 'tutorial', '', 'tutorial.po')),
        ('/pt/tutorial/foo/tutorial.po', ('pt', 'tutorial', 'foo/', 'tutorial.po')),
    ]
    for path, expected in cases:
        assert split_pootle_path(path) == expected
[ "def", "test_split_pootle_path", "(", ")", ":", "assert", "(", "split_pootle_path", "(", "''", ")", "==", "(", "None", ",", "None", ",", "''", ",", "''", ")", ")", "assert", "(", "split_pootle_path", "(", "'/projects/'", ")", "==", "(", "None", ",", "None", ",", "''", ",", "''", ")", ")", "assert", "(", "split_pootle_path", "(", "'/projects/tutorial/'", ")", "==", "(", "None", ",", "'tutorial'", ",", "''", ",", "''", ")", ")", "assert", "(", "split_pootle_path", "(", "'/pt/tutorial/tutorial.po'", ")", "==", "(", "'pt'", ",", "'tutorial'", ",", "''", ",", "'tutorial.po'", ")", ")", "assert", "(", "split_pootle_path", "(", "'/pt/tutorial/foo/tutorial.po'", ")", "==", "(", "'pt'", ",", "'tutorial'", ",", "'foo/'", ",", "'tutorial.po'", ")", ")" ]
tests pootle path are properly split .
train
false
35,942
def sort_by_start_date(courses):
    """Return the courses sorted by start date.

    Ordering: still-running courses before ended ones, dated starts before
    undated (``start is None``), then ascending by the start value itself.
    """
    def _start_key(course):
        return (course.has_ended(), course.start is None, course.start)

    return sorted(courses, key=_start_key)
[ "def", "sort_by_start_date", "(", "courses", ")", ":", "courses", "=", "sorted", "(", "courses", ",", "key", "=", "(", "lambda", "course", ":", "(", "course", ".", "has_ended", "(", ")", ",", "(", "course", ".", "start", "is", "None", ")", ",", "course", ".", "start", ")", ")", ",", "reverse", "=", "False", ")", "return", "courses" ]
returns a list of courses sorted by their start date .
train
false
35,944
def commit():
    """Commit the pending work on the connection, then mark the state clean."""
    connection._commit()
    set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
commit a container container container id repository repository/image to commit to tag tag of the image message commit message author author name conf conf cli example: .
train
false
35,945
def getTetragridCopy(tetragrid):
    """Return a copy of *tetragrid* with each row duplicated, or None.

    None is passed through unchanged so callers can forward an absent grid.
    Each row is shallow-copied, so mutating a row of the copy does not
    affect the original (elements themselves are shared).
    """
    if tetragrid is None:  # fixed: identity test instead of '== None'
        return None
    # Comprehension replaces the manual append loop; each row[:] is a fresh list.
    return [row[:] for row in tetragrid]
[ "def", "getTetragridCopy", "(", "tetragrid", ")", ":", "if", "(", "tetragrid", "==", "None", ")", ":", "return", "None", "tetragridCopy", "=", "[", "]", "for", "tetragridRow", "in", "tetragrid", ":", "tetragridCopy", ".", "append", "(", "tetragridRow", "[", ":", "]", ")", "return", "tetragridCopy" ]
get tetragrid copy .
train
false
35,946
def iddp_aid(eps, A):
    """Compute the ID of a real matrix to relative precision eps using random sampling.

    Returns (k, idx, proj): rank, column index array, and the k x (n-k)
    interpolation coefficient matrix in Fortran order.
    """
    A = np.asfortranarray(A)
    m, n = A.shape
    n2, w = idd_frmi(m)
    # Workspace sized as required by the Fortran routine.
    proj = np.empty(n * (2 * n2 + 1) + n2 + 1, order='F')
    k, idx, proj = _id.iddp_aid(eps, A, w, proj)
    proj = proj[:k * (n - k)].reshape((k, n - k), order='F')
    return (k, idx, proj)
[ "def", "iddp_aid", "(", "eps", ",", "A", ")", ":", "A", "=", "np", ".", "asfortranarray", "(", "A", ")", "(", "m", ",", "n", ")", "=", "A", ".", "shape", "(", "n2", ",", "w", ")", "=", "idd_frmi", "(", "m", ")", "proj", "=", "np", ".", "empty", "(", "(", "(", "(", "n", "*", "(", "(", "2", "*", "n2", ")", "+", "1", ")", ")", "+", "n2", ")", "+", "1", ")", ",", "order", "=", "'F'", ")", "(", "k", ",", "idx", ",", "proj", ")", "=", "_id", ".", "iddp_aid", "(", "eps", ",", "A", ",", "w", ",", "proj", ")", "proj", "=", "proj", "[", ":", "(", "k", "*", "(", "n", "-", "k", ")", ")", "]", ".", "reshape", "(", "(", "k", ",", "(", "n", "-", "k", ")", ")", ",", "order", "=", "'F'", ")", "return", "(", "k", ",", "idx", ",", "proj", ")" ]
compute id of a real matrix to a specified relative precision using random sampling .
train
false
35,948
def squaring_cycle_graph_old(n, b=10):
    """Return a digraph of iterations of powersum(k, 2, b) for k = 1..n.

    For each start value, edges k -> powersum(k, 2, b) are followed until a
    fixed point is reached or the trail joins a node already explored
    (out_degree >= 1), at which point the remaining cycle is already present.
    """
    G = DiGraph()
    for start in range(1, n + 1):
        current = start
        G.add_node(current)
        succ = powersum(current, 2, b)
        G.add_edge(current, succ)
        while current != succ:
            current = succ
            succ = powersum(current, 2, b)
            G.add_edge(current, succ)
            if G.out_degree(succ) >= 1:
                break
    return G
[ "def", "squaring_cycle_graph_old", "(", "n", ",", "b", "=", "10", ")", ":", "G", "=", "DiGraph", "(", ")", "for", "k", "in", "range", "(", "1", ",", "(", "n", "+", "1", ")", ")", ":", "k1", "=", "k", "G", ".", "add_node", "(", "k1", ")", "knext", "=", "powersum", "(", "k1", ",", "2", ",", "b", ")", "G", ".", "add_edge", "(", "k1", ",", "knext", ")", "while", "(", "k1", "!=", "knext", ")", ":", "k1", "=", "knext", "knext", "=", "powersum", "(", "k1", ",", "2", ",", "b", ")", "G", ".", "add_edge", "(", "k1", ",", "knext", ")", "if", "(", "G", ".", "out_degree", "(", "knext", ")", ">=", "1", ")", ":", "break", "return", "G" ]
return digraph of iterations of powersum .
train
false
35,949
def assert_list(obj, expected_type=string_types, can_be_none=True, default=(),
                key_arg=None, allowable=(list, Fileset, OrderedSet, set, tuple),
                raise_type=ValueError):
    """Ensure a BUILD-file parameter is a list-like of acceptable element type.

    Returns the value as a plain list.  When *obj* is None and can_be_none,
    the *default* is used.  Raises *raise_type* for a disallowed container
    type, a disallowed element type, or a None value when can_be_none is
    False.  *key_arg* names the offending key in error messages.
    """
    def get_key_msg(key=None):
        if key is None:
            return u''
        return u"In key '{}': ".format(key)

    allowable = tuple(allowable)
    key_msg = get_key_msg(key_arg)
    val = obj
    if val is None:
        if not can_be_none:
            raise raise_type(
                u'{}Expected an object of acceptable type {}, received None and can_be_none is False'.format(
                    key_msg, allowable))
        val = list(default)
    if not isinstance(val, allowable):
        raise raise_type(
            u'{}Expected an object of acceptable type {}, received {} instead'.format(
                key_msg, allowable, val))
    lst = list(val)
    for e in lst:
        if not isinstance(e, expected_type):
            raise raise_type(
                u'{}Expected a list containing values of type {}, instead got a value {} of {}'.format(
                    key_msg, expected_type, e, e.__class__))
    return lst
[ "def", "assert_list", "(", "obj", ",", "expected_type", "=", "string_types", ",", "can_be_none", "=", "True", ",", "default", "=", "(", ")", ",", "key_arg", "=", "None", ",", "allowable", "=", "(", "list", ",", "Fileset", ",", "OrderedSet", ",", "set", ",", "tuple", ")", ",", "raise_type", "=", "ValueError", ")", ":", "def", "get_key_msg", "(", "key", "=", "None", ")", ":", "if", "(", "key", "is", "None", ")", ":", "return", "u''", "else", ":", "return", "u\"In key '{}': \"", ".", "format", "(", "key", ")", "allowable", "=", "tuple", "(", "allowable", ")", "key_msg", "=", "get_key_msg", "(", "key_arg", ")", "val", "=", "obj", "if", "(", "val", "is", "None", ")", ":", "if", "can_be_none", ":", "val", "=", "list", "(", "default", ")", "else", ":", "raise", "raise_type", "(", "u'{}Expected an object of acceptable type {}, received None and can_be_none is False'", ".", "format", "(", "key_msg", ",", "allowable", ")", ")", "if", "isinstance", "(", "val", ",", "allowable", ")", ":", "lst", "=", "list", "(", "val", ")", "for", "e", "in", "lst", ":", "if", "(", "not", "isinstance", "(", "e", ",", "expected_type", ")", ")", ":", "raise", "raise_type", "(", "u'{}Expected a list containing values of type {}, instead got a value {} of {}'", ".", "format", "(", "key_msg", ",", "expected_type", ",", "e", ",", "e", ".", "__class__", ")", ")", "return", "lst", "else", ":", "raise", "raise_type", "(", "u'{}Expected an object of acceptable type {}, received {} instead'", ".", "format", "(", "key_msg", ",", "allowable", ",", "val", ")", ")" ]
this function is used to ensure that parameters set by users in build files are of acceptable types .
train
true
35,950
def catalog_nodes(consul_url=None, **kwargs):
    """Return the list of available nodes from the Consul catalog.

    Falls back to the configured URL when none is given; returns an error
    dict ({'message', 'res': False}) when no URL can be determined.
    An optional 'dc' kwarg selects the datacenter.
    """
    if not consul_url:
        consul_url = _get_config()
    if not consul_url:
        log.error('No Consul URL found.')
        return {'message': 'No Consul URL found.', 'res': False}
    query_params = {}
    if 'dc' in kwargs:
        query_params['dc'] = kwargs['dc']
    return _query(consul_url=consul_url, function='catalog/nodes',
                  query_params=query_params)
[ "def", "catalog_nodes", "(", "consul_url", "=", "None", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "}", "query_params", "=", "{", "}", "if", "(", "not", "consul_url", ")", ":", "consul_url", "=", "_get_config", "(", ")", "if", "(", "not", "consul_url", ")", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "(", "'dc'", "in", "kwargs", ")", ":", "query_params", "[", "'dc'", "]", "=", "kwargs", "[", "'dc'", "]", "function", "=", "'catalog/nodes'", "ret", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "function", "=", "function", ",", "query_params", "=", "query_params", ")", "return", "ret" ]
return list of available nodes from catalog .
train
true
35,951
def parse_and_instantiate(parent_to, text, indent):
    """Parse a snippet definition in SnipMate format from *text* at the current indent.

    Tokenizes, resolves ambiguous tokens, then finalizes the text objects
    under *parent_to*.
    """
    all_tokens, seen_ts = tokenize_snippet_text(
        parent_to, text, indent,
        __ALLOWED_TOKENS, __ALLOWED_TOKENS_IN_TABSTOPS, _TOKEN_TO_TEXTOBJECT)
    resolve_ambiguity(all_tokens, seen_ts)
    finalize(all_tokens, seen_ts, parent_to)
[ "def", "parse_and_instantiate", "(", "parent_to", ",", "text", ",", "indent", ")", ":", "(", "all_tokens", ",", "seen_ts", ")", "=", "tokenize_snippet_text", "(", "parent_to", ",", "text", ",", "indent", ",", "__ALLOWED_TOKENS", ",", "__ALLOWED_TOKENS_IN_TABSTOPS", ",", "_TOKEN_TO_TEXTOBJECT", ")", "resolve_ambiguity", "(", "all_tokens", ",", "seen_ts", ")", "finalize", "(", "all_tokens", ",", "seen_ts", ",", "parent_to", ")" ]
parses a snippet definition in snipmate format from text assuming the current indent .
train
false
35,952
def binary_fill_holes(input, structure=None, output=None, origin=0):
    """Fill the holes in binary objects.

    Dilates an all-zero seed inside the complement of the input and inverts
    the result: complement pixels never reached from the border are holes.
    NOTE(review): when *output* is a provided ndarray the result is written
    in place and nothing is returned (scipy.ndimage convention) — the
    original's flattened source is ambiguous here; confirm against callers.
    """
    mask = numpy.logical_not(input)
    tmp = numpy.zeros(mask.shape, bool)
    if isinstance(output, numpy.ndarray):
        binary_dilation(tmp, structure, -1, mask, output, 1, origin)
        numpy.logical_not(output, output)
    else:
        output = binary_dilation(tmp, structure, -1, mask, None, 1, origin)
        numpy.logical_not(output, output)
        return output
[ "def", "binary_fill_holes", "(", "input", ",", "structure", "=", "None", ",", "output", "=", "None", ",", "origin", "=", "0", ")", ":", "mask", "=", "numpy", ".", "logical_not", "(", "input", ")", "tmp", "=", "numpy", ".", "zeros", "(", "mask", ".", "shape", ",", "bool", ")", "inplace", "=", "isinstance", "(", "output", ",", "numpy", ".", "ndarray", ")", "if", "inplace", ":", "binary_dilation", "(", "tmp", ",", "structure", ",", "(", "-", "1", ")", ",", "mask", ",", "output", ",", "1", ",", "origin", ")", "numpy", ".", "logical_not", "(", "output", ",", "output", ")", "else", ":", "output", "=", "binary_dilation", "(", "tmp", ",", "structure", ",", "(", "-", "1", ")", ",", "mask", ",", "None", ",", "1", ",", "origin", ")", "numpy", ".", "logical_not", "(", "output", ",", "output", ")", "return", "output" ]
fill the holes in binary objects .
train
false
35,953
def foreign_key_columns(model):
    """Return the list of columns of *model* that carry foreign keys."""
    inspected = sqlalchemy_inspect(model)
    return [column for column in inspected.columns if column.foreign_keys]
[ "def", "foreign_key_columns", "(", "model", ")", ":", "mapper", "=", "sqlalchemy_inspect", "(", "model", ")", "return", "[", "c", "for", "c", "in", "mapper", ".", "columns", "if", "c", ".", "foreign_keys", "]" ]
returns a list of the :class:sqlalchemy .
train
false
35,954
def _read_footer(file_obj):
    """Read the footer from *file_obj* and return a parquet FileMetaData object."""
    footer_size = _get_footer_size(file_obj)
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(u'Footer size in bytes: %s', footer_size)
    # Seek back past the metadata plus the trailing 8 bytes (length + magic).
    file_obj.seek(-(8 + footer_size), 2)
    pin = TCompactProtocolFactory().get_protocol(TFileTransport(file_obj))
    fmd = parquet_thrift.FileMetaData()
    fmd.read(pin)
    return fmd
[ "def", "_read_footer", "(", "file_obj", ")", ":", "footer_size", "=", "_get_footer_size", "(", "file_obj", ")", "if", "logger", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "logger", ".", "debug", "(", "u'Footer size in bytes: %s'", ",", "footer_size", ")", "file_obj", ".", "seek", "(", "(", "-", "(", "8", "+", "footer_size", ")", ")", ",", "2", ")", "tin", "=", "TFileTransport", "(", "file_obj", ")", "pin", "=", "TCompactProtocolFactory", "(", ")", ".", "get_protocol", "(", "tin", ")", "fmd", "=", "parquet_thrift", ".", "FileMetaData", "(", ")", "fmd", ".", "read", "(", "pin", ")", "return", "fmd" ]
read the footer from the given file object and returns a filemetadata object .
train
true
35,955
def organisation_needs():
    """RESTful CRUD controller for organisation needs."""
    def prep(r):
        # Only adjust field properties for interactive requests.
        if not r.interactive:
            return True
        if r.method == 'create':
            organisation_id = get_vars.get('~.(organisation)', None)
            if organisation_id:
                # Organisation preselected by the caller: fix and hide it.
                field = s3db.req_organisation_needs.organisation_id
                field.default = organisation_id
                field.readable = False
                field.writable = False
        elif r.method == 'update':
            # Never change the organisation of an existing record.
            field = s3db.req_organisation_needs.organisation_id
            field.writable = False
            field.comment = None
        return True

    s3.prep = prep
    return s3_rest_controller()
[ "def", "organisation_needs", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "r", ".", "interactive", ":", "if", "(", "r", ".", "method", "==", "'create'", ")", ":", "organisation_id", "=", "get_vars", ".", "get", "(", "'~.(organisation)'", ",", "None", ")", "if", "organisation_id", ":", "field", "=", "s3db", ".", "req_organisation_needs", ".", "organisation_id", "field", ".", "default", "=", "organisation_id", "field", ".", "readable", "=", "False", "field", ".", "writable", "=", "False", "elif", "(", "r", ".", "method", "==", "'update'", ")", ":", "field", "=", "s3db", ".", "req_organisation_needs", ".", "organisation_id", "field", ".", "writable", "=", "False", "field", ".", "comment", "=", "None", "return", "True", "s3", ".", "prep", "=", "prep", "return", "s3_rest_controller", "(", ")" ]
restful crud controller for organisation needs .
train
false
35,956
def test_default_task_loading():
    """crawl() should return default tasks where found."""
    docs, tasks = load_fabfile(fabfile('default_tasks'))
    found = crawl('mymodule', tasks)
    ok_(isinstance(found, Task))
[ "def", "test_default_task_loading", "(", ")", ":", "(", "docs", ",", "tasks", ")", "=", "load_fabfile", "(", "fabfile", "(", "'default_tasks'", ")", ")", "ok_", "(", "isinstance", "(", "crawl", "(", "'mymodule'", ",", "tasks", ")", ",", "Task", ")", ")" ]
crawl() should return default tasks where found .
train
false
35,957
def oneTransportTest(testMethod):
    """Decorate a ReactorBuilder test needing one reactor and one connected transport.

    The wrapped test is invoked as testMethod(builder, reactor, transport)
    from the server side's connectionMade; both ends are torn down afterwards
    so the reactor can stop.
    """
    @wraps(testMethod)
    def actualTestMethod(builder):
        other = ConnectableProtocol()

        class ServerProtocol(ConnectableProtocol):
            def connectionMade(self):
                try:
                    testMethod(builder, self.reactor, self.transport)
                finally:
                    # Always close both transports, even if the test raised.
                    if self.transport is not None:
                        self.transport.loseConnection()
                    if other.transport is not None:
                        other.transport.loseConnection()

        serverProtocol = ServerProtocol()
        runProtocolsWithReactor(builder, serverProtocol, other, TCPCreator())

    return actualTestMethod
[ "def", "oneTransportTest", "(", "testMethod", ")", ":", "@", "wraps", "(", "testMethod", ")", "def", "actualTestMethod", "(", "builder", ")", ":", "other", "=", "ConnectableProtocol", "(", ")", "class", "ServerProtocol", "(", "ConnectableProtocol", ",", ")", ":", "def", "connectionMade", "(", "self", ")", ":", "try", ":", "testMethod", "(", "builder", ",", "self", ".", "reactor", ",", "self", ".", "transport", ")", "finally", ":", "if", "(", "self", ".", "transport", "is", "not", "None", ")", ":", "self", ".", "transport", ".", "loseConnection", "(", ")", "if", "(", "other", ".", "transport", "is", "not", "None", ")", ":", "other", ".", "transport", ".", "loseConnection", "(", ")", "serverProtocol", "=", "ServerProtocol", "(", ")", "runProtocolsWithReactor", "(", "builder", ",", "serverProtocol", ",", "other", ",", "TCPCreator", "(", ")", ")", "return", "actualTestMethod" ]
decorate a l{reactorbuilder} test function which tests one reactor and one connected transport .
train
false
35,958
@register.filter
def volsort(l):
    """Sort baselined Volatility results: entries with a 'class_' first, then the rest.

    Yields lazily; an empty/None input yields nothing.
    """
    if not l:
        return
    for entry in l:
        if entry.get('class_'):
            yield entry
    for entry in l:
        if not entry.get('class_'):
            yield entry
[ "@", "register", ".", "filter", "def", "volsort", "(", "l", ")", ":", "if", "(", "not", "l", ")", ":", "return", "for", "x", "in", "l", ":", "if", "x", ".", "get", "(", "'class_'", ")", ":", "(", "yield", "x", ")", "for", "x", "in", "l", ":", "if", "(", "not", "x", ".", "get", "(", "'class_'", ")", ")", ":", "(", "yield", "x", ")" ]
sort baselined volatility results .
train
false
35,960
def get_remote_catalog_db(dbname, cache=True, verbose=True):
    """Get a database of VO services from the remote registry.

    Fetches '<vos_baseurl>/<dbname>.json' and parses it as a VOSDatabase;
    *cache* controls download caching and *verbose* the progress display.
    """
    url = urllib.parse.urljoin(vo_conf.vos_baseurl, dbname + u'.json')
    return VOSDatabase.from_json(url, encoding=u'utf8', cache=cache,
                                 show_progress=verbose)
[ "def", "get_remote_catalog_db", "(", "dbname", ",", "cache", "=", "True", ",", "verbose", "=", "True", ")", ":", "return", "VOSDatabase", ".", "from_json", "(", "urllib", ".", "parse", ".", "urljoin", "(", "vo_conf", ".", "vos_baseurl", ",", "(", "dbname", "+", "u'.json'", ")", ")", ",", "encoding", "=", "u'utf8'", ",", "cache", "=", "cache", ",", "show_progress", "=", "verbose", ")" ]
get a database of vo services from a remote location .
train
false
35,961
def setup_light(device_id, name, insteonhub, hass, add_devices_callback):
    """Set up an Insteon local dimmer and persist its name in the lights config file."""
    if device_id in _CONFIGURING:
        # Device was awaiting configuration; close out the configurator request.
        request_id = _CONFIGURING.pop(device_id)
        get_component('configurator').request_done(request_id)
        _LOGGER.info('Device configuration done!')
    conf_path = hass.config.path(INSTEON_LOCAL_LIGHTS_CONF)
    conf_lights = config_from_file(conf_path)
    if device_id not in conf_lights:
        conf_lights[device_id] = name
        if not config_from_file(conf_path, conf_lights):
            _LOGGER.error('Failed to save configuration file')
    device = insteonhub.dimmer(device_id)
    add_devices_callback([InsteonLocalDimmerDevice(device, name)])
[ "def", "setup_light", "(", "device_id", ",", "name", ",", "insteonhub", ",", "hass", ",", "add_devices_callback", ")", ":", "if", "(", "device_id", "in", "_CONFIGURING", ")", ":", "request_id", "=", "_CONFIGURING", ".", "pop", "(", "device_id", ")", "configurator", "=", "get_component", "(", "'configurator'", ")", "configurator", ".", "request_done", "(", "request_id", ")", "_LOGGER", ".", "info", "(", "'Device configuration done!'", ")", "conf_lights", "=", "config_from_file", "(", "hass", ".", "config", ".", "path", "(", "INSTEON_LOCAL_LIGHTS_CONF", ")", ")", "if", "(", "device_id", "not", "in", "conf_lights", ")", ":", "conf_lights", "[", "device_id", "]", "=", "name", "if", "(", "not", "config_from_file", "(", "hass", ".", "config", ".", "path", "(", "INSTEON_LOCAL_LIGHTS_CONF", ")", ",", "conf_lights", ")", ")", ":", "_LOGGER", ".", "error", "(", "'Failed to save configuration file'", ")", "device", "=", "insteonhub", ".", "dimmer", "(", "device_id", ")", "add_devices_callback", "(", "[", "InsteonLocalDimmerDevice", "(", "device", ",", "name", ")", "]", ")" ]
set up the light .
train
false
35,962
def list_profiles_in(path):
    """List profiles in a given root directory.

    A profile is a subdirectory named 'profile_<name>'; the returned list
    holds the '<name>' parts.  Entries whose names cannot be joined to the
    path (UnicodeError) are skipped.
    """
    profiles = []
    for entry in os.listdir(path):
        try:
            full_path = os.path.join(path, entry)
        except UnicodeError:
            continue
        if os.path.isdir(full_path) and entry.startswith('profile_'):
            profiles.append(entry.split('_', 1)[-1])
    return profiles
[ "def", "list_profiles_in", "(", "path", ")", ":", "files", "=", "os", ".", "listdir", "(", "path", ")", "profiles", "=", "[", "]", "for", "f", "in", "files", ":", "try", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", "except", "UnicodeError", ":", "continue", "if", "(", "os", ".", "path", ".", "isdir", "(", "full_path", ")", "and", "f", ".", "startswith", "(", "'profile_'", ")", ")", ":", "profiles", ".", "append", "(", "f", ".", "split", "(", "'_'", ",", "1", ")", "[", "(", "-", "1", ")", "]", ")", "return", "profiles" ]
list profiles in a given root directory .
train
true
35,963
def quotesplit(line):
    """Split *line* via _quotesplit, returning the items parsed so far.

    A QuoteError (e.g. an unterminated quote) stops parsing; whatever was
    produced before the error is kept (best-effort).
    """
    results = []
    try:
        for item in _quotesplit(line):
            results.append(item)
    except QuoteError:
        pass
    return results
[ "def", "quotesplit", "(", "line", ")", ":", "l", "=", "[", "]", "try", ":", "for", "i", "in", "_quotesplit", "(", "line", ")", ":", "l", ".", "append", "(", "i", ")", "except", "QuoteError", ":", "pass", "return", "l" ]
split line into a list of offset .
train
false
35,964
def document_wait_method(section, waiter_name, event_emitter, service_model,
                         service_waiter_model, include_signature=True):
    """Document the ``wait`` method of the named waiter into *section*."""
    waiter_model = service_waiter_model.get_waiter(waiter_name)
    operation_model = service_model.operation_model(waiter_model.operation)
    # Adjacent literals concatenate to the exact original message text.
    wait_description = (
        'Polls :py:meth:`{0}.Client.{1}` every {2} seconds until a successful '
        'state is reached. An error is returned after {3} failed checks.'
    ).format(get_service_module_name(service_model),
             xform_name(waiter_model.operation),
             waiter_model.delay, waiter_model.max_attempts)
    document_model_driven_method(
        section, 'wait', operation_model,
        event_emitter=event_emitter,
        method_description=wait_description,
        example_prefix='waiter.wait',
        document_output=False,
        include_signature=include_signature)
[ "def", "document_wait_method", "(", "section", ",", "waiter_name", ",", "event_emitter", ",", "service_model", ",", "service_waiter_model", ",", "include_signature", "=", "True", ")", ":", "waiter_model", "=", "service_waiter_model", ".", "get_waiter", "(", "waiter_name", ")", "operation_model", "=", "service_model", ".", "operation_model", "(", "waiter_model", ".", "operation", ")", "wait_description", "=", "'Polls :py:meth:`{0}.Client.{1}` every {2} seconds until a successful state is reached. An error is returned after {3} failed checks.'", ".", "format", "(", "get_service_module_name", "(", "service_model", ")", ",", "xform_name", "(", "waiter_model", ".", "operation", ")", ",", "waiter_model", ".", "delay", ",", "waiter_model", ".", "max_attempts", ")", "document_model_driven_method", "(", "section", ",", "'wait'", ",", "operation_model", ",", "event_emitter", "=", "event_emitter", ",", "method_description", "=", "wait_description", ",", "example_prefix", "=", "'waiter.wait'", ",", "document_output", "=", "False", ",", "include_signature", "=", "include_signature", ")" ]
documents a the wait method of a waiter .
train
false
35,965
def _rules_config(rules):
    """Write /etc/shorewall/rules from the given rule dicts.

    Missing fields are filled with shorewall's '-' placeholder (protocol
    defaults to 'tcp'); list-valued port fields are joined with commas.
    Falls back to DEFAULT_RULES when *rules* is None.
    """
    if rules is None:
        rules = DEFAULT_RULES
    field_defaults = (
        ('proto', 'tcp'), ('dest_port', '-'), ('source_port', '-'),
        ('original_dest', '-'), ('rate_limit', '-'), ('user', '-'),
        ('mark', '-'), ('conn_limit', '-'), ('time', '-'),
    )
    lines = [RULES_HEADER]
    for entry in rules:
        for field, default in field_defaults:
            entry.setdefault(field, default)
        for port_field in ('dest_port', 'source_port'):
            if isinstance(entry[port_field], list):
                entry[port_field] = ','.join(map(str, entry[port_field]))
        lines.append(RULES_FORMAT % entry)
    file('/etc/shorewall/rules', contents=''.join(lines), use_sudo=True)
[ "def", "_rules_config", "(", "rules", ")", ":", "if", "(", "rules", "is", "None", ")", ":", "rules", "=", "DEFAULT_RULES", "lines", "=", "[", "RULES_HEADER", "]", "for", "entry", "in", "rules", ":", "entry", ".", "setdefault", "(", "'proto'", ",", "'tcp'", ")", "entry", ".", "setdefault", "(", "'dest_port'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'source_port'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'original_dest'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'rate_limit'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'user'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'mark'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'conn_limit'", ",", "'-'", ")", "entry", ".", "setdefault", "(", "'time'", ",", "'-'", ")", "if", "isinstance", "(", "entry", "[", "'dest_port'", "]", ",", "list", ")", ":", "entry", "[", "'dest_port'", "]", "=", "','", ".", "join", "(", "map", "(", "str", ",", "entry", "[", "'dest_port'", "]", ")", ")", "if", "isinstance", "(", "entry", "[", "'source_port'", "]", ",", "list", ")", ":", "entry", "[", "'source_port'", "]", "=", "','", ".", "join", "(", "map", "(", "str", ",", "entry", "[", "'source_port'", "]", ")", ")", "lines", ".", "append", "(", "(", "RULES_FORMAT", "%", "entry", ")", ")", "file", "(", "'/etc/shorewall/rules'", ",", "contents", "=", "''", ".", "join", "(", "lines", ")", ",", "use_sudo", "=", "True", ")" ]
policy configuration .
train
false
35,967
def find_preprint_provider(node):
    """Given a node, return ('osf' or 'branded', provider name).

    Returns (None, None) when no preprint/provider can be resolved
    (best-effort lookup; all failures are swallowed).
    """
    try:
        preprint = PreprintService.find_one(Q('node', 'eq', node._id))
        provider = preprint.provider
        if provider._id == 'osf':
            return ('osf', provider.name)
        return ('branded', provider.name)
    except Exception:
        return (None, None)
[ "def", "find_preprint_provider", "(", "node", ")", ":", "try", ":", "preprint", "=", "PreprintService", ".", "find_one", "(", "Q", "(", "'node'", ",", "'eq'", ",", "node", ".", "_id", ")", ")", "provider", "=", "preprint", ".", "provider", "if", "(", "provider", ".", "_id", "==", "'osf'", ")", ":", "return", "(", "'osf'", ",", "provider", ".", "name", ")", "else", ":", "return", "(", "'branded'", ",", "provider", ".", "name", ")", "except", "Exception", ":", "return", "(", "None", ",", "None", ")" ]
given a node .
train
false
35,968
def _configuration_model(deg_sequence, create_using, directed=False,
                         in_deg_sequence=None, seed=None):
    """Helper generating an undirected or directed configuration-model graph.

    Builds stub lists from the degree sequence(s), shuffles them (seeded if
    *seed* is given) and pairs them off into edges on an empty graph of
    len(deg_sequence) nodes.
    """
    if seed is not None:
        random.seed(seed)
    n = len(deg_sequence)
    G = nx.empty_graph(n, create_using=create_using)
    if n == 0:
        return G
    if directed:
        # Pad the shorter sequence with zeros so out/in stubs align.
        pairs = zip_longest(deg_sequence, in_deg_sequence, fillvalue=0)
        out_deg, in_deg = zip(*pairs)
        out_stublist = _to_stublist(out_deg)
        in_stublist = _to_stublist(in_deg)
        random.shuffle(out_stublist)
        random.shuffle(in_stublist)
    else:
        stublist = _to_stublist(deg_sequence)
        half = len(stublist) // 2
        random.shuffle(stublist)
        out_stublist, in_stublist = stublist[:half], stublist[half:]
    G.add_edges_from(zip(out_stublist, in_stublist))
    return G
[ "def", "_configuration_model", "(", "deg_sequence", ",", "create_using", ",", "directed", "=", "False", ",", "in_deg_sequence", "=", "None", ",", "seed", "=", "None", ")", ":", "if", "(", "seed", "is", "not", "None", ")", ":", "random", ".", "seed", "(", "seed", ")", "n", "=", "len", "(", "deg_sequence", ")", "G", "=", "nx", ".", "empty_graph", "(", "n", ",", "create_using", "=", "create_using", ")", "if", "(", "n", "==", "0", ")", ":", "return", "G", "if", "directed", ":", "pairs", "=", "zip_longest", "(", "deg_sequence", ",", "in_deg_sequence", ",", "fillvalue", "=", "0", ")", "(", "out_deg", ",", "in_deg", ")", "=", "zip", "(", "*", "pairs", ")", "out_stublist", "=", "_to_stublist", "(", "out_deg", ")", "in_stublist", "=", "_to_stublist", "(", "in_deg", ")", "random", ".", "shuffle", "(", "out_stublist", ")", "random", ".", "shuffle", "(", "in_stublist", ")", "else", ":", "stublist", "=", "_to_stublist", "(", "deg_sequence", ")", "n", "=", "len", "(", "stublist", ")", "half", "=", "(", "n", "//", "2", ")", "random", ".", "shuffle", "(", "stublist", ")", "(", "out_stublist", ",", "in_stublist", ")", "=", "(", "stublist", "[", ":", "half", "]", ",", "stublist", "[", "half", ":", "]", ")", "G", ".", "add_edges_from", "(", "zip", "(", "out_stublist", ",", "in_stublist", ")", ")", "return", "G" ]
helper function for generating either undirected or directed configuration model graphs .
train
false
35,972
def _server_maintenance():
    """Repeated server upkeep: runtime bookkeeping, cache flush, validations."""
    global EVENNIA, _MAINTENANCE_COUNT, _FLUSH_CACHE, _GAMETIME_MODULE
    # Lazy imports, cached in module globals on first run.
    if not _FLUSH_CACHE:
        from evennia.utils.idmapper.models import conditional_flush as _FLUSH_CACHE
    if not _GAMETIME_MODULE:
        from evennia.utils import gametime as _GAMETIME_MODULE
    _MAINTENANCE_COUNT += 1
    now = time.time()
    if _MAINTENANCE_COUNT == 1:
        # First tick after start: record start time, load persisted runtime.
        _GAMETIME_MODULE.SERVER_START_TIME = now
        _GAMETIME_MODULE.SERVER_RUNTIME = ServerConfig.objects.conf('runtime', default=0.0)
    else:
        # Called once a minute; accumulate runtime in 60 s increments.
        _GAMETIME_MODULE.SERVER_RUNTIME += 60.0
    _GAMETIME_MODULE.SERVER_RUNTIME_LAST_UPDATED = now
    ServerConfig.objects.conf('runtime', _GAMETIME_MODULE.SERVER_RUNTIME)
    # Periodic jobs keyed off the tick counter.
    if _MAINTENANCE_COUNT % 300 == 0:
        _FLUSH_CACHE(_IDMAPPER_CACHE_MAXSIZE)
    if _MAINTENANCE_COUNT % 3600 == 0:
        evennia.ScriptDB.objects.validate()
    if _MAINTENANCE_COUNT % 3700 == 0:
        evennia.CHANNEL_HANDLER.update()
[ "def", "_server_maintenance", "(", ")", ":", "global", "EVENNIA", ",", "_MAINTENANCE_COUNT", ",", "_FLUSH_CACHE", ",", "_GAMETIME_MODULE", "if", "(", "not", "_FLUSH_CACHE", ")", ":", "from", "evennia", ".", "utils", ".", "idmapper", ".", "models", "import", "conditional_flush", "as", "_FLUSH_CACHE", "if", "(", "not", "_GAMETIME_MODULE", ")", ":", "from", "evennia", ".", "utils", "import", "gametime", "as", "_GAMETIME_MODULE", "_MAINTENANCE_COUNT", "+=", "1", "now", "=", "time", ".", "time", "(", ")", "if", "(", "_MAINTENANCE_COUNT", "==", "1", ")", ":", "_GAMETIME_MODULE", ".", "SERVER_START_TIME", "=", "now", "_GAMETIME_MODULE", ".", "SERVER_RUNTIME", "=", "ServerConfig", ".", "objects", ".", "conf", "(", "'runtime'", ",", "default", "=", "0.0", ")", "else", ":", "_GAMETIME_MODULE", ".", "SERVER_RUNTIME", "+=", "60.0", "_GAMETIME_MODULE", ".", "SERVER_RUNTIME_LAST_UPDATED", "=", "now", "ServerConfig", ".", "objects", ".", "conf", "(", "'runtime'", ",", "_GAMETIME_MODULE", ".", "SERVER_RUNTIME", ")", "if", "(", "(", "_MAINTENANCE_COUNT", "%", "300", ")", "==", "0", ")", ":", "_FLUSH_CACHE", "(", "_IDMAPPER_CACHE_MAXSIZE", ")", "if", "(", "(", "_MAINTENANCE_COUNT", "%", "3600", ")", "==", "0", ")", ":", "evennia", ".", "ScriptDB", ".", "objects", ".", "validate", "(", ")", "if", "(", "(", "_MAINTENANCE_COUNT", "%", "3700", ")", "==", "0", ")", ":", "evennia", ".", "CHANNEL_HANDLER", ".", "update", "(", ")" ]
this maintenance function handles repeated checks and updates that the server needs to do .
train
false
35,973
def location_tag():
    """RESTful CRUD controller."""
    return s3_rest_controller()
[ "def", "location_tag", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
35,974
def open_if_exists(filename, mode='rb'):
    """Open *filename* and return the file object, or None if it is absent."""
    try:
        return open(filename, mode)
    except IOError as exc:
        # A missing file, or a directory sitting where the file should be,
        # means "does not exist" (return None); other OS errors propagate.
        if exc.errno not in (errno.ENOENT, errno.EISDIR):
            raise
[ "def", "open_if_exists", "(", "filename", ",", "mode", "=", "'rb'", ")", ":", "try", ":", "return", "open", "(", "filename", ",", "mode", ")", "except", "IOError", "as", "e", ":", "if", "(", "e", ".", "errno", "not", "in", "(", "errno", ".", "ENOENT", ",", "errno", ".", "EISDIR", ")", ")", ":", "raise" ]
returns a file descriptor for the filename if that file exists .
train
true
35,975
def _next_regular(target): if (target <= 6): return target if (not (target & (target - 1))): return target match = float('inf') p5 = 1 while (p5 < target): p35 = p5 while (p35 < target): quotient = (- ((- target) // p35)) try: p2 = (2 ** (quotient - 1).bit_length()) except AttributeError: p2 = (2 ** _bit_length_26((quotient - 1))) N = (p2 * p35) if (N == target): return N elif (N < match): match = N p35 *= 3 if (p35 == target): return p35 if (p35 < match): match = p35 p5 *= 5 if (p5 == target): return p5 if (p5 < match): match = p5 return match
[ "def", "_next_regular", "(", "target", ")", ":", "if", "(", "target", "<=", "6", ")", ":", "return", "target", "if", "(", "not", "(", "target", "&", "(", "target", "-", "1", ")", ")", ")", ":", "return", "target", "match", "=", "float", "(", "'inf'", ")", "p5", "=", "1", "while", "(", "p5", "<", "target", ")", ":", "p35", "=", "p5", "while", "(", "p35", "<", "target", ")", ":", "quotient", "=", "(", "-", "(", "(", "-", "target", ")", "//", "p35", ")", ")", "try", ":", "p2", "=", "(", "2", "**", "(", "quotient", "-", "1", ")", ".", "bit_length", "(", ")", ")", "except", "AttributeError", ":", "p2", "=", "(", "2", "**", "_bit_length_26", "(", "(", "quotient", "-", "1", ")", ")", ")", "N", "=", "(", "p2", "*", "p35", ")", "if", "(", "N", "==", "target", ")", ":", "return", "N", "elif", "(", "N", "<", "match", ")", ":", "match", "=", "N", "p35", "*=", "3", "if", "(", "p35", "==", "target", ")", ":", "return", "p35", "if", "(", "p35", "<", "match", ")", ":", "match", "=", "p35", "p5", "*=", "5", "if", "(", "p5", "==", "target", ")", ":", "return", "p5", "if", "(", "p5", "<", "match", ")", ":", "match", "=", "p5", "return", "match" ]
find the next regular number greater than or equal to target .
train
true
35,976
def cbAuthentication(result, proto):
    """After authentication succeeds, list all mailboxes on the connection."""
    listing = proto.list('', '*')
    return listing.addCallback(cbMailboxList, proto)
[ "def", "cbAuthentication", "(", "result", ",", "proto", ")", ":", "return", "proto", ".", "list", "(", "''", ",", "'*'", ")", ".", "addCallback", "(", "cbMailboxList", ",", "proto", ")" ]
callback after authentication has succeeded .
train
false
35,977
def subscription():
    """RESTful CRUD controller."""
    output = s3_rest_controller()
    return output
[ "def", "subscription", "(", ")", ":", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful crud controller .
train
false
35,979
def set_urlconf(urlconf_name):
    """Set (or, for a falsy value, clear) the URLconf for the current thread."""
    thread = currentThread()
    if urlconf_name:
        _urlconfs[thread] = urlconf_name
    else:
        # Remove any per-thread override; no-op if none was set.
        _urlconfs.pop(thread, None)
[ "def", "set_urlconf", "(", "urlconf_name", ")", ":", "thread", "=", "currentThread", "(", ")", "if", "urlconf_name", ":", "_urlconfs", "[", "thread", "]", "=", "urlconf_name", "elif", "(", "thread", "in", "_urlconfs", ")", ":", "del", "_urlconfs", "[", "thread", "]" ]
sets the urlconf for the current thread .
train
false
35,980
def get_taxa_prevalence(tax_counts):
    """Return each lineage's share of total counts, min-max rescaled to [0, 1]."""
    # Normalize each sample (column) so its counts sum to 1.
    ratios = apply_along_axis(lambda col: col / float(sum(col)), 0, tax_counts)
    # Total relative abundance per lineage (row).
    row_totals = apply_along_axis(lambda row: sum(row), 1, ratios)
    prevalence = row_totals / float(sum(row_totals))
    # Rescale onto [0, 1].
    lo = min(prevalence)
    hi = max(prevalence)
    return (prevalence - lo) / (hi - lo)
[ "def", "get_taxa_prevalence", "(", "tax_counts", ")", ":", "tax_ratios", "=", "apply_along_axis", "(", "(", "lambda", "x", ":", "(", "x", "/", "float", "(", "sum", "(", "x", ")", ")", ")", ")", ",", "0", ",", "tax_counts", ")", "lineage_sums", "=", "apply_along_axis", "(", "(", "lambda", "x", ":", "sum", "(", "x", ")", ")", ",", "1", ",", "tax_ratios", ")", "total_count", "=", "sum", "(", "lineage_sums", ")", "prevalence", "=", "(", "lineage_sums", "/", "float", "(", "total_count", ")", ")", "prevalence", "=", "(", "(", "prevalence", "-", "min", "(", "prevalence", ")", ")", "/", "(", "max", "(", "prevalence", ")", "-", "min", "(", "prevalence", ")", ")", ")", "return", "prevalence" ]
returns each lineage's portion of the total count ; takes an otu_table of taxon counts .
train
false
35,981
def cluster_plan():
    """Review the pending Riak cluster plan; True iff the command succeeded."""
    result = __execute_cmd('riak-admin', 'cluster plan')
    return result['retcode'] == 0
[ "def", "cluster_plan", "(", ")", ":", "cmd", "=", "__execute_cmd", "(", "'riak-admin'", ",", "'cluster plan'", ")", "if", "(", "cmd", "[", "'retcode'", "]", "!=", "0", ")", ":", "return", "False", "return", "True" ]
review cluster plan .
train
false
35,982
def get_all_users(path_prefix='/', region=None, key=None, keyid=None, profile=None):
    """Fetch every IAM user under *path_prefix*, following pagination markers.

    Returns None when no connection could be established.
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if not conn:
        return None
    page = conn.get_all_users(path_prefix=path_prefix)
    result = page.list_users_response.list_users_result
    users = result.users
    marker = getattr(result, 'marker', None)
    # Keep requesting pages until the response carries no continuation marker.
    while marker:
        page = conn.get_all_users(path_prefix=path_prefix, marker=marker)
        result = page.list_users_response.list_users_result
        users = users + result.users
        marker = getattr(result, 'marker', None)
    return users
[ "def", "get_all_users", "(", "path_prefix", "=", "'/'", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "conn", ")", ":", "return", "None", "_users", "=", "conn", ".", "get_all_users", "(", "path_prefix", "=", "path_prefix", ")", "users", "=", "_users", ".", "list_users_response", ".", "list_users_result", ".", "users", "marker", "=", "getattr", "(", "_users", ".", "list_users_response", ".", "list_users_result", ",", "'marker'", ",", "None", ")", "while", "marker", ":", "_users", "=", "conn", ".", "get_all_users", "(", "path_prefix", "=", "path_prefix", ",", "marker", "=", "marker", ")", "users", "=", "(", "users", "+", "_users", ".", "list_users_response", ".", "list_users_result", ".", "users", ")", "marker", "=", "getattr", "(", "_users", ".", "list_users_response", ".", "list_users_result", ",", "'marker'", ",", "None", ")", "return", "users" ]
get and return all iam user details .
train
true
35,984
def open_text_files(urlpath, compression=None, mode='rt', encoding='utf8', errors='strict', **kwargs):
    """Open files at *urlpath* in text mode (any 'b' in *mode* becomes 't')."""
    text_mode = mode.replace('b', 't')
    return open_files(urlpath, compression, text_mode, encoding, **kwargs)
[ "def", "open_text_files", "(", "urlpath", ",", "compression", "=", "None", ",", "mode", "=", "'rt'", ",", "encoding", "=", "'utf8'", ",", "errors", "=", "'strict'", ",", "**", "kwargs", ")", ":", "return", "open_files", "(", "urlpath", ",", "compression", ",", "mode", ".", "replace", "(", "'b'", ",", "'t'", ")", ",", "encoding", ",", "**", "kwargs", ")" ]
given path return dask .
train
false
35,986
@library.global_function
def mozillians_field(element, required=False):
    """Render a form field through the includes/field.html template (Jinja2)."""
    tmpl = get_template('includes/field.html')
    ctx = {'field': element, 'flag_required': required}
    return mark_safe(tmpl.render(ctx))
[ "@", "library", ".", "global_function", "def", "mozillians_field", "(", "element", ",", "required", "=", "False", ")", ":", "template", "=", "get_template", "(", "'includes/field.html'", ")", "context", "=", "{", "'field'", ":", "element", ",", "'flag_required'", ":", "required", "}", "return", "mark_safe", "(", "template", ".", "render", "(", "context", ")", ")" ]
renders fields in jinja2 .
train
false
35,987
def params2stack(params, net_config):
    """Unflatten *params* into per-layer {'w', 'b'} dicts per *net_config*.

    net_config supplies 'input_size' and 'layer_sizes'; weights are read
    first for each layer, then biases.
    """
    stack = []
    pos = 0
    in_size = net_config['input_size']
    for out_size in net_config['layer_sizes']:
        w_len = in_size * out_size
        weights = params[pos:pos + w_len].reshape(out_size, in_size)
        pos += w_len
        biases = params[pos:pos + out_size]
        pos += out_size
        stack.append({'w': weights, 'b': biases})
        in_size = out_size
    return stack
[ "def", "params2stack", "(", "params", ",", "net_config", ")", ":", "depth", "=", "len", "(", "net_config", "[", "'layer_sizes'", "]", ")", "stack", "=", "[", "dict", "(", ")", "for", "i", "in", "range", "(", "depth", ")", "]", "prev_layer_size", "=", "net_config", "[", "'input_size'", "]", "current_pos", "=", "0", "for", "i", "in", "range", "(", "depth", ")", ":", "wlen", "=", "(", "prev_layer_size", "*", "net_config", "[", "'layer_sizes'", "]", "[", "i", "]", ")", "stack", "[", "i", "]", "[", "'w'", "]", "=", "params", "[", "current_pos", ":", "(", "current_pos", "+", "wlen", ")", "]", ".", "reshape", "(", "net_config", "[", "'layer_sizes'", "]", "[", "i", "]", ",", "prev_layer_size", ")", "current_pos", "=", "(", "current_pos", "+", "wlen", ")", "blen", "=", "net_config", "[", "'layer_sizes'", "]", "[", "i", "]", "stack", "[", "i", "]", "[", "'b'", "]", "=", "params", "[", "current_pos", ":", "(", "current_pos", "+", "blen", ")", "]", "current_pos", "=", "(", "current_pos", "+", "blen", ")", "prev_layer_size", "=", "net_config", "[", "'layer_sizes'", "]", "[", "i", "]", "return", "stack" ]
converts a flattened parameter vector into a nice "stack" structure for us to work with .
train
false
35,988
def encipher_bifid5(msg, key):
    """Encrypt *msg* with the 5x5 Bifid cipher using *key*."""
    msg, key, _ = _prep(msg.upper(), key.upper(), None, bifid5)
    full_key = padded_key(key, bifid5)
    return encipher_bifid(msg, '', full_key)
[ "def", "encipher_bifid5", "(", "msg", ",", "key", ")", ":", "(", "msg", ",", "key", ",", "_", ")", "=", "_prep", "(", "msg", ".", "upper", "(", ")", ",", "key", ".", "upper", "(", ")", ",", "None", ",", "bifid5", ")", "key", "=", "padded_key", "(", "key", ",", "bifid5", ")", "return", "encipher_bifid", "(", "msg", ",", "''", ",", "key", ")" ]
performs the bifid cipher encryption on plaintext msg .
train
false
35,989
def list_associated_files(file_path, base_name_only=False, filter_ext=''):
    """Return paths of files sharing *file_path*'s base name.

    base_name_only: when False a trailing '.' is appended, so only files that
    differ purely in extension match.
    filter_ext: optional comma-separated list of extensions to keep.
    """
    if not file_path:
        return []
    base_name = file_path.rpartition('.')[0]
    if not base_name_only:
        base_name = base_name + '.'
    if not base_name:
        return []
    # Escape glob metacharacters so they match literally.
    base_name = re.sub('[\\[\\]\\*\\?]', '[\\g<0>]', base_name)
    if filter_ext:
        filter_ext = tuple(ext.lower().strip() for ext in filter_ext.split(','))
    results = []
    for candidate in ek.ek(glob.glob, base_name + '*'):
        if candidate == file_path:
            continue
        if not ek.ek(os.path.isfile, candidate):
            continue
        if filter_ext and not candidate.lower().endswith(filter_ext):
            continue
        results.append(candidate)
    return results
[ "def", "list_associated_files", "(", "file_path", ",", "base_name_only", "=", "False", ",", "filter_ext", "=", "''", ")", ":", "if", "(", "not", "file_path", ")", ":", "return", "[", "]", "file_path_list", "=", "[", "]", "base_name", "=", "file_path", ".", "rpartition", "(", "'.'", ")", "[", "0", "]", "if", "(", "not", "base_name_only", ")", ":", "base_name", "=", "(", "base_name", "+", "'.'", ")", "if", "(", "not", "base_name", ")", ":", "return", "[", "]", "base_name", "=", "re", ".", "sub", "(", "'[\\\\[\\\\]\\\\*\\\\?]'", ",", "'[\\\\g<0>]'", ",", "base_name", ")", "if", "filter_ext", ":", "filter_ext", "=", "tuple", "(", "(", "x", ".", "lower", "(", ")", ".", "strip", "(", ")", "for", "x", "in", "filter_ext", ".", "split", "(", "','", ")", ")", ")", "for", "associated_file_path", "in", "ek", ".", "ek", "(", "glob", ".", "glob", ",", "(", "base_name", "+", "'*'", ")", ")", ":", "if", "(", "associated_file_path", "==", "file_path", ")", ":", "continue", "if", "ek", ".", "ek", "(", "os", ".", "path", ".", "isfile", ",", "associated_file_path", ")", ":", "if", "filter_ext", ":", "if", "associated_file_path", ".", "lower", "(", ")", ".", "endswith", "(", "filter_ext", ")", ":", "file_path_list", ".", "append", "(", "associated_file_path", ")", "else", ":", "file_path_list", ".", "append", "(", "associated_file_path", ")", "return", "file_path_list" ]
for a given file path , searches for files with the same name but a different extension and returns their absolute paths . file_path : the file to check for associated files . base_name_only : if false , an extra '.' is appended to the base name so only pure extension changes match .
train
false
35,990
def is_simple(sum):
    """Return True if no constructor of *sum* carries fields."""
    return all(not t.fields for t in sum.types)
[ "def", "is_simple", "(", "sum", ")", ":", "for", "t", "in", "sum", ".", "types", ":", "if", "t", ".", "fields", ":", "return", "False", "return", "True" ]
return true if a sum is a simple .
train
false
35,991
def unquote(string):
    """Undo the effects of quote(): strip one leading and one trailing quote.

    Either single or double quotes are stripped, independently at each end.
    Fix: a lone quote character (e.g. '"') previously raised IndexError,
    because after stripping the leading quote the string was empty when
    string[-1] was read; it now returns ''.
    """
    if not string:
        return string
    if string[0] in '"\'':
        string = string[1:]
    if string and string[-1] in '"\'':
        string = string[:-1]
    return string
[ "def", "unquote", "(", "string", ")", ":", "if", "(", "not", "string", ")", ":", "return", "string", "if", "(", "string", "[", "0", "]", "in", "'\"\\''", ")", ":", "string", "=", "string", "[", "1", ":", "]", "if", "(", "string", "[", "(", "-", "1", ")", "]", "in", "'\"\\''", ")", ":", "string", "=", "string", "[", ":", "(", "-", "1", ")", "]", "return", "string" ]
undo the effects of quote() .
train
false
35,992
def get_default(versions):
    """Return the first version that is not a release candidate, else None."""
    return next((v for v in versions if not v.rc), None)
[ "def", "get_default", "(", "versions", ")", ":", "for", "version", "in", "versions", ":", "if", "(", "not", "version", ".", "rc", ")", ":", "return", "version" ]
return a :class:version for the latest non-rc version .
train
false
35,993
@contextmanager
def tsv_writer(response, fields, name=None, bom=False):
    """Context manager yielding a UTF-8 TSV writer over *response*.

    Sets Content-Type / Content-disposition headers when the response has
    them, optionally writes a UTF-8 BOM, and emits the field-id header row.
    """
    if hasattr(response, u'headers'):
        response.headers['Content-Type'] = 'text/tab-separated-values; charset=utf-8'
        if name:
            response.headers['Content-disposition'] = 'attachment; filename="{name}.tsv"'.format(name=encode_rfc2231(name))
    wr = unicodecsv.writer(response, encoding=u'utf-8', dialect=unicodecsv.excel_tab)
    if bom:
        response.write(UTF8_BOM)
    # Header row: one column per field id.
    wr.writerow(f['id'] for f in fields)
    yield wr
[ "@", "contextmanager", "def", "tsv_writer", "(", "response", ",", "fields", ",", "name", "=", "None", ",", "bom", "=", "False", ")", ":", "if", "hasattr", "(", "response", ",", "u'headers'", ")", ":", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'text/tab-separated-values; charset=utf-8'", "if", "name", ":", "response", ".", "headers", "[", "'Content-disposition'", "]", "=", "'attachment; filename=\"{name}.tsv\"'", ".", "format", "(", "name", "=", "encode_rfc2231", "(", "name", ")", ")", "wr", "=", "unicodecsv", ".", "writer", "(", "response", ",", "encoding", "=", "u'utf-8'", ",", "dialect", "=", "unicodecsv", ".", "excel_tab", ")", "if", "bom", ":", "response", ".", "write", "(", "UTF8_BOM", ")", "wr", ".", "writerow", "(", "(", "f", "[", "'id'", "]", "for", "f", "in", "fields", ")", ")", "(", "yield", "wr", ")" ]
context manager for writing utf-8 tsv data to response .
train
false
35,994
def set_health_check(name, health_check, region=None, key=None, keyid=None, profile=None):
    """Configure a health check on the named ELB; True on success."""
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    hc = HealthCheck(**health_check)
    try:
        conn.configure_health_check(name, hc)
    except boto.exception.BotoServerError as error:
        log.debug(error)
        log.info('Failed to configure health check on ELB {0}: {1}'.format(name, error))
        return False
    log.info('Configured health check on ELB {0}'.format(name))
    return True
[ "def", "set_health_check", "(", "name", ",", "health_check", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "hc", "=", "HealthCheck", "(", "**", "health_check", ")", "try", ":", "conn", ".", "configure_health_check", "(", "name", ",", "hc", ")", "log", ".", "info", "(", "'Configured health check on ELB {0}'", ".", "format", "(", "name", ")", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "error", ":", "log", ".", "debug", "(", "error", ")", "log", ".", "info", "(", "'Failed to configure health check on ELB {0}: {1}'", ".", "format", "(", "name", ",", "error", ")", ")", "return", "False", "return", "True" ]
set attributes on an elb .
train
false
35,995
def token_list_len(tokenlist):
    """Total character count of a token list, ignoring zero-width escapes."""
    zero_width = Token.ZeroWidthEscape
    total = 0
    for token, text in tokenlist:
        if token != zero_width:
            total += len(text)
    return total
[ "def", "token_list_len", "(", "tokenlist", ")", ":", "ZeroWidthEscape", "=", "Token", ".", "ZeroWidthEscape", "return", "sum", "(", "(", "len", "(", "item", "[", "1", "]", ")", "for", "item", "in", "tokenlist", "if", "(", "item", "[", "0", "]", "!=", "ZeroWidthEscape", ")", ")", ")" ]
return the amount of characters in this token list .
train
true
35,996
def get_macs(vm_):
    """Return MAC addresses of the named VM's NICs, or None if lookup fails."""
    nics = get_nics(vm_)
    if nics is None:
        return None
    return [nic for nic in nics]
[ "def", "get_macs", "(", "vm_", ")", ":", "macs", "=", "[", "]", "nics", "=", "get_nics", "(", "vm_", ")", "if", "(", "nics", "is", "None", ")", ":", "return", "None", "for", "nic", "in", "nics", ":", "macs", ".", "append", "(", "nic", ")", "return", "macs" ]
return a list off mac addresses from the named vm cli example: .
train
true
35,997
def OutHeader2(text):
    """Output *text* as a level-2 header (underlined with '-')."""
    OutHeader(text, '-')
[ "def", "OutHeader2", "(", "text", ")", ":", "OutHeader", "(", "text", ",", "'-'", ")" ]
output a level 2 header comment .
train
false
35,998
def cxCounter(ind1, ind2, indpb):
    """Independently swap each item count between the two individuals.

    Each key in ITEMS is swapped with probability *indpb*.
    """
    for key in ITEMS.keys():
        if random.random() < indpb:
            ind1[key], ind2[key] = ind2[key], ind1[key]
    return ind1, ind2
[ "def", "cxCounter", "(", "ind1", ",", "ind2", ",", "indpb", ")", ":", "for", "key", "in", "ITEMS", ".", "keys", "(", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "indpb", ")", ":", "(", "ind1", "[", "key", "]", ",", "ind2", "[", "key", "]", ")", "=", "(", "ind2", "[", "key", "]", ",", "ind1", "[", "key", "]", ")", "return", "(", "ind1", ",", "ind2", ")" ]
swaps the number of perticular items between two individuals .
train
false
35,999
def with_tempdir(callable):
    """Decorator: run *callable* with a fresh temp dir as tempfile.tempdir.

    The directory is created before the call, installed as the process-wide
    default temp dir, and removed (with the previous default restored)
    afterwards. Generator functions are wrapped so the swap spans the whole
    iteration.

    Fix: the generator branch previously omitted @wraps, losing the wrapped
    function's metadata (__name__, __doc__); both branches now apply it.
    """
    if isgeneratorfunction(callable):
        @wraps(callable)
        def proxy(*args, **kwargs):
            old_tmpdir = tempfile.gettempdir()
            new_tmpdir = tempfile.mkdtemp(prefix='temp-lgc-')
            tempfile.tempdir = new_tmpdir
            try:
                for item in callable(*args, **kwargs):
                    yield item
            finally:
                # Remove the temp dir, then restore the old default even if
                # removal itself fails.
                try:
                    rmtree(new_tmpdir, ignore_errors=True)
                finally:
                    tempfile.tempdir = old_tmpdir
        return proxy

    @wraps(callable)
    def proxy(*args, **kwargs):
        old_tmpdir = tempfile.gettempdir()
        new_tmpdir = tempfile.mkdtemp(prefix='temp-lgc-')
        tempfile.tempdir = new_tmpdir
        try:
            return callable(*args, **kwargs)
        finally:
            try:
                rmtree(new_tmpdir, ignore_errors=True)
            finally:
                tempfile.tempdir = old_tmpdir
    return proxy
[ "def", "with_tempdir", "(", "callable", ")", ":", "if", "isgeneratorfunction", "(", "callable", ")", ":", "def", "proxy", "(", "*", "args", ",", "**", "kwargs", ")", ":", "old_tmpdir", "=", "tempfile", ".", "gettempdir", "(", ")", "new_tmpdir", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "'temp-lgc-'", ")", "tempfile", ".", "tempdir", "=", "new_tmpdir", "try", ":", "for", "x", "in", "callable", "(", "*", "args", ",", "**", "kwargs", ")", ":", "(", "yield", "x", ")", "finally", ":", "try", ":", "rmtree", "(", "new_tmpdir", ",", "ignore_errors", "=", "True", ")", "finally", ":", "tempfile", ".", "tempdir", "=", "old_tmpdir", "return", "proxy", "@", "wraps", "(", "callable", ")", "def", "proxy", "(", "*", "args", ",", "**", "kargs", ")", ":", "old_tmpdir", "=", "tempfile", ".", "gettempdir", "(", ")", "new_tmpdir", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "'temp-lgc-'", ")", "tempfile", ".", "tempdir", "=", "new_tmpdir", "try", ":", "return", "callable", "(", "*", "args", ",", "**", "kargs", ")", "finally", ":", "try", ":", "rmtree", "(", "new_tmpdir", ",", "ignore_errors", "=", "True", ")", "finally", ":", "tempfile", ".", "tempdir", "=", "old_tmpdir", "return", "proxy" ]
decorator to give a single test a tempdir as argument to test method .
train
false
36,000
def _chinese_remainder_reconstruction_multivariate(hp, hq, p, q):
    """Combine hp (mod p) and hq (mod q) coefficient-wise via the CRT."""
    hp_only = set(hp.monoms())
    hq_only = set(hq.monoms())
    shared = hp_only & hq_only
    hp_only -= shared
    hq_only -= shared
    zero = hp.ring.domain.zero
    hpq = hp.ring.zero
    if isinstance(hp.ring.domain, PolynomialRing):
        # Coefficients are themselves polynomials: recurse per coefficient.
        crt_ = _chinese_remainder_reconstruction_multivariate
    else:
        def crt_(cp, cq, p, q):
            return crt([p, q], [cp, cq], symmetric=True)[0]
    # Monomials present on both sides, then each side's exclusives
    # (the missing coefficient is treated as zero).
    for monom in shared:
        hpq[monom] = crt_(hp[monom], hq[monom], p, q)
    for monom in hp_only:
        hpq[monom] = crt_(hp[monom], zero, p, q)
    for monom in hq_only:
        hpq[monom] = crt_(zero, hq[monom], p, q)
    return hpq
[ "def", "_chinese_remainder_reconstruction_multivariate", "(", "hp", ",", "hq", ",", "p", ",", "q", ")", ":", "hpmonoms", "=", "set", "(", "hp", ".", "monoms", "(", ")", ")", "hqmonoms", "=", "set", "(", "hq", ".", "monoms", "(", ")", ")", "monoms", "=", "hpmonoms", ".", "intersection", "(", "hqmonoms", ")", "hpmonoms", ".", "difference_update", "(", "monoms", ")", "hqmonoms", ".", "difference_update", "(", "monoms", ")", "zero", "=", "hp", ".", "ring", ".", "domain", ".", "zero", "hpq", "=", "hp", ".", "ring", ".", "zero", "if", "isinstance", "(", "hp", ".", "ring", ".", "domain", ",", "PolynomialRing", ")", ":", "crt_", "=", "_chinese_remainder_reconstruction_multivariate", "else", ":", "def", "crt_", "(", "cp", ",", "cq", ",", "p", ",", "q", ")", ":", "return", "crt", "(", "[", "p", ",", "q", "]", ",", "[", "cp", ",", "cq", "]", ",", "symmetric", "=", "True", ")", "[", "0", "]", "for", "monom", "in", "monoms", ":", "hpq", "[", "monom", "]", "=", "crt_", "(", "hp", "[", "monom", "]", ",", "hq", "[", "monom", "]", ",", "p", ",", "q", ")", "for", "monom", "in", "hpmonoms", ":", "hpq", "[", "monom", "]", "=", "crt_", "(", "hp", "[", "monom", "]", ",", "zero", ",", "p", ",", "q", ")", "for", "monom", "in", "hqmonoms", ":", "hpq", "[", "monom", "]", "=", "crt_", "(", "zero", ",", "hq", "[", "monom", "]", ",", "p", ",", "q", ")", "return", "hpq" ]
construct a polynomial h_{pq} in mathbb{z}_{p q}[x_0 .
train
false
36,004
@register.assignment_tag(takes_context=True)
def assignment_no_params_with_context(context):
    # No docstring on purpose: the surrounding test suite appears to compare
    # against the tag's __doc__ (see the dataset description) — adding one
    # would change observable metadata.
    result = 'assignment_no_params_with_context - Expected result (context value: %s)' % context['value']
    return result
[ "@", "register", ".", "assignment_tag", "(", "takes_context", "=", "True", ")", "def", "assignment_no_params_with_context", "(", "context", ")", ":", "return", "(", "'assignment_no_params_with_context - Expected result (context value: %s)'", "%", "context", "[", "'value'", "]", ")" ]
expected assignment_no_params_with_context __doc__ .
train
false
36,005
@flaskbb.command()
def reindex():
    # No docstring added: click surfaces a command's __doc__ as CLI help
    # text, so adding one would change the program's visible output.
    click.secho('[+] Reindexing search index...', fg='cyan')
    whooshee.reindex()
[ "@", "flaskbb", ".", "command", "(", ")", "def", "reindex", "(", ")", ":", "click", ".", "secho", "(", "'[+] Reindexing search index...'", ",", "fg", "=", "'cyan'", ")", "whooshee", ".", "reindex", "(", ")" ]
reindex all instances of a given mapping type with celery tasks :arg mapping_type_names: list of mapping types to reindex .
train
false
36,006
def is_scalar(v):
    """Return True if *v* is one of the scalar-like types."""
    return isinstance(v, _scalar_types)
[ "def", "is_scalar", "(", "v", ")", ":", "return", "isinstance", "(", "v", ",", "_scalar_types", ")" ]
is the given value a scalar-like object? .
train
false
36,008
def setup_ssh_key(hostname, user, password, port=22):
    """Install our public key into user@hostname's authorized_keys over SSH.

    Failures are logged, not raised; the session is closed best-effort.
    """
    logging.debug('Performing SSH key setup on %s:%d as %s.' % (hostname, port, user))
    try:
        public_key = get_public_key()
        session = remote.remote_login(client='ssh', host=hostname, port=port,
                                      username=user, password=password,
                                      prompt='[$#%]')
        session.cmd_output('mkdir -p ~/.ssh')
        session.cmd_output('chmod 700 ~/.ssh')
        session.cmd_output("echo '%s' >> ~/.ssh/authorized_keys; " % public_key)
        session.cmd_output('chmod 600 ~/.ssh/authorized_keys')
        logging.debug('SSH key setup complete.')
    except Exception as err:
        logging.debug('SSH key setup has failed: %s', err)
        try:
            session.close()
        except:
            pass
[ "def", "setup_ssh_key", "(", "hostname", ",", "user", ",", "password", ",", "port", "=", "22", ")", ":", "logging", ".", "debug", "(", "(", "'Performing SSH key setup on %s:%d as %s.'", "%", "(", "hostname", ",", "port", ",", "user", ")", ")", ")", "try", ":", "public_key", "=", "get_public_key", "(", ")", "session", "=", "remote", ".", "remote_login", "(", "client", "=", "'ssh'", ",", "host", "=", "hostname", ",", "port", "=", "port", ",", "username", "=", "user", ",", "password", "=", "password", ",", "prompt", "=", "'[$#%]'", ")", "session", ".", "cmd_output", "(", "'mkdir -p ~/.ssh'", ")", "session", ".", "cmd_output", "(", "'chmod 700 ~/.ssh'", ")", "session", ".", "cmd_output", "(", "(", "\"echo '%s' >> ~/.ssh/authorized_keys; \"", "%", "public_key", ")", ")", "session", ".", "cmd_output", "(", "'chmod 600 ~/.ssh/authorized_keys'", ")", "logging", ".", "debug", "(", "'SSH key setup complete.'", ")", "except", "Exception", "as", "err", ":", "logging", ".", "debug", "(", "'SSH key setup has failed: %s'", ",", "err", ")", "try", ":", "session", ".", "close", "(", ")", "except", ":", "pass" ]
setup up remote login in another server by using public key .
train
false
36,009
def dependencies_graph(filename, dep_info):
    """Write *dep_info* as a dot file named *filename*."""
    seen = {}
    printer = DotBackend(filename[:-4], rankdir='LR')
    printer.emit('URL="." node[shape="box"]')
    # First pass: emit a node for every module mentioned anywhere.
    for modname, dependencies in sorted(six.iteritems(dep_info)):
        seen[modname] = 1
        printer.emit_node(modname)
        for dep in dependencies:
            if dep not in seen:
                seen[dep] = 1
                printer.emit_node(dep)
    # Second pass: emit the dependency edges.
    for depmodname, dependencies in sorted(six.iteritems(dep_info)):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
[ "def", "dependencies_graph", "(", "filename", ",", "dep_info", ")", ":", "done", "=", "{", "}", "printer", "=", "DotBackend", "(", "filename", "[", ":", "(", "-", "4", ")", "]", ",", "rankdir", "=", "'LR'", ")", "printer", ".", "emit", "(", "'URL=\".\" node[shape=\"box\"]'", ")", "for", "(", "modname", ",", "dependencies", ")", "in", "sorted", "(", "six", ".", "iteritems", "(", "dep_info", ")", ")", ":", "done", "[", "modname", "]", "=", "1", "printer", ".", "emit_node", "(", "modname", ")", "for", "modname", "in", "dependencies", ":", "if", "(", "modname", "not", "in", "done", ")", ":", "done", "[", "modname", "]", "=", "1", "printer", ".", "emit_node", "(", "modname", ")", "for", "(", "depmodname", ",", "dependencies", ")", "in", "sorted", "(", "six", ".", "iteritems", "(", "dep_info", ")", ")", ":", "for", "modname", "in", "dependencies", ":", "printer", ".", "emit_edge", "(", "modname", ",", "depmodname", ")", "printer", ".", "generate", "(", "filename", ")" ]
write dependencies as a dot file .
train
false
36,011
def _check_expression(text, allowed_variables=[]):
    """Return True iff *text* parses to a single whitelisted expression.

    Every AST node class must be in AST_NODE_TYPE_WHITELIST, and Name/Call/
    Attribute nodes must additionally pass their dedicated checkers.
    (The default [] is never mutated.)
    """
    try:
        module = parse(text)
    except SyntaxError:
        return False
    if not isinstance(module, Module):
        return False
    statements = module.body
    if len(statements) != 1:
        return False
    expression = statements[0]
    if expression.__class__.__name__ != 'Expr':
        return False
    for node in walk(expression):
        node_class = node.__class__.__name__
        if node_class not in AST_NODE_TYPE_WHITELIST:
            return False
        if node_class == 'Name' and not _check_name(node, allowed_variables):
            return False
        if node_class == 'Call' and not _check_call(node):
            return False
        if node_class == 'Attribute' and not _check_attribute(node):
            return False
    return True
[ "def", "_check_expression", "(", "text", ",", "allowed_variables", "=", "[", "]", ")", ":", "try", ":", "module", "=", "parse", "(", "text", ")", "except", "SyntaxError", ":", "return", "False", "if", "(", "not", "isinstance", "(", "module", ",", "Module", ")", ")", ":", "return", "False", "statements", "=", "module", ".", "body", "if", "(", "not", "(", "len", "(", "statements", ")", "==", "1", ")", ")", ":", "return", "False", "expression", "=", "statements", "[", "0", "]", "if", "(", "expression", ".", "__class__", ".", "__name__", "!=", "'Expr'", ")", ":", "return", "False", "for", "ast_node", "in", "walk", "(", "expression", ")", ":", "ast_node_class", "=", "ast_node", ".", "__class__", ".", "__name__", "if", "(", "ast_node_class", "not", "in", "AST_NODE_TYPE_WHITELIST", ")", ":", "return", "False", "if", "(", "ast_node_class", "==", "'Name'", ")", ":", "if", "(", "not", "_check_name", "(", "ast_node", ",", "allowed_variables", ")", ")", ":", "return", "False", "elif", "(", "ast_node_class", "==", "'Call'", ")", ":", "if", "(", "not", "_check_call", "(", "ast_node", ")", ")", ":", "return", "False", "elif", "(", "ast_node_class", "==", "'Attribute'", ")", ":", "if", "(", "not", "_check_attribute", "(", "ast_node", ")", ")", ":", "return", "False", "return", "True" ]
return true if text parses to a single expression whose ast nodes , names , calls and attributes are all whitelisted ; false otherwise .
train
false
36,012
def getRoundedToPlacesString(decimalPlaces, number): roundedToPlaces = getRoundedToPlaces(decimalPlaces, number) roundedToPlacesString = str(roundedToPlaces) if ('e' in roundedToPlacesString): return ('%.15f' % roundedToPlaces).rstrip('0') return roundedToPlacesString
[ "def", "getRoundedToPlacesString", "(", "decimalPlaces", ",", "number", ")", ":", "roundedToPlaces", "=", "getRoundedToPlaces", "(", "decimalPlaces", ",", "number", ")", "roundedToPlacesString", "=", "str", "(", "roundedToPlaces", ")", "if", "(", "'e'", "in", "roundedToPlacesString", ")", ":", "return", "(", "'%.15f'", "%", "roundedToPlaces", ")", ".", "rstrip", "(", "'0'", ")", "return", "roundedToPlacesString" ]
get number rounded to a number of decimal places as a string .
train
false
36,013
def _kl_divergence_bh(params, P, neighbors, degrees_of_freedom, n_samples, n_components, angle=0.5, skip_num_points=0, verbose=False): params = astype(params, np.float32, copy=False) X_embedded = params.reshape(n_samples, n_components) neighbors = astype(neighbors, np.int64, copy=False) if (len(P.shape) == 1): sP = squareform(P).astype(np.float32) else: sP = P.astype(np.float32) grad = np.zeros(X_embedded.shape, dtype=np.float32) error = _barnes_hut_tsne.gradient(sP, X_embedded, neighbors, grad, angle, n_components, verbose, dof=degrees_of_freedom) c = ((2.0 * (degrees_of_freedom + 1.0)) / degrees_of_freedom) grad = grad.ravel() grad *= c return (error, grad)
[ "def", "_kl_divergence_bh", "(", "params", ",", "P", ",", "neighbors", ",", "degrees_of_freedom", ",", "n_samples", ",", "n_components", ",", "angle", "=", "0.5", ",", "skip_num_points", "=", "0", ",", "verbose", "=", "False", ")", ":", "params", "=", "astype", "(", "params", ",", "np", ".", "float32", ",", "copy", "=", "False", ")", "X_embedded", "=", "params", ".", "reshape", "(", "n_samples", ",", "n_components", ")", "neighbors", "=", "astype", "(", "neighbors", ",", "np", ".", "int64", ",", "copy", "=", "False", ")", "if", "(", "len", "(", "P", ".", "shape", ")", "==", "1", ")", ":", "sP", "=", "squareform", "(", "P", ")", ".", "astype", "(", "np", ".", "float32", ")", "else", ":", "sP", "=", "P", ".", "astype", "(", "np", ".", "float32", ")", "grad", "=", "np", ".", "zeros", "(", "X_embedded", ".", "shape", ",", "dtype", "=", "np", ".", "float32", ")", "error", "=", "_barnes_hut_tsne", ".", "gradient", "(", "sP", ",", "X_embedded", ",", "neighbors", ",", "grad", ",", "angle", ",", "n_components", ",", "verbose", ",", "dof", "=", "degrees_of_freedom", ")", "c", "=", "(", "(", "2.0", "*", "(", "degrees_of_freedom", "+", "1.0", ")", ")", "/", "degrees_of_freedom", ")", "grad", "=", "grad", ".", "ravel", "(", ")", "grad", "*=", "c", "return", "(", "error", ",", "grad", ")" ]
t-sne objective function: kl divergence of p_ijs and q_ijs .
train
true
36,014
def get_if_raw_addr(ifname): try: fd = os.popen(('%s %s' % (conf.prog.ifconfig, ifname))) except OSError as msg: warning(('Failed to execute ifconfig: (%s)' % msg)) return '\x00\x00\x00\x00' addresses = [l for l in fd if (l.find('netmask') >= 0)] if (not addresses): warning(('No IPv4 address found on %s !' % ifname)) return '\x00\x00\x00\x00' address = addresses[0].split(' ')[1] return socket.inet_pton(socket.AF_INET, address)
[ "def", "get_if_raw_addr", "(", "ifname", ")", ":", "try", ":", "fd", "=", "os", ".", "popen", "(", "(", "'%s %s'", "%", "(", "conf", ".", "prog", ".", "ifconfig", ",", "ifname", ")", ")", ")", "except", "OSError", "as", "msg", ":", "warning", "(", "(", "'Failed to execute ifconfig: (%s)'", "%", "msg", ")", ")", "return", "'\\x00\\x00\\x00\\x00'", "addresses", "=", "[", "l", "for", "l", "in", "fd", "if", "(", "l", ".", "find", "(", "'netmask'", ")", ">=", "0", ")", "]", "if", "(", "not", "addresses", ")", ":", "warning", "(", "(", "'No IPv4 address found on %s !'", "%", "ifname", ")", ")", "return", "'\\x00\\x00\\x00\\x00'", "address", "=", "addresses", "[", "0", "]", ".", "split", "(", "' '", ")", "[", "1", "]", "return", "socket", ".", "inet_pton", "(", "socket", ".", "AF_INET", ",", "address", ")" ]
returns the ipv4 address configured on ifname .
train
true
36,016
def parseHttpHeader(data): raw = data.decode('iso-8859-1').splitlines() http_status_line = raw[0].strip() http_headers = {} http_headers_cnt = {} for h in raw[1:]: i = h.find(':') if (i > 0): key = h[:i].strip().lower() value = h[(i + 1):].strip() if (key in http_headers): http_headers[key] += (', %s' % value) http_headers_cnt[key] += 1 else: http_headers[key] = value http_headers_cnt[key] = 1 else: pass return (http_status_line, http_headers, http_headers_cnt)
[ "def", "parseHttpHeader", "(", "data", ")", ":", "raw", "=", "data", ".", "decode", "(", "'iso-8859-1'", ")", ".", "splitlines", "(", ")", "http_status_line", "=", "raw", "[", "0", "]", ".", "strip", "(", ")", "http_headers", "=", "{", "}", "http_headers_cnt", "=", "{", "}", "for", "h", "in", "raw", "[", "1", ":", "]", ":", "i", "=", "h", ".", "find", "(", "':'", ")", "if", "(", "i", ">", "0", ")", ":", "key", "=", "h", "[", ":", "i", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "value", "=", "h", "[", "(", "i", "+", "1", ")", ":", "]", ".", "strip", "(", ")", "if", "(", "key", "in", "http_headers", ")", ":", "http_headers", "[", "key", "]", "+=", "(", "', %s'", "%", "value", ")", "http_headers_cnt", "[", "key", "]", "+=", "1", "else", ":", "http_headers", "[", "key", "]", "=", "value", "http_headers_cnt", "[", "key", "]", "=", "1", "else", ":", "pass", "return", "(", "http_status_line", ",", "http_headers", ",", "http_headers_cnt", ")" ]
parses the beginning of a http request header into a pair of status line and http headers dictionary .
train
false
36,017
def create_bucket(request, bucket_id): bucket_put = ((request.method.lower() == 'put') and request.path.endswith('buckets/default')) if bucket_put: return already_created = request.bound_data.setdefault('buckets', {}) if (bucket_id in already_created): return bucket_uri = instance_uri(request, 'bucket', id=bucket_id) bucket = resource_create_object(request=request, resource_cls=Bucket, uri=bucket_uri) already_created[bucket_id] = bucket
[ "def", "create_bucket", "(", "request", ",", "bucket_id", ")", ":", "bucket_put", "=", "(", "(", "request", ".", "method", ".", "lower", "(", ")", "==", "'put'", ")", "and", "request", ".", "path", ".", "endswith", "(", "'buckets/default'", ")", ")", "if", "bucket_put", ":", "return", "already_created", "=", "request", ".", "bound_data", ".", "setdefault", "(", "'buckets'", ",", "{", "}", ")", "if", "(", "bucket_id", "in", "already_created", ")", ":", "return", "bucket_uri", "=", "instance_uri", "(", "request", ",", "'bucket'", ",", "id", "=", "bucket_id", ")", "bucket", "=", "resource_create_object", "(", "request", "=", "request", ",", "resource_cls", "=", "Bucket", ",", "uri", "=", "bucket_uri", ")", "already_created", "[", "bucket_id", "]", "=", "bucket" ]
create a bucket if it doesnt exists .
train
false
36,020
def logout(request, next_page=None, template_name='registration/logged_out.html', redirect_field_name=REDIRECT_FIELD_NAME, current_app=None, extra_context=None): auth_logout(request) if (next_page is not None): next_page = resolve_url(next_page) if (redirect_field_name in request.REQUEST): next_page = request.REQUEST[redirect_field_name] if (not is_safe_url(url=next_page, host=request.get_host())): next_page = request.path if next_page: return HttpResponseRedirect(next_page) current_site = get_current_site(request) context = {'site': current_site, 'site_name': current_site.name, 'title': _('Logged out')} if (extra_context is not None): context.update(extra_context) return TemplateResponse(request, template_name, context, current_app=current_app)
[ "def", "logout", "(", "request", ",", "next_page", "=", "None", ",", "template_name", "=", "'registration/logged_out.html'", ",", "redirect_field_name", "=", "REDIRECT_FIELD_NAME", ",", "current_app", "=", "None", ",", "extra_context", "=", "None", ")", ":", "auth_logout", "(", "request", ")", "if", "(", "next_page", "is", "not", "None", ")", ":", "next_page", "=", "resolve_url", "(", "next_page", ")", "if", "(", "redirect_field_name", "in", "request", ".", "REQUEST", ")", ":", "next_page", "=", "request", ".", "REQUEST", "[", "redirect_field_name", "]", "if", "(", "not", "is_safe_url", "(", "url", "=", "next_page", ",", "host", "=", "request", ".", "get_host", "(", ")", ")", ")", ":", "next_page", "=", "request", ".", "path", "if", "next_page", ":", "return", "HttpResponseRedirect", "(", "next_page", ")", "current_site", "=", "get_current_site", "(", "request", ")", "context", "=", "{", "'site'", ":", "current_site", ",", "'site_name'", ":", "current_site", ".", "name", ",", "'title'", ":", "_", "(", "'Logged out'", ")", "}", "if", "(", "extra_context", "is", "not", "None", ")", ":", "context", ".", "update", "(", "extra_context", ")", "return", "TemplateResponse", "(", "request", ",", "template_name", ",", "context", ",", "current_app", "=", "current_app", ")" ]
view that logs out the user and redirects to home page .
train
true
36,021
def released_languages(): released_language_codes = DarkLangConfig.current().released_languages_list default_language_code = settings.LANGUAGE_CODE if (default_language_code not in released_language_codes): released_language_codes.append(default_language_code) released_language_codes.sort() return [Language(language_info[0], language_info[1]) for language_info in settings.LANGUAGES if (language_info[0] in released_language_codes)]
[ "def", "released_languages", "(", ")", ":", "released_language_codes", "=", "DarkLangConfig", ".", "current", "(", ")", ".", "released_languages_list", "default_language_code", "=", "settings", ".", "LANGUAGE_CODE", "if", "(", "default_language_code", "not", "in", "released_language_codes", ")", ":", "released_language_codes", ".", "append", "(", "default_language_code", ")", "released_language_codes", ".", "sort", "(", ")", "return", "[", "Language", "(", "language_info", "[", "0", "]", ",", "language_info", "[", "1", "]", ")", "for", "language_info", "in", "settings", ".", "LANGUAGES", "if", "(", "language_info", "[", "0", "]", "in", "released_language_codes", ")", "]" ]
retrieve the list of released languages .
train
false
36,022
def test_represent_ygate(): circuit = (YGate(0) * Qubit('00')) answer = represent(circuit, nqubits=2) assert ((answer[0] == 0) and (answer[1] == I) and (answer[2] == 0) and (answer[3] == 0))
[ "def", "test_represent_ygate", "(", ")", ":", "circuit", "=", "(", "YGate", "(", "0", ")", "*", "Qubit", "(", "'00'", ")", ")", "answer", "=", "represent", "(", "circuit", ",", "nqubits", "=", "2", ")", "assert", "(", "(", "answer", "[", "0", "]", "==", "0", ")", "and", "(", "answer", "[", "1", "]", "==", "I", ")", "and", "(", "answer", "[", "2", "]", "==", "0", ")", "and", "(", "answer", "[", "3", "]", "==", "0", ")", ")" ]
test the representation of the y gate .
train
false
36,023
def onStart(): pass
[ "def", "onStart", "(", ")", ":", "pass" ]
kbengine method .
train
false
36,024
def _handle_delete(gs_stub, filename): if gs_stub.delete_object(filename): return _FakeUrlFetchResult(204, {}, '') else: return _FakeUrlFetchResult(404, {}, '')
[ "def", "_handle_delete", "(", "gs_stub", ",", "filename", ")", ":", "if", "gs_stub", ".", "delete_object", "(", "filename", ")", ":", "return", "_FakeUrlFetchResult", "(", "204", ",", "{", "}", ",", "''", ")", "else", ":", "return", "_FakeUrlFetchResult", "(", "404", ",", "{", "}", ",", "''", ")" ]
handle delete object .
train
false
36,025
def symlink_ok(): try: open(libcuda_convnet_so).close() return True except IOError: return False
[ "def", "symlink_ok", "(", ")", ":", "try", ":", "open", "(", "libcuda_convnet_so", ")", ".", "close", "(", ")", "return", "True", "except", "IOError", ":", "return", "False" ]
check if an existing library exists and can be read .
train
false
36,026
@pytest.fixture def site_packages(modules_tmpdir, monkeypatch): rv = modules_tmpdir.mkdir('lib').mkdir('python{x[0]}.{x[1]}'.format(x=sys.version_info)).mkdir('site-packages') monkeypatch.syspath_prepend(str(rv)) return rv
[ "@", "pytest", ".", "fixture", "def", "site_packages", "(", "modules_tmpdir", ",", "monkeypatch", ")", ":", "rv", "=", "modules_tmpdir", ".", "mkdir", "(", "'lib'", ")", ".", "mkdir", "(", "'python{x[0]}.{x[1]}'", ".", "format", "(", "x", "=", "sys", ".", "version_info", ")", ")", ".", "mkdir", "(", "'site-packages'", ")", "monkeypatch", ".", "syspath_prepend", "(", "str", "(", "rv", ")", ")", "return", "rv" ]
create a fake site-packages .
train
false
36,027
def file_md5(file_name): md5 = hashlib.md5() with open(file_name, 'rb') as f: for chunk in iter((lambda : f.read((128 * md5.block_size))), ''): md5.update(chunk) return md5.hexdigest()
[ "def", "file_md5", "(", "file_name", ")", ":", "md5", "=", "hashlib", ".", "md5", "(", ")", "with", "open", "(", "file_name", ",", "'rb'", ")", "as", "f", ":", "for", "chunk", "in", "iter", "(", "(", "lambda", ":", "f", ".", "read", "(", "(", "128", "*", "md5", ".", "block_size", ")", ")", ")", ",", "''", ")", ":", "md5", ".", "update", "(", "chunk", ")", "return", "md5", ".", "hexdigest", "(", ")" ]
generate an md5 hash of the specified file .
train
true
36,029
def start_job(node, merge_otus_fp, queue, wrap_call=torque_job, submit=True): strfmt = {'MergeOTUs': merge_otus_fp, 'Output': node.FilePath, 'BIOM_A': node.Children[0].FilePath, 'BIOM_B': node.Children[1].FilePath} cmd = '%(MergeOTUs)s -i %(BIOM_A)s,%(BIOM_B)s -o %(Output)s' wrapped = wrap_call((cmd % strfmt), node.PollPath, node.Name, queue) if submit: system(wrapped) node.FullCommand = wrapped node.StartTime = time()
[ "def", "start_job", "(", "node", ",", "merge_otus_fp", ",", "queue", ",", "wrap_call", "=", "torque_job", ",", "submit", "=", "True", ")", ":", "strfmt", "=", "{", "'MergeOTUs'", ":", "merge_otus_fp", ",", "'Output'", ":", "node", ".", "FilePath", ",", "'BIOM_A'", ":", "node", ".", "Children", "[", "0", "]", ".", "FilePath", ",", "'BIOM_B'", ":", "node", ".", "Children", "[", "1", "]", ".", "FilePath", "}", "cmd", "=", "'%(MergeOTUs)s -i %(BIOM_A)s,%(BIOM_B)s -o %(Output)s'", "wrapped", "=", "wrap_call", "(", "(", "cmd", "%", "strfmt", ")", ",", "node", ".", "PollPath", ",", "node", ".", "Name", ",", "queue", ")", "if", "submit", ":", "system", "(", "wrapped", ")", "node", ".", "FullCommand", "=", "wrapped", "node", ".", "StartTime", "=", "time", "(", ")" ]
starts a process .
train
false
36,031
@require_POST def join_group(request, url): group = get_object_or_404(Group, url=url) profile_to_add = request.user.userprofile if group.has_member(profile_to_add): messages.error(request, _('You are already in this group.')) elif group.has_pending_member(profile_to_add): messages.error(request, _('Your request to join this group is still pending.')) elif (group.accepting_new_members == 'no'): messages.error(request, _('This group is not accepting requests to join.')) else: if (group.accepting_new_members == 'yes'): status = GroupMembership.MEMBER messages.info(request, _('You have been added to this group.')) if group.terms: status = GroupMembership.PENDING_TERMS elif (group.accepting_new_members == 'by_request'): status = GroupMembership.PENDING messages.info(request, _('Your membership request has been sent to the group curator(s).')) group.add_member(profile_to_add, status=status) return redirect(reverse('groups:show_group', args=[group.url]))
[ "@", "require_POST", "def", "join_group", "(", "request", ",", "url", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "url", "=", "url", ")", "profile_to_add", "=", "request", ".", "user", ".", "userprofile", "if", "group", ".", "has_member", "(", "profile_to_add", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "'You are already in this group.'", ")", ")", "elif", "group", ".", "has_pending_member", "(", "profile_to_add", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "'Your request to join this group is still pending.'", ")", ")", "elif", "(", "group", ".", "accepting_new_members", "==", "'no'", ")", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "'This group is not accepting requests to join.'", ")", ")", "else", ":", "if", "(", "group", ".", "accepting_new_members", "==", "'yes'", ")", ":", "status", "=", "GroupMembership", ".", "MEMBER", "messages", ".", "info", "(", "request", ",", "_", "(", "'You have been added to this group.'", ")", ")", "if", "group", ".", "terms", ":", "status", "=", "GroupMembership", ".", "PENDING_TERMS", "elif", "(", "group", ".", "accepting_new_members", "==", "'by_request'", ")", ":", "status", "=", "GroupMembership", ".", "PENDING", "messages", ".", "info", "(", "request", ",", "_", "(", "'Your membership request has been sent to the group curator(s).'", ")", ")", "group", ".", "add_member", "(", "profile_to_add", ",", "status", "=", "status", ")", "return", "redirect", "(", "reverse", "(", "'groups:show_group'", ",", "args", "=", "[", "group", ".", "url", "]", ")", ")" ]
user request to join group .
train
false
36,033
@task @needs('pavelib.i18n.i18n_validate_transifex_config') @timed def i18n_transifex_pull(): sh('i18n_tool transifex pull')
[ "@", "task", "@", "needs", "(", "'pavelib.i18n.i18n_validate_transifex_config'", ")", "@", "timed", "def", "i18n_transifex_pull", "(", ")", ":", "sh", "(", "'i18n_tool transifex pull'", ")" ]
pull translated strings from transifex .
train
false
36,034
def expr_to_tree(ind): def prim_to_list(prim, args): if isinstance(prim, deap.gp.Terminal): return prim.value return ([prim.name] + args) tree = [] stack = [] for node in ind: stack.append((node, [])) while (len(stack[(-1)][1]) == stack[(-1)][0].arity): (prim, args) = stack.pop() tree = prim_to_list(prim, args) if (len(stack) == 0): break stack[(-1)][1].append(tree) return tree
[ "def", "expr_to_tree", "(", "ind", ")", ":", "def", "prim_to_list", "(", "prim", ",", "args", ")", ":", "if", "isinstance", "(", "prim", ",", "deap", ".", "gp", ".", "Terminal", ")", ":", "return", "prim", ".", "value", "return", "(", "[", "prim", ".", "name", "]", "+", "args", ")", "tree", "=", "[", "]", "stack", "=", "[", "]", "for", "node", "in", "ind", ":", "stack", ".", "append", "(", "(", "node", ",", "[", "]", ")", ")", "while", "(", "len", "(", "stack", "[", "(", "-", "1", ")", "]", "[", "1", "]", ")", "==", "stack", "[", "(", "-", "1", ")", "]", "[", "0", "]", ".", "arity", ")", ":", "(", "prim", ",", "args", ")", "=", "stack", ".", "pop", "(", ")", "tree", "=", "prim_to_list", "(", "prim", ",", "args", ")", "if", "(", "len", "(", "stack", ")", "==", "0", ")", ":", "break", "stack", "[", "(", "-", "1", ")", "]", "[", "1", "]", ".", "append", "(", "tree", ")", "return", "tree" ]
convert the unstructured deap pipeline into a tree data-structure parameters ind: deap .
train
true
36,036
def test_descriptors_custom_attrs(): class mydesc(object, ): def __get__(self, instance, ctx): raise AttributeError class f(object, ): x = mydesc() def __getattr__(self, name): return 42 AreEqual(f().x, 42)
[ "def", "test_descriptors_custom_attrs", "(", ")", ":", "class", "mydesc", "(", "object", ",", ")", ":", "def", "__get__", "(", "self", ",", "instance", ",", "ctx", ")", ":", "raise", "AttributeError", "class", "f", "(", "object", ",", ")", ":", "x", "=", "mydesc", "(", ")", "def", "__getattr__", "(", "self", ",", "name", ")", ":", "return", "42", "AreEqual", "(", "f", "(", ")", ".", "x", ",", "42", ")" ]
verifies the interaction between descriptors and custom attribute access works properly .
train
false
36,037
def test_scenario_ignore_commented_lines_from_examples(): scenario = Scenario.from_string(OUTLINED_SCENARIO_WITH_COMMENTS_ON_EXAMPLES) assert_equals(scenario.outlines, [{'input_1': '20', 'input_2': '30', 'button': 'add', 'output': '50'}, {'input_1': '0', 'input_2': '40', 'button': 'add', 'output': '40'}])
[ "def", "test_scenario_ignore_commented_lines_from_examples", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "OUTLINED_SCENARIO_WITH_COMMENTS_ON_EXAMPLES", ")", "assert_equals", "(", "scenario", ".", "outlines", ",", "[", "{", "'input_1'", ":", "'20'", ",", "'input_2'", ":", "'30'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'50'", "}", ",", "{", "'input_1'", ":", "'0'", ",", "'input_2'", ":", "'40'", ",", "'button'", ":", "'add'", ",", "'output'", ":", "'40'", "}", "]", ")" ]
comments on scenario example should be ignored .
train
false
36,038
@register.assignment_tag def assignment_only_unlimited_args(*args): return ('assignment_only_unlimited_args - Expected result: %s' % ', '.join([unicode(arg) for arg in args]))
[ "@", "register", ".", "assignment_tag", "def", "assignment_only_unlimited_args", "(", "*", "args", ")", ":", "return", "(", "'assignment_only_unlimited_args - Expected result: %s'", "%", "', '", ".", "join", "(", "[", "unicode", "(", "arg", ")", "for", "arg", "in", "args", "]", ")", ")" ]
expected assignment_only_unlimited_args __doc__ .
train
false
36,040
def determine_page_content_type(content): tags = ['<p>', '<ul>', '<h1>', '<h2>', '<h3>', '<pre>', '<br', '<table>'] content_type = 'restructuredtext' content = content.lower() for t in tags: if (t in content): content_type = 'html' return content_type
[ "def", "determine_page_content_type", "(", "content", ")", ":", "tags", "=", "[", "'<p>'", ",", "'<ul>'", ",", "'<h1>'", ",", "'<h2>'", ",", "'<h3>'", ",", "'<pre>'", ",", "'<br'", ",", "'<table>'", "]", "content_type", "=", "'restructuredtext'", "content", "=", "content", ".", "lower", "(", ")", "for", "t", "in", "tags", ":", "if", "(", "t", "in", "content", ")", ":", "content_type", "=", "'html'", "return", "content_type" ]
attempt to determine if content is rest or html .
train
false
36,042
def ValidateMSBuildSettings(settings, stderr=sys.stderr): _ValidateSettings(_msbuild_validators, settings, stderr)
[ "def", "ValidateMSBuildSettings", "(", "settings", ",", "stderr", "=", "sys", ".", "stderr", ")", ":", "_ValidateSettings", "(", "_msbuild_validators", ",", "settings", ",", "stderr", ")" ]
validates that the names of the settings are valid for msbuild .
train
false
36,044
def _no_primary(max_staleness, selection): smax = selection.secondary_with_max_last_write_date() if (not smax): return selection.with_server_descriptions([]) sds = [] for s in selection.server_descriptions: if (s.server_type == SERVER_TYPE.RSSecondary): staleness = ((smax.last_write_date - s.last_write_date) + selection.heartbeat_frequency) if (staleness <= max_staleness): sds.append(s) else: sds.append(s) return selection.with_server_descriptions(sds)
[ "def", "_no_primary", "(", "max_staleness", ",", "selection", ")", ":", "smax", "=", "selection", ".", "secondary_with_max_last_write_date", "(", ")", "if", "(", "not", "smax", ")", ":", "return", "selection", ".", "with_server_descriptions", "(", "[", "]", ")", "sds", "=", "[", "]", "for", "s", "in", "selection", ".", "server_descriptions", ":", "if", "(", "s", ".", "server_type", "==", "SERVER_TYPE", ".", "RSSecondary", ")", ":", "staleness", "=", "(", "(", "smax", ".", "last_write_date", "-", "s", ".", "last_write_date", ")", "+", "selection", ".", "heartbeat_frequency", ")", "if", "(", "staleness", "<=", "max_staleness", ")", ":", "sds", ".", "append", "(", "s", ")", "else", ":", "sds", ".", "append", "(", "s", ")", "return", "selection", ".", "with_server_descriptions", "(", "sds", ")" ]
apply max_staleness .
train
true
36,045
def dup_gff_list(f, K): if (not f): raise ValueError("greatest factorial factorization doesn't exist for a zero polynomial") f = dup_monic(f, K) if (not dup_degree(f)): return [] else: g = dup_gcd(f, dup_shift(f, K.one, K), K) H = dup_gff_list(g, K) for (i, (h, k)) in enumerate(H): g = dup_mul(g, dup_shift(h, (- K(k)), K), K) H[i] = (h, (k + 1)) f = dup_quo(f, g, K) if (not dup_degree(f)): return H else: return ([(f, 1)] + H)
[ "def", "dup_gff_list", "(", "f", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "raise", "ValueError", "(", "\"greatest factorial factorization doesn't exist for a zero polynomial\"", ")", "f", "=", "dup_monic", "(", "f", ",", "K", ")", "if", "(", "not", "dup_degree", "(", "f", ")", ")", ":", "return", "[", "]", "else", ":", "g", "=", "dup_gcd", "(", "f", ",", "dup_shift", "(", "f", ",", "K", ".", "one", ",", "K", ")", ",", "K", ")", "H", "=", "dup_gff_list", "(", "g", ",", "K", ")", "for", "(", "i", ",", "(", "h", ",", "k", ")", ")", "in", "enumerate", "(", "H", ")", ":", "g", "=", "dup_mul", "(", "g", ",", "dup_shift", "(", "h", ",", "(", "-", "K", "(", "k", ")", ")", ",", "K", ")", ",", "K", ")", "H", "[", "i", "]", "=", "(", "h", ",", "(", "k", "+", "1", ")", ")", "f", "=", "dup_quo", "(", "f", ",", "g", ",", "K", ")", "if", "(", "not", "dup_degree", "(", "f", ")", ")", ":", "return", "H", "else", ":", "return", "(", "[", "(", "f", ",", "1", ")", "]", "+", "H", ")" ]
compute greatest factorial factorization of f in k[x] .
train
false
36,048
def reserve_vlanid(): LOG.debug(_('reserve_vlanid() called')) session = db.get_session() try: rvlan = session.query(network_models_v2.VlanID).filter_by(vlan_used=False).first() if (not rvlan): raise exc.NoResultFound rvlanid = session.query(network_models_v2.VlanID).filter_by(vlan_id=rvlan['vlan_id']).one() rvlanid['vlan_used'] = True session.merge(rvlanid) session.flush() return rvlan['vlan_id'] except exc.NoResultFound: raise c_exc.VlanIDNotAvailable()
[ "def", "reserve_vlanid", "(", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'reserve_vlanid() called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "rvlan", "=", "session", ".", "query", "(", "network_models_v2", ".", "VlanID", ")", ".", "filter_by", "(", "vlan_used", "=", "False", ")", ".", "first", "(", ")", "if", "(", "not", "rvlan", ")", ":", "raise", "exc", ".", "NoResultFound", "rvlanid", "=", "session", ".", "query", "(", "network_models_v2", ".", "VlanID", ")", ".", "filter_by", "(", "vlan_id", "=", "rvlan", "[", "'vlan_id'", "]", ")", ".", "one", "(", ")", "rvlanid", "[", "'vlan_used'", "]", "=", "True", "session", ".", "merge", "(", "rvlanid", ")", "session", ".", "flush", "(", ")", "return", "rvlan", "[", "'vlan_id'", "]", "except", "exc", ".", "NoResultFound", ":", "raise", "c_exc", ".", "VlanIDNotAvailable", "(", ")" ]
reserves the first unused vlanid .
train
false
36,049
def get_messages(request): return getattr(request, '_messages', [])
[ "def", "get_messages", "(", "request", ")", ":", "return", "getattr", "(", "request", ",", "'_messages'", ",", "[", "]", ")" ]
fetches all messages of the given thread indexed by [exploration_id] .
train
false
36,052
def _api_test_notif(name, output, kwargs): logging.info('Sending test notification') res = sabnzbd.notifier.send_notification_center('SABnzbd', T('Test Notification'), 'other') return report(output, error=res)
[ "def", "_api_test_notif", "(", "name", ",", "output", ",", "kwargs", ")", ":", "logging", ".", "info", "(", "'Sending test notification'", ")", "res", "=", "sabnzbd", ".", "notifier", ".", "send_notification_center", "(", "'SABnzbd'", ",", "T", "(", "'Test Notification'", ")", ",", "'other'", ")", "return", "report", "(", "output", ",", "error", "=", "res", ")" ]
api: send a test to notification center .
train
false
36,054
def gitlab(registry, xml_parent, data): def _add_xml(elem, name, value): XML.SubElement(elem, name).text = value gitlab = XML.SubElement(xml_parent, 'com.dabsquared.gitlabjenkins.GitLabPushTrigger') plugin_info = registry.get_plugin_info('GitLab Plugin') plugin_ver = pkg_resources.parse_version(plugin_info.get('version', '0')) valid_merge_request = ['never', 'source', 'both'] if (plugin_ver >= pkg_resources.parse_version('1.1.26')): mapping = [('trigger-open-merge-request-push', 'triggerOpenMergeRequestOnPush', 'never', valid_merge_request)] convert_mapping_to_xml(gitlab, data, mapping, fail_required=True) else: mapping = [('trigger-open-merge-request-push', 'triggerOpenMergeRequestOnPush', True)] convert_mapping_to_xml(gitlab, data, mapping, fail_required=True) if (plugin_ver == pkg_resources.parse_version('1.1.29')): if (data.get('branch-filter-type', '') == 'All'): data['branch-filter-type'] = '' valid_filters = ['', 'NameBasedFilter', 'RegexBasedFilter'] mapping = [('branch-filter-type', 'branchFilterName', '', valid_filters)] convert_mapping_to_xml(gitlab, data, mapping, fail_required=True) else: valid_filters = ['All', 'NameBasedFilter', 'RegexBasedFilter'] mapping = [('branch-filter-type', 'branchFilterType', 'All', valid_filters)] convert_mapping_to_xml(gitlab, data, mapping, fail_required=True) XML.SubElement(gitlab, 'spec').text = '' mapping = [('trigger-push', 'triggerOnPush', True), ('trigger-merge-request', 'triggerOnMergeRequest', True), ('trigger-note', 'triggerOnNoteRequest', True), ('note-regex', 'noteRegex', 'Jenkins please retry a build'), ('ci-skip', 'ciSkip', True), ('wip-skip', 'skipWorkInProgressMergeRequest', True), ('set-build-description', 'setBuildDescription', True), ('add-note-merge-request', 'addNoteOnMergeRequest', True), ('add-vote-merge-request', 'addVoteOnMergeRequest', True), ('accept-merge-request-on-success', 'acceptMergeRequestOnSuccess', False), ('add-ci-message', 'addCiMessage', False), ('allow-all-branches', 
'allowAllBranches', False), ('target-branch-regex', 'targetBranchRegex', '')] list_mapping = (('include-branches', 'includeBranchesSpec', []), ('exclude-branches', 'excludeBranchesSpec', [])) convert_mapping_to_xml(gitlab, data, mapping, fail_required=True) for (yaml_name, xml_name, default_val) in list_mapping: value = ', '.join(data.get(yaml_name, default_val)) _add_xml(gitlab, xml_name, value)
[ "def", "gitlab", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "def", "_add_xml", "(", "elem", ",", "name", ",", "value", ")", ":", "XML", ".", "SubElement", "(", "elem", ",", "name", ")", ".", "text", "=", "value", "gitlab", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.dabsquared.gitlabjenkins.GitLabPushTrigger'", ")", "plugin_info", "=", "registry", ".", "get_plugin_info", "(", "'GitLab Plugin'", ")", "plugin_ver", "=", "pkg_resources", ".", "parse_version", "(", "plugin_info", ".", "get", "(", "'version'", ",", "'0'", ")", ")", "valid_merge_request", "=", "[", "'never'", ",", "'source'", ",", "'both'", "]", "if", "(", "plugin_ver", ">=", "pkg_resources", ".", "parse_version", "(", "'1.1.26'", ")", ")", ":", "mapping", "=", "[", "(", "'trigger-open-merge-request-push'", ",", "'triggerOpenMergeRequestOnPush'", ",", "'never'", ",", "valid_merge_request", ")", "]", "convert_mapping_to_xml", "(", "gitlab", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")", "else", ":", "mapping", "=", "[", "(", "'trigger-open-merge-request-push'", ",", "'triggerOpenMergeRequestOnPush'", ",", "True", ")", "]", "convert_mapping_to_xml", "(", "gitlab", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")", "if", "(", "plugin_ver", "==", "pkg_resources", ".", "parse_version", "(", "'1.1.29'", ")", ")", ":", "if", "(", "data", ".", "get", "(", "'branch-filter-type'", ",", "''", ")", "==", "'All'", ")", ":", "data", "[", "'branch-filter-type'", "]", "=", "''", "valid_filters", "=", "[", "''", ",", "'NameBasedFilter'", ",", "'RegexBasedFilter'", "]", "mapping", "=", "[", "(", "'branch-filter-type'", ",", "'branchFilterName'", ",", "''", ",", "valid_filters", ")", "]", "convert_mapping_to_xml", "(", "gitlab", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")", "else", ":", "valid_filters", "=", "[", "'All'", ",", "'NameBasedFilter'", ",", "'RegexBasedFilter'", "]", "mapping", "=", "[", "(", 
"'branch-filter-type'", ",", "'branchFilterType'", ",", "'All'", ",", "valid_filters", ")", "]", "convert_mapping_to_xml", "(", "gitlab", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")", "XML", ".", "SubElement", "(", "gitlab", ",", "'spec'", ")", ".", "text", "=", "''", "mapping", "=", "[", "(", "'trigger-push'", ",", "'triggerOnPush'", ",", "True", ")", ",", "(", "'trigger-merge-request'", ",", "'triggerOnMergeRequest'", ",", "True", ")", ",", "(", "'trigger-note'", ",", "'triggerOnNoteRequest'", ",", "True", ")", ",", "(", "'note-regex'", ",", "'noteRegex'", ",", "'Jenkins please retry a build'", ")", ",", "(", "'ci-skip'", ",", "'ciSkip'", ",", "True", ")", ",", "(", "'wip-skip'", ",", "'skipWorkInProgressMergeRequest'", ",", "True", ")", ",", "(", "'set-build-description'", ",", "'setBuildDescription'", ",", "True", ")", ",", "(", "'add-note-merge-request'", ",", "'addNoteOnMergeRequest'", ",", "True", ")", ",", "(", "'add-vote-merge-request'", ",", "'addVoteOnMergeRequest'", ",", "True", ")", ",", "(", "'accept-merge-request-on-success'", ",", "'acceptMergeRequestOnSuccess'", ",", "False", ")", ",", "(", "'add-ci-message'", ",", "'addCiMessage'", ",", "False", ")", ",", "(", "'allow-all-branches'", ",", "'allowAllBranches'", ",", "False", ")", ",", "(", "'target-branch-regex'", ",", "'targetBranchRegex'", ",", "''", ")", "]", "list_mapping", "=", "(", "(", "'include-branches'", ",", "'includeBranchesSpec'", ",", "[", "]", ")", ",", "(", "'exclude-branches'", ",", "'excludeBranchesSpec'", ",", "[", "]", ")", ")", "convert_mapping_to_xml", "(", "gitlab", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")", "for", "(", "yaml_name", ",", "xml_name", ",", "default_val", ")", "in", "list_mapping", ":", "value", "=", "', '", ".", "join", "(", "data", ".", "get", "(", "yaml_name", ",", "default_val", ")", ")", "_add_xml", "(", "gitlab", ",", "xml_name", ",", "value", ")" ]
yaml: gitlab sets the gitlab connection for the project .
train
false
36,055
def get_pageview_pixel_url(): return ((g.tracker_url + '?v=') + _get_encrypted_user_slug())
[ "def", "get_pageview_pixel_url", "(", ")", ":", "return", "(", "(", "g", ".", "tracker_url", "+", "'?v='", ")", "+", "_get_encrypted_user_slug", "(", ")", ")" ]
return a url to use for tracking pageviews for the current request .
train
false
36,056
def matchclose(c_lineno, c_symbol, openers, pairmap): try: (o_lineno, o_symbol) = openers.pop() except IndexError: print ("\nDelimiter mismatch. On line %d, encountered closing '%s' without corresponding open" % (c_lineno, c_symbol)) return if (o_symbol in pairmap.get(c_symbol, [c_symbol])): return print ("\nOpener '%s' on line %d was not closed before encountering '%s' on line %d" % (o_symbol, o_lineno, c_symbol, c_lineno)) return
[ "def", "matchclose", "(", "c_lineno", ",", "c_symbol", ",", "openers", ",", "pairmap", ")", ":", "try", ":", "(", "o_lineno", ",", "o_symbol", ")", "=", "openers", ".", "pop", "(", ")", "except", "IndexError", ":", "print", "(", "\"\\nDelimiter mismatch. On line %d, encountered closing '%s' without corresponding open\"", "%", "(", "c_lineno", ",", "c_symbol", ")", ")", "return", "if", "(", "o_symbol", "in", "pairmap", ".", "get", "(", "c_symbol", ",", "[", "c_symbol", "]", ")", ")", ":", "return", "print", "(", "\"\\nOpener '%s' on line %d was not closed before encountering '%s' on line %d\"", "%", "(", "o_symbol", ",", "o_lineno", ",", "c_symbol", ",", "c_lineno", ")", ")", "return" ]
verify that closing delimiter matches most recent opening delimiter .
train
false
36,057
def guess_filename(obj): name = getattr(obj, 'name', None) if (name and isinstance(name, builtin_str) and (name[0] != '<') and (name[(-1)] != '>')): return os.path.basename(name)
[ "def", "guess_filename", "(", "obj", ")", ":", "name", "=", "getattr", "(", "obj", ",", "'name'", ",", "None", ")", "if", "(", "name", "and", "isinstance", "(", "name", ",", "builtin_str", ")", "and", "(", "name", "[", "0", "]", "!=", "'<'", ")", "and", "(", "name", "[", "(", "-", "1", ")", "]", "!=", "'>'", ")", ")", ":", "return", "os", ".", "path", ".", "basename", "(", "name", ")" ]
tries to guess the filename of the given object .
train
true
36,059
def _shell_escape(string): for char in ('"', '$', '`'): string = string.replace(char, ('\\%s' % char)) return string
[ "def", "_shell_escape", "(", "string", ")", ":", "for", "char", "in", "(", "'\"'", ",", "'$'", ",", "'`'", ")", ":", "string", "=", "string", ".", "replace", "(", "char", ",", "(", "'\\\\%s'", "%", "char", ")", ")", "return", "string" ]
escape double quotes , dollar signs and backticks .
train
false
36,060
def root_factors(f, *gens, **args): args = dict(args) filter = args.pop('filter', None) F = Poly(f, *gens, **args) if (not F.is_Poly): return [f] if F.is_multivariate: raise ValueError('multivariate polynomials are not supported') x = F.gens[0] zeros = roots(F, filter=filter) if (not zeros): factors = [F] else: (factors, N) = ([], 0) for (r, n) in ordered(zeros.items()): (factors, N) = ((factors + ([Poly((x - r), x)] * n)), (N + n)) if (N < F.degree()): G = reduce((lambda p, q: (p * q)), factors) factors.append(F.quo(G)) if (not isinstance(f, Poly)): factors = [f.as_expr() for f in factors] return factors
[ "def", "root_factors", "(", "f", ",", "*", "gens", ",", "**", "args", ")", ":", "args", "=", "dict", "(", "args", ")", "filter", "=", "args", ".", "pop", "(", "'filter'", ",", "None", ")", "F", "=", "Poly", "(", "f", ",", "*", "gens", ",", "**", "args", ")", "if", "(", "not", "F", ".", "is_Poly", ")", ":", "return", "[", "f", "]", "if", "F", ".", "is_multivariate", ":", "raise", "ValueError", "(", "'multivariate polynomials are not supported'", ")", "x", "=", "F", ".", "gens", "[", "0", "]", "zeros", "=", "roots", "(", "F", ",", "filter", "=", "filter", ")", "if", "(", "not", "zeros", ")", ":", "factors", "=", "[", "F", "]", "else", ":", "(", "factors", ",", "N", ")", "=", "(", "[", "]", ",", "0", ")", "for", "(", "r", ",", "n", ")", "in", "ordered", "(", "zeros", ".", "items", "(", ")", ")", ":", "(", "factors", ",", "N", ")", "=", "(", "(", "factors", "+", "(", "[", "Poly", "(", "(", "x", "-", "r", ")", ",", "x", ")", "]", "*", "n", ")", ")", ",", "(", "N", "+", "n", ")", ")", "if", "(", "N", "<", "F", ".", "degree", "(", ")", ")", ":", "G", "=", "reduce", "(", "(", "lambda", "p", ",", "q", ":", "(", "p", "*", "q", ")", ")", ",", "factors", ")", "factors", ".", "append", "(", "F", ".", "quo", "(", "G", ")", ")", "if", "(", "not", "isinstance", "(", "f", ",", "Poly", ")", ")", ":", "factors", "=", "[", "f", ".", "as_expr", "(", ")", "for", "f", "in", "factors", "]", "return", "factors" ]
returns all factors of a univariate polynomial .
train
false
36,061
def has_studio_read_access(user, course_key): return bool((STUDIO_VIEW_CONTENT & get_user_permissions(user, course_key)))
[ "def", "has_studio_read_access", "(", "user", ",", "course_key", ")", ":", "return", "bool", "(", "(", "STUDIO_VIEW_CONTENT", "&", "get_user_permissions", "(", "user", ",", "course_key", ")", ")", ")" ]
return true iff user is allowed to view this course/library in studio .
train
false
36,062
def recvmsg(socket, maxSize=8192, cmsgSize=4096, flags=0): if _PY3: (data, ancillary, flags) = socket.recvmsg(maxSize, CMSG_SPACE(cmsgSize), flags)[0:3] else: (data, flags, ancillary) = recv1msg(socket.fileno(), flags, maxSize, cmsgSize) return RecievedMessage(data=data, ancillary=ancillary, flags=flags)
[ "def", "recvmsg", "(", "socket", ",", "maxSize", "=", "8192", ",", "cmsgSize", "=", "4096", ",", "flags", "=", "0", ")", ":", "if", "_PY3", ":", "(", "data", ",", "ancillary", ",", "flags", ")", "=", "socket", ".", "recvmsg", "(", "maxSize", ",", "CMSG_SPACE", "(", "cmsgSize", ")", ",", "flags", ")", "[", "0", ":", "3", "]", "else", ":", "(", "data", ",", "flags", ",", "ancillary", ")", "=", "recv1msg", "(", "socket", ".", "fileno", "(", ")", ",", "flags", ",", "maxSize", ",", "cmsgSize", ")", "return", "RecievedMessage", "(", "data", "=", "data", ",", "ancillary", "=", "ancillary", ",", "flags", "=", "flags", ")" ]
receive a message on a socket .
train
false
36,063
def dst(x, type=2, n=None, axis=(-1), norm=None, overwrite_x=False): if ((type == 1) and (norm is not None)): raise NotImplementedError('Orthonormalization not yet supported for IDCT-I') return _dst(x, type, n, axis, normalize=norm, overwrite_x=overwrite_x)
[ "def", "dst", "(", "x", ",", "type", "=", "2", ",", "n", "=", "None", ",", "axis", "=", "(", "-", "1", ")", ",", "norm", "=", "None", ",", "overwrite_x", "=", "False", ")", ":", "if", "(", "(", "type", "==", "1", ")", "and", "(", "norm", "is", "not", "None", ")", ")", ":", "raise", "NotImplementedError", "(", "'Orthonormalization not yet supported for IDCT-I'", ")", "return", "_dst", "(", "x", ",", "type", ",", "n", ",", "axis", ",", "normalize", "=", "norm", ",", "overwrite_x", "=", "overwrite_x", ")" ]
return the discrete sine transform of arbitrary type sequence x .
train
false
36,064
def _check_cygwin_installed(cyg_arch='x86_64'): path_to_cygcheck = os.sep.join(['C:', _get_cyg_dir(cyg_arch), 'bin', 'cygcheck.exe']) LOG.debug('Path to cygcheck.exe: {0}'.format(path_to_cygcheck)) if (not os.path.exists(path_to_cygcheck)): LOG.debug('Could not find cygcheck.exe') return False return True
[ "def", "_check_cygwin_installed", "(", "cyg_arch", "=", "'x86_64'", ")", ":", "path_to_cygcheck", "=", "os", ".", "sep", ".", "join", "(", "[", "'C:'", ",", "_get_cyg_dir", "(", "cyg_arch", ")", ",", "'bin'", ",", "'cygcheck.exe'", "]", ")", "LOG", ".", "debug", "(", "'Path to cygcheck.exe: {0}'", ".", "format", "(", "path_to_cygcheck", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path_to_cygcheck", ")", ")", ":", "LOG", ".", "debug", "(", "'Could not find cygcheck.exe'", ")", "return", "False", "return", "True" ]
return true if cygwin is installed , false otherwise .
train
true
36,066
def is_minimal(G, ring): order = ring.order domain = ring.domain G.sort(key=(lambda g: order(g.LM))) for (i, g) in enumerate(G): if (g.LC != domain.one): return False for h in (G[:i] + G[(i + 1):]): if monomial_divides(h.LM, g.LM): return False return True
[ "def", "is_minimal", "(", "G", ",", "ring", ")", ":", "order", "=", "ring", ".", "order", "domain", "=", "ring", ".", "domain", "G", ".", "sort", "(", "key", "=", "(", "lambda", "g", ":", "order", "(", "g", ".", "LM", ")", ")", ")", "for", "(", "i", ",", "g", ")", "in", "enumerate", "(", "G", ")", ":", "if", "(", "g", ".", "LC", "!=", "domain", ".", "one", ")", ":", "return", "False", "for", "h", "in", "(", "G", "[", ":", "i", "]", "+", "G", "[", "(", "i", "+", "1", ")", ":", "]", ")", ":", "if", "monomial_divides", "(", "h", ".", "LM", ",", "g", ".", "LM", ")", ":", "return", "False", "return", "True" ]
checks if g is a minimal groebner basis .
train
false
36,067
def data_parallel(module, input, device_ids, output_device=None): if (not device_ids): return module(input) if (output_device is None): output_device = device_ids[0] replicas = replicate(module, device_ids) inputs = scatter(input, device_ids) replicas = replicas[:len(inputs)] outputs = parallel_apply(replicas, inputs) return gather(outputs, output_device)
[ "def", "data_parallel", "(", "module", ",", "input", ",", "device_ids", ",", "output_device", "=", "None", ")", ":", "if", "(", "not", "device_ids", ")", ":", "return", "module", "(", "input", ")", "if", "(", "output_device", "is", "None", ")", ":", "output_device", "=", "device_ids", "[", "0", "]", "replicas", "=", "replicate", "(", "module", ",", "device_ids", ")", "inputs", "=", "scatter", "(", "input", ",", "device_ids", ")", "replicas", "=", "replicas", "[", ":", "len", "(", "inputs", ")", "]", "outputs", "=", "parallel_apply", "(", "replicas", ",", "inputs", ")", "return", "gather", "(", "outputs", ",", "output_device", ")" ]
evaluates module in parallel across the gpus given in device_ids .
train
false