Columns: id_within_dataset (int64, 1 to 55.5k); snippet (string, length 19 to 14.2k); tokens (list, length 6 to 1.63k); nl (string, length 6 to 352); split_within_dataset (string, 1 value); is_duplicated (bool, 2 classes)
10,029
def _read_epoch(fid): out = {'pts_in_epoch': read_int32(fid), 'epoch_duration': read_float(fid), 'expected_iti': read_float(fid), 'actual_iti': read_float(fid), 'total_var_events': read_int32(fid), 'checksum': read_int32(fid), 'epoch_timestamp': read_int32(fid)} fid.seek(28, 1) return out
[ "def", "_read_epoch", "(", "fid", ")", ":", "out", "=", "{", "'pts_in_epoch'", ":", "read_int32", "(", "fid", ")", ",", "'epoch_duration'", ":", "read_float", "(", "fid", ")", ",", "'expected_iti'", ":", "read_float", "(", "fid", ")", ",", "'actual_iti'", ":", "read_float", "(", "fid", ")", ",", "'total_var_events'", ":", "read_int32", "(", "fid", ")", ",", "'checksum'", ":", "read_int32", "(", "fid", ")", ",", "'epoch_timestamp'", ":", "read_int32", "(", "fid", ")", "}", "fid", ".", "seek", "(", "28", ",", "1", ")", "return", "out" ]
read bti pdf epoch .
train
false
10,030
def multi_theme_percentage_represent(id): if (not id): return current.messages['NONE'] s3db = current.s3db table = s3db.project_theme_percentage ttable = s3db.project_theme def represent_row(row): return ('%s (%s%s)' % (row.project_theme.name, row.project_theme_percentage.percentage, '%')) if isinstance(id, (list, tuple)): query = (table.id.belongs(id) & (ttable.id == table.theme_id)) rows = current.db(query).select(table.percentage, ttable.name) repr = ', '.join((represent_row(row) for row in rows)) return repr else: query = ((table.id == id) & (ttable.id == table.theme_id)) row = current.db(query).select(table.percentage, ttable.name).first() try: return represent_row(row) except: return current.messages.UNKNOWN_OPT
[ "def", "multi_theme_percentage_represent", "(", "id", ")", ":", "if", "(", "not", "id", ")", ":", "return", "current", ".", "messages", "[", "'NONE'", "]", "s3db", "=", "current", ".", "s3db", "table", "=", "s3db", ".", "project_theme_percentage", "ttable", "=", "s3db", ".", "project_theme", "def", "represent_row", "(", "row", ")", ":", "return", "(", "'%s (%s%s)'", "%", "(", "row", ".", "project_theme", ".", "name", ",", "row", ".", "project_theme_percentage", ".", "percentage", ",", "'%'", ")", ")", "if", "isinstance", "(", "id", ",", "(", "list", ",", "tuple", ")", ")", ":", "query", "=", "(", "table", ".", "id", ".", "belongs", "(", "id", ")", "&", "(", "ttable", ".", "id", "==", "table", ".", "theme_id", ")", ")", "rows", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "table", ".", "percentage", ",", "ttable", ".", "name", ")", "repr", "=", "', '", ".", "join", "(", "(", "represent_row", "(", "row", ")", "for", "row", "in", "rows", ")", ")", "return", "repr", "else", ":", "query", "=", "(", "(", "table", ".", "id", "==", "id", ")", "&", "(", "ttable", ".", "id", "==", "table", ".", "theme_id", ")", ")", "row", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "table", ".", "percentage", ",", "ttable", ".", "name", ")", ".", "first", "(", ")", "try", ":", "return", "represent_row", "(", "row", ")", "except", ":", "return", "current", ".", "messages", ".", "UNKNOWN_OPT" ]
representation for theme percentages for multiple=true options .
train
false
10,031
def _filter_bultins(module): name = module.__name__ return ((not name.startswith('django.contrib')) and (name != 'lettuce.django'))
[ "def", "_filter_bultins", "(", "module", ")", ":", "name", "=", "module", ".", "__name__", "return", "(", "(", "not", "name", ".", "startswith", "(", "'django.contrib'", ")", ")", "and", "(", "name", "!=", "'lettuce.django'", ")", ")" ]
returns only those apps that are not builtin django .
train
false
10,032
def _raise_error_network(option, expected): msg = _error_msg_network(option, expected) log.error(msg) raise AttributeError(msg)
[ "def", "_raise_error_network", "(", "option", ",", "expected", ")", ":", "msg", "=", "_error_msg_network", "(", "option", ",", "expected", ")", "log", ".", "error", "(", "msg", ")", "raise", "AttributeError", "(", "msg", ")" ]
log and raise an error with a logically formatted message .
train
true
10,034
def unproject(winx, winy, winz, modelMatrix, projMatrix, viewport): npModelMatrix = numpy.matrix(numpy.array(modelMatrix, numpy.float64).reshape((4, 4))) npProjMatrix = numpy.matrix(numpy.array(projMatrix, numpy.float64).reshape((4, 4))) finalMatrix = (npModelMatrix * npProjMatrix) finalMatrix = numpy.linalg.inv(finalMatrix) viewport = map(float, viewport) vector = numpy.array([((((winx - viewport[0]) / viewport[2]) * 2.0) - 1.0), ((((winy - viewport[1]) / viewport[3]) * 2.0) - 1.0), ((winz * 2.0) - 1.0), 1]).reshape((1, 4)) vector = (numpy.matrix(vector) * finalMatrix).getA().flatten() ret = (list(vector)[0:3] / vector[3]) return ret
[ "def", "unproject", "(", "winx", ",", "winy", ",", "winz", ",", "modelMatrix", ",", "projMatrix", ",", "viewport", ")", ":", "npModelMatrix", "=", "numpy", ".", "matrix", "(", "numpy", ".", "array", "(", "modelMatrix", ",", "numpy", ".", "float64", ")", ".", "reshape", "(", "(", "4", ",", "4", ")", ")", ")", "npProjMatrix", "=", "numpy", ".", "matrix", "(", "numpy", ".", "array", "(", "projMatrix", ",", "numpy", ".", "float64", ")", ".", "reshape", "(", "(", "4", ",", "4", ")", ")", ")", "finalMatrix", "=", "(", "npModelMatrix", "*", "npProjMatrix", ")", "finalMatrix", "=", "numpy", ".", "linalg", ".", "inv", "(", "finalMatrix", ")", "viewport", "=", "map", "(", "float", ",", "viewport", ")", "vector", "=", "numpy", ".", "array", "(", "[", "(", "(", "(", "(", "winx", "-", "viewport", "[", "0", "]", ")", "/", "viewport", "[", "2", "]", ")", "*", "2.0", ")", "-", "1.0", ")", ",", "(", "(", "(", "(", "winy", "-", "viewport", "[", "1", "]", ")", "/", "viewport", "[", "3", "]", ")", "*", "2.0", ")", "-", "1.0", ")", ",", "(", "(", "winz", "*", "2.0", ")", "-", "1.0", ")", ",", "1", "]", ")", ".", "reshape", "(", "(", "1", ",", "4", ")", ")", "vector", "=", "(", "numpy", ".", "matrix", "(", "vector", ")", "*", "finalMatrix", ")", ".", "getA", "(", ")", ".", "flatten", "(", ")", "ret", "=", "(", "list", "(", "vector", ")", "[", "0", ":", "3", "]", "/", "vector", "[", "3", "]", ")", "return", "ret" ]
projects window position to 3d space .
train
false
10,035
def saveNameCacheToDb(): cache_db_con = db.DBConnection('cache.db') for (name, indexer_id) in nameCache.iteritems(): cache_db_con.action('INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)', [indexer_id, name])
[ "def", "saveNameCacheToDb", "(", ")", ":", "cache_db_con", "=", "db", ".", "DBConnection", "(", "'cache.db'", ")", "for", "(", "name", ",", "indexer_id", ")", "in", "nameCache", ".", "iteritems", "(", ")", ":", "cache_db_con", ".", "action", "(", "'INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)'", ",", "[", "indexer_id", ",", "name", "]", ")" ]
commit cache to database file .
train
false
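A note on the upsert above: 'INSERT OR REPLACE' only replaces when a uniqueness constraint collides. A minimal self-contained sketch with the standard-library sqlite3 module (table name kept from the snippet, schema assumed for illustration):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE scene_names (indexer_id INTEGER PRIMARY KEY, name TEXT)')
# the second insert collides on the primary key and replaces the first row
con.execute('INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)', (1, 'Show'))
con.execute('INSERT OR REPLACE INTO scene_names (indexer_id, name) VALUES (?, ?)', (1, 'Show Renamed'))
print(con.execute('SELECT * FROM scene_names').fetchall())  # [(1, 'Show Renamed')]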
10,036
def render_link_tag(url, rel=u'stylesheet', media=None): attrs = {u'href': url, u'rel': rel} if media: attrs[u'media'] = media return render_tag(u'link', attrs=attrs, close=False)
[ "def", "render_link_tag", "(", "url", ",", "rel", "=", "u'stylesheet'", ",", "media", "=", "None", ")", ":", "attrs", "=", "{", "u'href'", ":", "url", ",", "u'rel'", ":", "rel", "}", "if", "media", ":", "attrs", "[", "u'media'", "]", "=", "media", "return", "render_tag", "(", "u'link'", ",", "attrs", "=", "attrs", ",", "close", "=", "False", ")" ]
build a link tag .
train
false
10,037
def group(seq, size): if (not hasattr(seq, 'next')): seq = iter(seq) while True: (yield [seq.next() for i in xrange(size)])
[ "def", "group", "(", "seq", ",", "size", ")", ":", "if", "(", "not", "hasattr", "(", "seq", ",", "'next'", ")", ")", ":", "seq", "=", "iter", "(", "seq", ")", "while", "True", ":", "(", "yield", "[", "seq", ".", "next", "(", ")", "for", "i", "in", "xrange", "(", "size", ")", "]", ")" ]
group a sequence into chunks of the given size .
train
false
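The group snippet is Python 2 (seq.next(), xrange) and, because StopIteration escapes mid-list-comprehension, it silently drops a trailing partial chunk. A hedged Python 3 sketch that keeps the remainder:

from itertools import islice

def group_py3(seq, size):
    it = iter(seq)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk

print(list(group_py3(range(7), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]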
10,038
def dmp_to_dict(f, u, K=None, zero=False): if (not u): return dup_to_dict(f, K, zero=zero) if (dmp_zero_p(f, u) and zero): return {((0,) * (u + 1)): K.zero} (n, v, result) = (dmp_degree(f, u), (u - 1), {}) if (n == (- oo)): n = (-1) for k in range(0, (n + 1)): h = dmp_to_dict(f[(n - k)], v) for (exp, coeff) in h.items(): result[((k,) + exp)] = coeff return result
[ "def", "dmp_to_dict", "(", "f", ",", "u", ",", "K", "=", "None", ",", "zero", "=", "False", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_to_dict", "(", "f", ",", "K", ",", "zero", "=", "zero", ")", "if", "(", "dmp_zero_p", "(", "f", ",", "u", ")", "and", "zero", ")", ":", "return", "{", "(", "(", "0", ",", ")", "*", "(", "u", "+", "1", ")", ")", ":", "K", ".", "zero", "}", "(", "n", ",", "v", ",", "result", ")", "=", "(", "dmp_degree", "(", "f", ",", "u", ")", ",", "(", "u", "-", "1", ")", ",", "{", "}", ")", "if", "(", "n", "==", "(", "-", "oo", ")", ")", ":", "n", "=", "(", "-", "1", ")", "for", "k", "in", "range", "(", "0", ",", "(", "n", "+", "1", ")", ")", ":", "h", "=", "dmp_to_dict", "(", "f", "[", "(", "n", "-", "k", ")", "]", ",", "v", ")", "for", "(", "exp", ",", "coeff", ")", "in", "h", ".", "items", "(", ")", ":", "result", "[", "(", "(", "k", ",", ")", "+", "exp", ")", "]", "=", "coeff", "return", "result" ]
convert a k[x] polynomial to a dict .
train
false
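A worked example of the conversion, assuming the SymPy import path below is current: with two variables (u = 1), the dense recursive form of x**2*y + 3 maps to exponent-tuple keys:

from sympy.polys.densebasic import dmp_to_dict

f = [[1, 0], [], [3]]     # coefficients in x, each itself a polynomial in y
print(dmp_to_dict(f, 1))  # {(2, 1): 1, (0, 0): 3}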
10,040
def run_traffic_step(emr_connection, step, jobflow_name, wait=True, sleeptime=60, retries=1, **jobflow_kw): jobflowid = _add_step(emr_connection, step, jobflow_name, **jobflow_kw) if (not wait): return attempts = 1 exit_state = _wait_for_step(emr_connection, step, jobflowid, sleeptime) while ((attempts <= retries) and (exit_state != COMPLETED)): jobflowid = _add_step(emr_connection, step, jobflow_name, **jobflow_kw) exit_state = _wait_for_step(emr_connection, step, jobflowid, sleeptime) attempts += 1 if (exit_state != COMPLETED): msg = ('%s failed (exit: %s)' % (step.name, exit_state)) if retries: msg += ('retried %s times' % retries) raise EmrException(msg)
[ "def", "run_traffic_step", "(", "emr_connection", ",", "step", ",", "jobflow_name", ",", "wait", "=", "True", ",", "sleeptime", "=", "60", ",", "retries", "=", "1", ",", "**", "jobflow_kw", ")", ":", "jobflowid", "=", "_add_step", "(", "emr_connection", ",", "step", ",", "jobflow_name", ",", "**", "jobflow_kw", ")", "if", "(", "not", "wait", ")", ":", "return", "attempts", "=", "1", "exit_state", "=", "_wait_for_step", "(", "emr_connection", ",", "step", ",", "jobflowid", ",", "sleeptime", ")", "while", "(", "(", "attempts", "<=", "retries", ")", "and", "(", "exit_state", "!=", "COMPLETED", ")", ")", ":", "jobflowid", "=", "_add_step", "(", "emr_connection", ",", "step", ",", "jobflow_name", ",", "**", "jobflow_kw", ")", "exit_state", "=", "_wait_for_step", "(", "emr_connection", ",", "step", ",", "jobflowid", ",", "sleeptime", ")", "attempts", "+=", "1", "if", "(", "exit_state", "!=", "COMPLETED", ")", ":", "msg", "=", "(", "'%s failed (exit: %s)'", "%", "(", "step", ".", "name", ",", "exit_state", ")", ")", "if", "retries", ":", "msg", "+=", "(", "'retried %s times'", "%", "retries", ")", "raise", "EmrException", "(", "msg", ")" ]
run a traffic processing step .
train
false
10,041
def get_machine_ips(): addresses = [] for interface in netifaces.interfaces(): try: iface_data = netifaces.ifaddresses(interface) for family in iface_data: if (family not in (netifaces.AF_INET, netifaces.AF_INET6)): continue for address in iface_data[family]: addr = address['addr'] if (family == netifaces.AF_INET6): addr = addr.split('%')[0] addresses.append(addr) except ValueError: pass return addresses
[ "def", "get_machine_ips", "(", ")", ":", "addresses", "=", "[", "]", "for", "interface", "in", "netifaces", ".", "interfaces", "(", ")", ":", "try", ":", "iface_data", "=", "netifaces", ".", "ifaddresses", "(", "interface", ")", "for", "family", "in", "iface_data", ":", "if", "(", "family", "not", "in", "(", "netifaces", ".", "AF_INET", ",", "netifaces", ".", "AF_INET6", ")", ")", ":", "continue", "for", "address", "in", "iface_data", "[", "family", "]", ":", "addr", "=", "address", "[", "'addr'", "]", "if", "(", "family", "==", "netifaces", ".", "AF_INET6", ")", ":", "addr", "=", "addr", ".", "split", "(", "'%'", ")", "[", "0", "]", "addresses", ".", "append", "(", "addr", ")", "except", "ValueError", ":", "pass", "return", "addresses" ]
get the machine's ip addresses . :returns: list of ip address strings .
train
false
10,044
def mirror(image): return image.transpose(Image.FLIP_LEFT_RIGHT)
[ "def", "mirror", "(", "image", ")", ":", "return", "image", ".", "transpose", "(", "Image", ".", "FLIP_LEFT_RIGHT", ")" ]
flip the image horizontally (left to right) .
train
false
10,045
def returns_None(function): def call_and_assert(*args, **kwargs): original_args = copy.deepcopy(args) original_kwargs = copy.deepcopy(kwargs) result = function(*args, **kwargs) assert (result is None), 'Should return None when called with args: {args} and kwargs: {kwargs}'.format(args=original_args, kwargs=original_kwargs) return result return call_and_assert
[ "def", "returns_None", "(", "function", ")", ":", "def", "call_and_assert", "(", "*", "args", ",", "**", "kwargs", ")", ":", "original_args", "=", "copy", ".", "deepcopy", "(", "args", ")", "original_kwargs", "=", "copy", ".", "deepcopy", "(", "kwargs", ")", "result", "=", "function", "(", "*", "args", ",", "**", "kwargs", ")", "assert", "(", "result", "is", "None", ")", ",", "'Should return None when called with args: {args} and kwargs: {kwargs}'", ".", "format", "(", "args", "=", "original_args", ",", "kwargs", "=", "original_kwargs", ")", "return", "result", "return", "call_and_assert" ]
a decorator that asserts that the decorated function returns none .
train
false
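Usage of the decorator above is plain decoration (names here are illustrative); any non-None return value trips the assertion, reporting the deep-copied call arguments:

@returns_None
def log_event(msg):
    print(msg)            # falls off the end, so it returns None

log_event('hello')        # passes

@returns_None
def bad():
    return 1

# bad() would raise AssertionError mentioning the original args and kwargs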
10,047
def install_hook(): for hook in sys.meta_path: if isinstance(hook, XonshImportHook): break else: sys.meta_path.append(XonshImportHook())
[ "def", "install_hook", "(", ")", ":", "for", "hook", "in", "sys", ".", "meta_path", ":", "if", "isinstance", "(", "hook", ",", "XonshImportHook", ")", ":", "break", "else", ":", "sys", ".", "meta_path", ".", "append", "(", "XonshImportHook", "(", ")", ")" ]
install the xonsh import hook in sys.meta_path .
train
false
10,048
def test_reflected_event_ops(): AreEqual(str(IronPythonTest.Events.StaticTest.Event), '<event# StaticTest on Events>') t_list = [IronPythonTest.Events.StaticTest.Event, IronPythonTest.Events().InstanceTest.Event] for stuff in t_list: for (inst, val) in [(None, None), (1, None), (None, 1), (1, 1), ('abc', 'xyz')]: AssertError(AttributeError, stuff.__set__, inst, val) AssertError(AttributeError, stuff.__delete__, inst) AssertError(AttributeError, IronPythonTest.Events.StaticTest.Event.__set__, None, IronPythonTest.Events().InstanceTest) AssertError(AttributeError, IronPythonTest.Events.StaticTest.Event.__delete__, IronPythonTest.Events().InstanceTest) for stuff in [None, 1, 'abc']: IronPythonTest.Events.StaticTest.Event.__set__(stuff, IronPythonTest.Events.StaticTest)
[ "def", "test_reflected_event_ops", "(", ")", ":", "AreEqual", "(", "str", "(", "IronPythonTest", ".", "Events", ".", "StaticTest", ".", "Event", ")", ",", "'<event# StaticTest on Events>'", ")", "t_list", "=", "[", "IronPythonTest", ".", "Events", ".", "StaticTest", ".", "Event", ",", "IronPythonTest", ".", "Events", "(", ")", ".", "InstanceTest", ".", "Event", "]", "for", "stuff", "in", "t_list", ":", "for", "(", "inst", ",", "val", ")", "in", "[", "(", "None", ",", "None", ")", ",", "(", "1", ",", "None", ")", ",", "(", "None", ",", "1", ")", ",", "(", "1", ",", "1", ")", ",", "(", "'abc'", ",", "'xyz'", ")", "]", ":", "AssertError", "(", "AttributeError", ",", "stuff", ".", "__set__", ",", "inst", ",", "val", ")", "AssertError", "(", "AttributeError", ",", "stuff", ".", "__delete__", ",", "inst", ")", "AssertError", "(", "AttributeError", ",", "IronPythonTest", ".", "Events", ".", "StaticTest", ".", "Event", ".", "__set__", ",", "None", ",", "IronPythonTest", ".", "Events", "(", ")", ".", "InstanceTest", ")", "AssertError", "(", "AttributeError", ",", "IronPythonTest", ".", "Events", ".", "StaticTest", ".", "Event", ".", "__delete__", ",", "IronPythonTest", ".", "Events", "(", ")", ".", "InstanceTest", ")", "for", "stuff", "in", "[", "None", ",", "1", ",", "'abc'", "]", ":", "IronPythonTest", ".", "Events", ".", "StaticTest", ".", "Event", ".", "__set__", "(", "stuff", ",", "IronPythonTest", ".", "Events", ".", "StaticTest", ")" ]
test to hit ironpython .
train
false
10,049
@register.filter def thumbnailer(obj, relative_name=None): return get_thumbnailer(obj, relative_name=relative_name)
[ "@", "register", ".", "filter", "def", "thumbnailer", "(", "obj", ",", "relative_name", "=", "None", ")", ":", "return", "get_thumbnailer", "(", "obj", ",", "relative_name", "=", "relative_name", ")" ]
creates a thumbnailer from an object .
train
false
10,051
def IIDToInterfaceName(iid): try: return pythoncom.ServerInterfaces[iid] except KeyError: try: try: return win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, ('Interface\\%s' % iid)) except win32api.error: pass except ImportError: pass return str(iid)
[ "def", "IIDToInterfaceName", "(", "iid", ")", ":", "try", ":", "return", "pythoncom", ".", "ServerInterfaces", "[", "iid", "]", "except", "KeyError", ":", "try", ":", "try", ":", "return", "win32api", ".", "RegQueryValue", "(", "win32con", ".", "HKEY_CLASSES_ROOT", ",", "(", "'Interface\\\\%s'", "%", "iid", ")", ")", "except", "win32api", ".", "error", ":", "pass", "except", "ImportError", ":", "pass", "return", "str", "(", "iid", ")" ]
converts an iid to a string interface name .
train
false
10,053
def unlock(hass, entity_id=None, code=None): data = {} if code: data[ATTR_CODE] = code if entity_id: data[ATTR_ENTITY_ID] = entity_id hass.services.call(DOMAIN, SERVICE_UNLOCK, data)
[ "def", "unlock", "(", "hass", ",", "entity_id", "=", "None", ",", "code", "=", "None", ")", ":", "data", "=", "{", "}", "if", "code", ":", "data", "[", "ATTR_CODE", "]", "=", "code", "if", "entity_id", ":", "data", "[", "ATTR_ENTITY_ID", "]", "=", "entity_id", "hass", ".", "services", ".", "call", "(", "DOMAIN", ",", "SERVICE_UNLOCK", ",", "data", ")" ]
unlock all or a specified lock entity .
train
false
10,054
def cr(method): method._api = 'cr' return method
[ "def", "cr", "(", "method", ")", ":", "method", ".", "_api", "=", "'cr'", "return", "method" ]
decorate a traditional-style method that takes cr as a parameter .
train
false
10,057
def nexus_artifact_uploader(registry, xml_parent, data): nexus_artifact_uploader = XML.SubElement(xml_parent, 'sp.sd.nexusartifactuploader.NexusArtifactUploader') mapping = [('protocol', 'protocol', 'https'), ('nexus_url', 'nexusUrl', ''), ('nexus_user', 'nexusUser', ''), ('nexus_password', 'nexusPassword', ''), ('group_id', 'groupId', ''), ('artifact_id', 'artifactId', ''), ('version', 'version', ''), ('packaging', 'packaging', ''), ('type', 'type', ''), ('classifier', 'classifier', ''), ('repository', 'repository', ''), ('file', 'file', ''), ('credentials_id', 'credentialsId', '')] convert_mapping_to_xml(nexus_artifact_uploader, data, mapping, fail_required=True)
[ "def", "nexus_artifact_uploader", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "nexus_artifact_uploader", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'sp.sd.nexusartifactuploader.NexusArtifactUploader'", ")", "mapping", "=", "[", "(", "'protocol'", ",", "'protocol'", ",", "'https'", ")", ",", "(", "'nexus_url'", ",", "'nexusUrl'", ",", "''", ")", ",", "(", "'nexus_user'", ",", "'nexusUser'", ",", "''", ")", ",", "(", "'nexus_password'", ",", "'nexusPassword'", ",", "''", ")", ",", "(", "'group_id'", ",", "'groupId'", ",", "''", ")", ",", "(", "'artifact_id'", ",", "'artifactId'", ",", "''", ")", ",", "(", "'version'", ",", "'version'", ",", "''", ")", ",", "(", "'packaging'", ",", "'packaging'", ",", "''", ")", ",", "(", "'type'", ",", "'type'", ",", "''", ")", ",", "(", "'classifier'", ",", "'classifier'", ",", "''", ")", ",", "(", "'repository'", ",", "'repository'", ",", "''", ")", ",", "(", "'file'", ",", "'file'", ",", "''", ")", ",", "(", "'credentials_id'", ",", "'credentialsId'", ",", "''", ")", "]", "convert_mapping_to_xml", "(", "nexus_artifact_uploader", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")" ]
yaml: nexus-artifact-uploader to upload result of a build as an artifact in nexus without the need of maven .
train
false
10,058
def show_vpnservice(vpnservice, profile=None, **kwargs): conn = _auth(profile) return conn.show_vpnservice(vpnservice, **kwargs)
[ "def", "show_vpnservice", "(", "vpnservice", ",", "profile", "=", "None", ",", "**", "kwargs", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_vpnservice", "(", "vpnservice", ",", "**", "kwargs", ")" ]
fetches information of a specific vpn service .
train
true
10,059
def sparse(v): for (f, w) in list(v.items()): if (w == 0): del v[f] return v
[ "def", "sparse", "(", "v", ")", ":", "for", "(", "f", ",", "w", ")", "in", "list", "(", "v", ".", "items", "(", ")", ")", ":", "if", "(", "w", "==", "0", ")", ":", "del", "v", "[", "f", "]", "return", "v" ]
returns the vector with features that have weight 0 removed .
train
false
10,060
def binary_repr(number, max_length=1025): shifts = list(map(operator.rshift, (max_length * [number]), range((max_length - 1), (-1), (-1)))) digits = list(map(operator.mod, shifts, (max_length * [2]))) if (not digits.count(1)): return 0 digits = digits[digits.index(1):] return u''.join(map(repr, digits)).replace(u'L', u'')
[ "def", "binary_repr", "(", "number", ",", "max_length", "=", "1025", ")", ":", "shifts", "=", "list", "(", "map", "(", "operator", ".", "rshift", ",", "(", "max_length", "*", "[", "number", "]", ")", ",", "range", "(", "(", "max_length", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ")", ")", "digits", "=", "list", "(", "map", "(", "operator", ".", "mod", ",", "shifts", ",", "(", "max_length", "*", "[", "2", "]", ")", ")", ")", "if", "(", "not", "digits", ".", "count", "(", "1", ")", ")", ":", "return", "0", "digits", "=", "digits", "[", "digits", ".", "index", "(", "1", ")", ":", "]", "return", "u''", ".", "join", "(", "map", "(", "repr", ",", "digits", ")", ")", ".", "replace", "(", "u'L'", ",", "u''", ")" ]
return the binary representation of the input *number* as a string .
train
false
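For reference, the corner case above returns the integer 0 (not a string) when no 1-bit is found; in modern Python the whole conversion is built in:

print(format(10, 'b'))  # '1010'
print(bin(10)[2:])      # '1010'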
10,061
def role_get(user): user_roles = [] with salt.utils.fopen('/etc/user_attr', 'r') as user_attr: for role in user_attr: role = role.strip().strip().split(':') if (len(role) != 5): continue if (role[0] != user): continue attrs = {} for attr in role[4].strip().split(';'): (attr_key, attr_val) = attr.strip().split('=') if (attr_key in ['auths', 'profiles', 'roles']): attrs[attr_key] = attr_val.strip().split(',') else: attrs[attr_key] = attr_val if ('roles' in attrs): user_roles.extend(attrs['roles']) return list(set(user_roles))
[ "def", "role_get", "(", "user", ")", ":", "user_roles", "=", "[", "]", "with", "salt", ".", "utils", ".", "fopen", "(", "'/etc/user_attr'", ",", "'r'", ")", "as", "user_attr", ":", "for", "role", "in", "user_attr", ":", "role", "=", "role", ".", "strip", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "role", ")", "!=", "5", ")", ":", "continue", "if", "(", "role", "[", "0", "]", "!=", "user", ")", ":", "continue", "attrs", "=", "{", "}", "for", "attr", "in", "role", "[", "4", "]", ".", "strip", "(", ")", ".", "split", "(", "';'", ")", ":", "(", "attr_key", ",", "attr_val", ")", "=", "attr", ".", "strip", "(", ")", ".", "split", "(", "'='", ")", "if", "(", "attr_key", "in", "[", "'auths'", ",", "'profiles'", ",", "'roles'", "]", ")", ":", "attrs", "[", "attr_key", "]", "=", "attr_val", ".", "strip", "(", ")", ".", "split", "(", "','", ")", "else", ":", "attrs", "[", "attr_key", "]", "=", "attr_val", "if", "(", "'roles'", "in", "attrs", ")", ":", "user_roles", ".", "extend", "(", "attrs", "[", "'roles'", "]", ")", "return", "list", "(", "set", "(", "user_roles", ")", ")" ]
return the list of roles assigned to a user, parsed from /etc/user_attr .
train
true
10,062
def localpath(*args): plist = ([ROOT] + list(args)) return os.path.abspath(pjoin(*plist))
[ "def", "localpath", "(", "*", "args", ")", ":", "plist", "=", "(", "[", "ROOT", "]", "+", "list", "(", "args", ")", ")", "return", "os", ".", "path", ".", "abspath", "(", "pjoin", "(", "*", "plist", ")", ")" ]
construct an absolute path from a list relative to the root pyzmq directory .
train
true
10,063
def valid_max_age(number): if isinstance(number, basestring): try: number = long(number) except (ValueError, TypeError): return False if ((number >= 0) and ((number % 1) == 0)): return True return False
[ "def", "valid_max_age", "(", "number", ")", ":", "if", "isinstance", "(", "number", ",", "basestring", ")", ":", "try", ":", "number", "=", "long", "(", "number", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "False", "if", "(", "(", "number", ">=", "0", ")", "and", "(", "(", "number", "%", "1", ")", "==", "0", ")", ")", ":", "return", "True", "return", "False" ]
validate a cookie max-age .
train
true
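The max-age validator is Python 2 (basestring, long); a minimal Python 3 port of the same checks, with an explicit type guard so None stays False instead of raising:

def valid_max_age_py3(number):
    if isinstance(number, str):
        try:
            number = int(number)
        except (ValueError, TypeError):
            return False
    return isinstance(number, (int, float)) and number >= 0 and number % 1 == 0

print(valid_max_age_py3('3600'))  # True
print(valid_max_age_py3(-1))      # False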
10,064
def _find_shallow(store, heads, depth): parents = {} def get_parents(sha): result = parents.get(sha, None) if (not result): result = store[sha].parents parents[sha] = result return result todo = [] for head_sha in heads: obj = store.peel_sha(head_sha) if isinstance(obj, Commit): todo.append((obj.id, 1)) not_shallow = set() shallow = set() while todo: (sha, cur_depth) = todo.pop() if (cur_depth < depth): not_shallow.add(sha) new_depth = (cur_depth + 1) todo.extend(((p, new_depth) for p in get_parents(sha))) else: shallow.add(sha) return (shallow, not_shallow)
[ "def", "_find_shallow", "(", "store", ",", "heads", ",", "depth", ")", ":", "parents", "=", "{", "}", "def", "get_parents", "(", "sha", ")", ":", "result", "=", "parents", ".", "get", "(", "sha", ",", "None", ")", "if", "(", "not", "result", ")", ":", "result", "=", "store", "[", "sha", "]", ".", "parents", "parents", "[", "sha", "]", "=", "result", "return", "result", "todo", "=", "[", "]", "for", "head_sha", "in", "heads", ":", "obj", "=", "store", ".", "peel_sha", "(", "head_sha", ")", "if", "isinstance", "(", "obj", ",", "Commit", ")", ":", "todo", ".", "append", "(", "(", "obj", ".", "id", ",", "1", ")", ")", "not_shallow", "=", "set", "(", ")", "shallow", "=", "set", "(", ")", "while", "todo", ":", "(", "sha", ",", "cur_depth", ")", "=", "todo", ".", "pop", "(", ")", "if", "(", "cur_depth", "<", "depth", ")", ":", "not_shallow", ".", "add", "(", "sha", ")", "new_depth", "=", "(", "cur_depth", "+", "1", ")", "todo", ".", "extend", "(", "(", "(", "p", ",", "new_depth", ")", "for", "p", "in", "get_parents", "(", "sha", ")", ")", ")", "else", ":", "shallow", ".", "add", "(", "sha", ")", "return", "(", "shallow", ",", "not_shallow", ")" ]
find shallow commits according to a given depth .
train
false
10,065
def configure_cache(client, test_name): client.http_client.cache_test_name = test_name cache_name = client.http_client.get_cache_file_name() if (options.get_value('clearcache') == 'true'): client.http_client.delete_session(cache_name) client.http_client.use_cached_session(cache_name)
[ "def", "configure_cache", "(", "client", ",", "test_name", ")", ":", "client", ".", "http_client", ".", "cache_test_name", "=", "test_name", "cache_name", "=", "client", ".", "http_client", ".", "get_cache_file_name", "(", ")", "if", "(", "options", ".", "get_value", "(", "'clearcache'", ")", "==", "'true'", ")", ":", "client", ".", "http_client", ".", "delete_session", "(", "cache_name", ")", "client", ".", "http_client", ".", "use_cached_session", "(", "cache_name", ")" ]
loads or begins a cached session to record http traffic .
train
false
10,066
def LSTD_PI_policy(fMap, Ts, R, discountFactor, initpolicy=None, maxIters=20): def veval(T): return LSTD_values(T, R, fMap, discountFactor) return policyIteration(Ts, R, discountFactor, VEvaluator=veval, initpolicy=initpolicy, maxIters=maxIters)
[ "def", "LSTD_PI_policy", "(", "fMap", ",", "Ts", ",", "R", ",", "discountFactor", ",", "initpolicy", "=", "None", ",", "maxIters", "=", "20", ")", ":", "def", "veval", "(", "T", ")", ":", "return", "LSTD_values", "(", "T", ",", "R", ",", "fMap", ",", "discountFactor", ")", "return", "policyIteration", "(", "Ts", ",", "R", ",", "discountFactor", ",", "VEvaluator", "=", "veval", ",", "initpolicy", "=", "initpolicy", ",", "maxIters", "=", "maxIters", ")" ]
alternative version of lspi using value functions instead of state-action values as intermediate .
train
false
10,067
@receiver(post_save, sender=Check) def update_failed_check_flag(sender, instance, **kwargs): if (instance.language is None): return related = get_related_units(instance) if (instance.for_unit is not None): related = related.exclude(pk=instance.for_unit) for unit in related: unit.update_has_failing_check(False)
[ "@", "receiver", "(", "post_save", ",", "sender", "=", "Check", ")", "def", "update_failed_check_flag", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "if", "(", "instance", ".", "language", "is", "None", ")", ":", "return", "related", "=", "get_related_units", "(", "instance", ")", "if", "(", "instance", ".", "for_unit", "is", "not", "None", ")", ":", "related", "=", "related", ".", "exclude", "(", "pk", "=", "instance", ".", "for_unit", ")", "for", "unit", "in", "related", ":", "unit", ".", "update_has_failing_check", "(", "False", ")" ]
update related unit failed check flag .
train
false
10,068
def _tolerateErrors(wrapped): def infoCallback(connection, where, ret): try: return wrapped(connection, where, ret) except: f = Failure() log.err(f, 'Error during info_callback') connection.get_app_data().failVerification(f) return infoCallback
[ "def", "_tolerateErrors", "(", "wrapped", ")", ":", "def", "infoCallback", "(", "connection", ",", "where", ",", "ret", ")", ":", "try", ":", "return", "wrapped", "(", "connection", ",", "where", ",", "ret", ")", "except", ":", "f", "=", "Failure", "(", ")", "log", ".", "err", "(", "f", ",", "'Error during info_callback'", ")", "connection", ".", "get_app_data", "(", ")", ".", "failVerification", "(", "f", ")", "return", "infoCallback" ]
wrap up an c{info_callback} for pyopenssl so that if something goes wrong the error is immediately logged and the connection is dropped if possible .
train
false
10,070
def test_continuous_error(): y = np.linspace(0, 1, 20) cnn = CondensedNearestNeighbour(random_state=RND_SEED) assert_warns(UserWarning, cnn.fit, X, y)
[ "def", "test_continuous_error", "(", ")", ":", "y", "=", "np", ".", "linspace", "(", "0", ",", "1", ",", "20", ")", "cnn", "=", "CondensedNearestNeighbour", "(", "random_state", "=", "RND_SEED", ")", "assert_warns", "(", "UserWarning", ",", "cnn", ".", "fit", ",", "X", ",", "y", ")" ]
test whether an error is raised when the targets are of continuous type .
train
false
10,071
def immnodeset_union(iterable, *args): set = mutnodeset_union(iterable) return immnodeset(set, *args)
[ "def", "immnodeset_union", "(", "iterable", ",", "*", "args", ")", ":", "set", "=", "mutnodeset_union", "(", "iterable", ")", "return", "immnodeset", "(", "set", ",", "*", "args", ")" ]
return an immutable nodeset which is the union of all nodesets in iterable .
train
false
10,072
def pareto_sequence(n, exponent=1.0): return [random.paretovariate(exponent) for i in range(n)]
[ "def", "pareto_sequence", "(", "n", ",", "exponent", "=", "1.0", ")", ":", "return", "[", "random", ".", "paretovariate", "(", "exponent", ")", "for", "i", "in", "range", "(", "n", ")", "]" ]
return sample sequence of length n from a pareto distribution .
train
false
10,073
def TurnIntIntoStrInDict(the_dict): for (k, v) in the_dict.items(): if (type(v) is int): v = str(v) the_dict[k] = v elif (type(v) is dict): TurnIntIntoStrInDict(v) elif (type(v) is list): TurnIntIntoStrInList(v) if (type(k) is int): del the_dict[k] the_dict[str(k)] = v
[ "def", "TurnIntIntoStrInDict", "(", "the_dict", ")", ":", "for", "(", "k", ",", "v", ")", "in", "the_dict", ".", "items", "(", ")", ":", "if", "(", "type", "(", "v", ")", "is", "int", ")", ":", "v", "=", "str", "(", "v", ")", "the_dict", "[", "k", "]", "=", "v", "elif", "(", "type", "(", "v", ")", "is", "dict", ")", ":", "TurnIntIntoStrInDict", "(", "v", ")", "elif", "(", "type", "(", "v", ")", "is", "list", ")", ":", "TurnIntIntoStrInList", "(", "v", ")", "if", "(", "type", "(", "k", ")", "is", "int", ")", ":", "del", "the_dict", "[", "k", "]", "the_dict", "[", "str", "(", "k", ")", "]", "=", "v" ]
given dict the_dict, convert all integer keys and values to strings, recursing into nested dicts and lists .
train
false
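A small worked example, assuming the companion TurnIntIntoStrInList (not shown) does the analogous conversion for lists; the dict is mutated in place, and iterating items() while deleting keys is only safe where items() returns a list (Python 2):

d = {1: 2, 'a': {'b': 3}}
TurnIntIntoStrInDict(d)
print(d)  # {'1': '2', 'a': {'b': '3'}}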
10,074
def fasta_ids(fasta_files, verbose=False): all_ids = set([]) for fasta_in in fasta_files: for (label, seq) in parse_fasta(fasta_in): rid = label.split()[0] if (rid in all_ids): raise ValueError(('Duplicate ID found in FASTA/qual file: %s' % label)) all_ids.add(rid) return all_ids
[ "def", "fasta_ids", "(", "fasta_files", ",", "verbose", "=", "False", ")", ":", "all_ids", "=", "set", "(", "[", "]", ")", "for", "fasta_in", "in", "fasta_files", ":", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "fasta_in", ")", ":", "rid", "=", "label", ".", "split", "(", ")", "[", "0", "]", "if", "(", "rid", "in", "all_ids", ")", ":", "raise", "ValueError", "(", "(", "'Duplicate ID found in FASTA/qual file: %s'", "%", "label", ")", ")", "all_ids", ".", "add", "(", "rid", ")", "return", "all_ids" ]
returns the set of ids in the fasta files, raising on duplicate ids .
train
false
10,076
@dec.skip('Testing the skip decorator') def test_deliberately_broken2(): (1 / 0)
[ "@", "dec", ".", "skip", "(", "'Testing the skip decorator'", ")", "def", "test_deliberately_broken2", "(", ")", ":", "(", "1", "/", "0", ")" ]
another deliberately broken test - we want to skip this one .
train
false
10,078
def global_parameters(b, c): return ((y, ((b[0] - x) - y), x) for (x, y) in zip((b + [0]), ([0] + c)))
[ "def", "global_parameters", "(", "b", ",", "c", ")", ":", "return", "(", "(", "y", ",", "(", "(", "b", "[", "0", "]", "-", "x", ")", "-", "y", ")", ",", "x", ")", "for", "(", "x", ",", "y", ")", "in", "zip", "(", "(", "b", "+", "[", "0", "]", ")", ",", "(", "[", "0", "]", "+", "c", ")", ")", ")" ]
return global parameters for a given intersection array .
train
false
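For a distance-regular graph with intersection array (b, c), this generator yields the triples (c_i, a_i, b_i) level by level; e.g. the Petersen graph has b = [3, 2], c = [1, 1]:

print(list(global_parameters([3, 2], [1, 1])))
# [(0, 0, 3), (1, 0, 2), (1, 2, 0)]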
10,079
def bytes_feature(values): return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
[ "def", "bytes_feature", "(", "values", ")", ":", "return", "tf", ".", "train", ".", "Feature", "(", "bytes_list", "=", "tf", ".", "train", ".", "BytesList", "(", "value", "=", "[", "values", "]", ")", ")" ]
returns a tf-feature of bytes .
train
false
10,080
def match_all(string, trie): matches = [] for i in range(len(string)): substr = string[:(i + 1)] if (not trie.has_prefix(substr)): break if (substr in trie): matches.append(substr) return matches
[ "def", "match_all", "(", "string", ",", "trie", ")", ":", "matches", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "string", ")", ")", ":", "substr", "=", "string", "[", ":", "(", "i", "+", "1", ")", "]", "if", "(", "not", "trie", ".", "has_prefix", "(", "substr", ")", ")", ":", "break", "if", "(", "substr", "in", "trie", ")", ":", "matches", ".", "append", "(", "substr", ")", "return", "matches" ]
match_all -> list of keys . find all the keys in the trie that match the beginning of the string .
train
false
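To exercise match_all, here is a hypothetical stand-in for the trie (the real Trie class is not shown); it needs only has_prefix and membership:

class TinyTrie:
    def __init__(self, words):
        self.words = set(words)
        self.prefixes = {w[:i] for w in words for i in range(1, len(w) + 1)}

    def has_prefix(self, s):
        return s in self.prefixes

    def __contains__(self, s):
        return s in self.words

print(match_all('cartoon', TinyTrie(['ca', 'car', 'carp'])))  # ['ca', 'car']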
10,081
def email_reply(): from treeio.core.mail import IMAP_SERVER, EMAIL_USERNAME, EMAIL_PASSWORD emailreplier = EmailReplier('IMAP-SSL', IMAP_SERVER, EMAIL_USERNAME, EMAIL_PASSWORD, getattr(settings, 'HARDTREE_MESSAGING_IMAP_DEFAULT_FOLDER_NAME', 'UNSEEN')) emailreplier.get_emails()
[ "def", "email_reply", "(", ")", ":", "from", "treeio", ".", "core", ".", "mail", "import", "IMAP_SERVER", ",", "EMAIL_USERNAME", ",", "EMAIL_PASSWORD", "emailreplier", "=", "EmailReplier", "(", "'IMAP-SSL'", ",", "IMAP_SERVER", ",", "EMAIL_USERNAME", ",", "EMAIL_PASSWORD", ",", "getattr", "(", "settings", ",", "'HARDTREE_MESSAGING_IMAP_DEFAULT_FOLDER_NAME'", ",", "'UNSEEN'", ")", ")", "emailreplier", ".", "get_emails", "(", ")" ]
fetches emails .
train
false
10,082
def create_collection_summary(collection_id, contributor_id_to_add): collection = get_collection_by_id(collection_id) collection_summary = compute_summary_of_collection(collection, contributor_id_to_add) save_collection_summary(collection_summary)
[ "def", "create_collection_summary", "(", "collection_id", ",", "contributor_id_to_add", ")", ":", "collection", "=", "get_collection_by_id", "(", "collection_id", ")", "collection_summary", "=", "compute_summary_of_collection", "(", "collection", ",", "contributor_id_to_add", ")", "save_collection_summary", "(", "collection_summary", ")" ]
creates and stores a summary of the given collection .
train
false
10,083
@require_GET def ajax_status(request): if (not request.user.is_authenticated()): raise PermissionDenied qs = UserPreference.objects.filter(user=request.user, key=NOTIFICATION_PREF_KEY) return HttpResponse(json.dumps({'status': len(qs)}), content_type='application/json')
[ "@", "require_GET", "def", "ajax_status", "(", "request", ")", ":", "if", "(", "not", "request", ".", "user", ".", "is_authenticated", "(", ")", ")", ":", "raise", "PermissionDenied", "qs", "=", "UserPreference", ".", "objects", ".", "filter", "(", "user", "=", "request", ".", "user", ",", "key", "=", "NOTIFICATION_PREF_KEY", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'status'", ":", "len", "(", "qs", ")", "}", ")", ",", "content_type", "=", "'application/json'", ")" ]
a view that retrieves notification status for the authenticated user .
train
false
10,084
def fourier_ellipsoid(input, size, n=(-1), axis=(-1), output=None): input = numpy.asarray(input) (output, return_value) = _get_output_fourier(output, input) axis = _ni_support._check_axis(axis, input.ndim) sizes = _ni_support._normalize_sequence(size, input.ndim) sizes = numpy.asarray(sizes, dtype=numpy.float64) if (not sizes.flags.contiguous): sizes = sizes.copy() _nd_image.fourier_filter(input, sizes, n, axis, output, 2) return return_value
[ "def", "fourier_ellipsoid", "(", "input", ",", "size", ",", "n", "=", "(", "-", "1", ")", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ")", ":", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "(", "output", ",", "return_value", ")", "=", "_get_output_fourier", "(", "output", ",", "input", ")", "axis", "=", "_ni_support", ".", "_check_axis", "(", "axis", ",", "input", ".", "ndim", ")", "sizes", "=", "_ni_support", ".", "_normalize_sequence", "(", "size", ",", "input", ".", "ndim", ")", "sizes", "=", "numpy", ".", "asarray", "(", "sizes", ",", "dtype", "=", "numpy", ".", "float64", ")", "if", "(", "not", "sizes", ".", "flags", ".", "contiguous", ")", ":", "sizes", "=", "sizes", ".", "copy", "(", ")", "_nd_image", ".", "fourier_filter", "(", "input", ",", "sizes", ",", "n", ",", "axis", ",", "output", ",", "2", ")", "return", "return_value" ]
multi-dimensional ellipsoid fourier filter .
train
false
10,085
def delete_cache_subnet_group(name, region=None, key=None, keyid=None, profile=None, **args): return _delete_resource(name, name_param='CacheSubnetGroupName', desc='cache subnet group', res_type='cache_subnet_group', region=region, key=key, keyid=keyid, profile=profile, **args)
[ "def", "delete_cache_subnet_group", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "**", "args", ")", ":", "return", "_delete_resource", "(", "name", ",", "name_param", "=", "'CacheSubnetGroupName'", ",", "desc", "=", "'cache subnet group'", ",", "res_type", "=", "'cache_subnet_group'", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ",", "**", "args", ")" ]
delete an elasticache subnet group .
train
true
10,086
def find_mapreduce_yaml(status_file=__file__): checked = set() yaml = _find_mapreduce_yaml(os.path.dirname(status_file), checked) if (not yaml): yaml = _find_mapreduce_yaml(os.getcwd(), checked) return yaml
[ "def", "find_mapreduce_yaml", "(", "status_file", "=", "__file__", ")", ":", "checked", "=", "set", "(", ")", "yaml", "=", "_find_mapreduce_yaml", "(", "os", ".", "path", ".", "dirname", "(", "status_file", ")", ",", "checked", ")", "if", "(", "not", "yaml", ")", ":", "yaml", "=", "_find_mapreduce_yaml", "(", "os", ".", "getcwd", "(", ")", ",", "checked", ")", "return", "yaml" ]
traverse directory trees to find a mapreduce.yaml file .
train
true
10,087
def test_transform_output(argument_pair): g = Expression.fromstring(argument_pair[0]) alist = [lp.parse(a) for a in argument_pair[1]] m = MaceCommand(g, assumptions=alist) m.build_model() for a in alist: print((' %s' % a)) print(('|- %s: %s\n' % (g, m.build_model()))) for format in ['standard', 'portable', 'xml', 'cooked']: spacer() print(("Using '%s' format" % format)) spacer() print(m.model(format=format))
[ "def", "test_transform_output", "(", "argument_pair", ")", ":", "g", "=", "Expression", ".", "fromstring", "(", "argument_pair", "[", "0", "]", ")", "alist", "=", "[", "lp", ".", "parse", "(", "a", ")", "for", "a", "in", "argument_pair", "[", "1", "]", "]", "m", "=", "MaceCommand", "(", "g", ",", "assumptions", "=", "alist", ")", "m", ".", "build_model", "(", ")", "for", "a", "in", "alist", ":", "print", "(", "(", "' %s'", "%", "a", ")", ")", "print", "(", "(", "'|- %s: %s\\n'", "%", "(", "g", ",", "m", ".", "build_model", "(", ")", ")", ")", ")", "for", "format", "in", "[", "'standard'", ",", "'portable'", ",", "'xml'", ",", "'cooked'", "]", ":", "spacer", "(", ")", "print", "(", "(", "\"Using '%s' format\"", "%", "format", ")", ")", "spacer", "(", ")", "print", "(", "m", ".", "model", "(", "format", "=", "format", ")", ")" ]
transform the model into various mace4 interpformat formats .
train
false
10,089
def default_if_none(value, arg): if (value is None): return arg return value
[ "def", "default_if_none", "(", "value", ",", "arg", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "arg", "return", "value" ]
if value is none, use the given default .
train
false
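Called directly, only None triggers the fallback; other falsy values pass through (unlike Django's plain default filter):

print(default_if_none(None, 'n/a'))  # 'n/a'
print(default_if_none(0, 'n/a'))     # 0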
10,090
def find_or_none(key, search_maps, _map_index=0): try: attr = getattr(search_maps[_map_index], key) return (attr if (attr is not None) else find_or_none(key, search_maps[1:])) except AttributeError: return find_or_none(key, search_maps, (_map_index + 1)) except IndexError: return None
[ "def", "find_or_none", "(", "key", ",", "search_maps", ",", "_map_index", "=", "0", ")", ":", "try", ":", "attr", "=", "getattr", "(", "search_maps", "[", "_map_index", "]", ",", "key", ")", "return", "(", "attr", "if", "(", "attr", "is", "not", "None", ")", "else", "find_or_none", "(", "key", ",", "search_maps", "[", "1", ":", "]", ")", ")", "except", "AttributeError", ":", "return", "find_or_none", "(", "key", ",", "search_maps", ",", "(", "_map_index", "+", "1", ")", ")", "except", "IndexError", ":", "return", "None" ]
return the value of the first key found in the list of search_maps .
train
false
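A quick trace with stand-in objects: both a missing attribute and a stored None mean 'keep looking' in the remaining maps:

from types import SimpleNamespace

maps = [SimpleNamespace(y=1), SimpleNamespace(x=None), SimpleNamespace(x=42)]
print(find_or_none('x', maps))  # 42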
10,091
@task def render_document_chunk(pks, cache_control='no-cache', base_url=None, force=False): logger = render_document_chunk.get_logger() logger.info((u'Starting to render document chunk: %s' % ','.join([str(pk) for pk in pks]))) base_url = (base_url or settings.SITE_URL) for pk in pks: result = render_document(pk, cache_control, base_url, force=force) if result: logger.error((u'Error while rendering document %s with error: %s' % (pk, result))) logger.info(u'Finished rendering of document chunk')
[ "@", "task", "def", "render_document_chunk", "(", "pks", ",", "cache_control", "=", "'no-cache'", ",", "base_url", "=", "None", ",", "force", "=", "False", ")", ":", "logger", "=", "render_document_chunk", ".", "get_logger", "(", ")", "logger", ".", "info", "(", "(", "u'Starting to render document chunk: %s'", "%", "','", ".", "join", "(", "[", "str", "(", "pk", ")", "for", "pk", "in", "pks", "]", ")", ")", ")", "base_url", "=", "(", "base_url", "or", "settings", ".", "SITE_URL", ")", "for", "pk", "in", "pks", ":", "result", "=", "render_document", "(", "pk", ",", "cache_control", ",", "base_url", ",", "force", "=", "force", ")", "if", "result", ":", "logger", ".", "error", "(", "(", "u'Error while rendering document %s with error: %s'", "%", "(", "pk", ",", "result", ")", ")", ")", "logger", ".", "info", "(", "u'Finished rendering of document chunk'", ")" ]
simple task to render a chunk of documents instead of one per each .
train
false
10,092
def getVersionString(version): result = ('%s %s' % (version.package, version.short())) return result
[ "def", "getVersionString", "(", "version", ")", ":", "result", "=", "(", "'%s %s'", "%", "(", "version", ".", "package", ",", "version", ".", "short", "(", ")", ")", ")", "return", "result" ]
get a friendly string for the given version object .
train
false
10,093
def get_instance_uuid_by_ec2_id(context, ec2_id): return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)
[ "def", "get_instance_uuid_by_ec2_id", "(", "context", ",", "ec2_id", ")", ":", "return", "IMPL", ".", "get_instance_uuid_by_ec2_id", "(", "context", ",", "ec2_id", ")" ]
get uuid through ec2 id from instance_id_mappings table .
train
false
10,094
def setOptimize(): conf.keepAlive = True conf.threads = (3 if (conf.threads < 3) else conf.threads) conf.nullConnection = (not any((conf.data, conf.textOnly, conf.titles, conf.string, conf.notString, conf.regexp, conf.tor))) if (not conf.nullConnection): debugMsg = "turning off switch '--null-connection' used indirectly by switch '-o'" logger.debug(debugMsg)
[ "def", "setOptimize", "(", ")", ":", "conf", ".", "keepAlive", "=", "True", "conf", ".", "threads", "=", "(", "3", "if", "(", "conf", ".", "threads", "<", "3", ")", "else", "conf", ".", "threads", ")", "conf", ".", "nullConnection", "=", "(", "not", "any", "(", "(", "conf", ".", "data", ",", "conf", ".", "textOnly", ",", "conf", ".", "titles", ",", "conf", ".", "string", ",", "conf", ".", "notString", ",", "conf", ".", "regexp", ",", "conf", ".", "tor", ")", ")", ")", "if", "(", "not", "conf", ".", "nullConnection", ")", ":", "debugMsg", "=", "\"turning off switch '--null-connection' used indirectly by switch '-o'\"", "logger", ".", "debug", "(", "debugMsg", ")" ]
sets options turned on by switch -o .
train
false
10,095
@register(u'next-history') def next_history(event): event.current_buffer.history_forward(count=event.arg)
[ "@", "register", "(", "u'next-history'", ")", "def", "next_history", "(", "event", ")", ":", "event", ".", "current_buffer", ".", "history_forward", "(", "count", "=", "event", ".", "arg", ")" ]
move forward through the history list .
train
false
10,096
def _read_byte(f): return np.uint8(struct.unpack('>B', f.read(4)[:1])[0])
[ "def", "_read_byte", "(", "f", ")", ":", "return", "np", ".", "uint8", "(", "struct", ".", "unpack", "(", "'>B'", ",", "f", ".", "read", "(", "4", ")", "[", ":", "1", "]", ")", "[", "0", "]", ")" ]
read a single byte .
train
false
10,097
def test_ajd(): (n_times, n_channels) = (10, 3) seed = np.random.RandomState(0) diags = (2.0 + (0.1 * seed.randn(n_times, n_channels))) A = ((2 * seed.rand(n_channels, n_channels)) - 1) A /= np.atleast_2d(np.sqrt(np.sum((A ** 2), 1))).T covmats = np.empty((n_times, n_channels, n_channels)) for i in range(n_times): covmats[i] = np.dot(np.dot(A, np.diag(diags[i])), A.T) (V, D) = _ajd_pham(covmats) V_matlab = [[(-3.507280775058041), (-5.498189967306344), 7.720624541198574], [0.69468901323461, 0.775690358505945, (-1.162043086446043)], [(-0.592603135588066), (-0.59899692569626), 1.009550086271192]] assert_array_almost_equal(V, V_matlab)
[ "def", "test_ajd", "(", ")", ":", "(", "n_times", ",", "n_channels", ")", "=", "(", "10", ",", "3", ")", "seed", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "diags", "=", "(", "2.0", "+", "(", "0.1", "*", "seed", ".", "randn", "(", "n_times", ",", "n_channels", ")", ")", ")", "A", "=", "(", "(", "2", "*", "seed", ".", "rand", "(", "n_channels", ",", "n_channels", ")", ")", "-", "1", ")", "A", "/=", "np", ".", "atleast_2d", "(", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "A", "**", "2", ")", ",", "1", ")", ")", ")", ".", "T", "covmats", "=", "np", ".", "empty", "(", "(", "n_times", ",", "n_channels", ",", "n_channels", ")", ")", "for", "i", "in", "range", "(", "n_times", ")", ":", "covmats", "[", "i", "]", "=", "np", ".", "dot", "(", "np", ".", "dot", "(", "A", ",", "np", ".", "diag", "(", "diags", "[", "i", "]", ")", ")", ",", "A", ".", "T", ")", "(", "V", ",", "D", ")", "=", "_ajd_pham", "(", "covmats", ")", "V_matlab", "=", "[", "[", "(", "-", "3.507280775058041", ")", ",", "(", "-", "5.498189967306344", ")", ",", "7.720624541198574", "]", ",", "[", "0.69468901323461", ",", "0.775690358505945", ",", "(", "-", "1.162043086446043", ")", "]", ",", "[", "(", "-", "0.592603135588066", ")", ",", "(", "-", "0.59899692569626", ")", ",", "1.009550086271192", "]", "]", "assert_array_almost_equal", "(", "V", ",", "V_matlab", ")" ]
test if approximate joint diagonalization implementation obtains same results as the matlab implementation by pham dinh-tuan .
train
false
10,098
def init_state_collection(state, dict_, key): attr = state.manager[key].impl user_data = attr.initialize(state, dict_) return attr.get_collection(state, dict_, user_data)
[ "def", "init_state_collection", "(", "state", ",", "dict_", ",", "key", ")", ":", "attr", "=", "state", ".", "manager", "[", "key", "]", ".", "impl", "user_data", "=", "attr", ".", "initialize", "(", "state", ",", "dict_", ")", "return", "attr", ".", "get_collection", "(", "state", ",", "dict_", ",", "user_data", ")" ]
initialize a collection attribute and return the collection adapter .
train
false
10,099
def _item_to_changes(iterator, resource): return Changes.from_api_repr(resource, iterator.zone)
[ "def", "_item_to_changes", "(", "iterator", ",", "resource", ")", ":", "return", "Changes", ".", "from_api_repr", "(", "resource", ",", "iterator", ".", "zone", ")" ]
convert a json "changes" value to the native object .
train
false
10,100
def set_sync(value): return set_var('SYNC', value)
[ "def", "set_sync", "(", "value", ")", ":", "return", "set_var", "(", "'SYNC'", ",", "value", ")" ]
set the sync variable . return a dict containing the new value for the variable: {'<variable>': {'old': '<old-value>', 'new': '<new-value>'}} .
train
false
10,101
def get_task_logger(name): if (name in RESERVED_LOGGER_NAMES): raise RuntimeError(u'Logger name {0!r} is reserved!'.format(name)) return _using_logger_parent(task_logger, get_logger(name))
[ "def", "get_task_logger", "(", "name", ")", ":", "if", "(", "name", "in", "RESERVED_LOGGER_NAMES", ")", ":", "raise", "RuntimeError", "(", "u'Logger name {0!r} is reserved!'", ".", "format", "(", "name", ")", ")", "return", "_using_logger_parent", "(", "task_logger", ",", "get_logger", "(", "name", ")", ")" ]
get logger for task module by name .
train
false
10,102
def is_ec2_timestamp_expired(request, expires=None): timestamp = request.get('Timestamp') expiry_time = request.get('Expires') def parse_strtime(strtime): if _ms_time_regex.match(strtime): time_format = '%Y-%m-%dT%H:%M:%S.%fZ' else: time_format = '%Y-%m-%dT%H:%M:%SZ' return timeutils.parse_strtime(strtime, time_format) try: if (timestamp and expiry_time): msg = _('Request must include either Timestamp or Expires, but cannot contain both') LOG.error(msg) raise exception.InvalidRequest(msg) elif expiry_time: query_time = parse_strtime(expiry_time) return timeutils.is_older_than(query_time, (-1)) elif timestamp: query_time = parse_strtime(timestamp) if (query_time and expires): return (timeutils.is_older_than(query_time, expires) or timeutils.is_newer_than(query_time, expires)) return False except ValueError: LOG.info(_LI('Timestamp is invalid.')) return True
[ "def", "is_ec2_timestamp_expired", "(", "request", ",", "expires", "=", "None", ")", ":", "timestamp", "=", "request", ".", "get", "(", "'Timestamp'", ")", "expiry_time", "=", "request", ".", "get", "(", "'Expires'", ")", "def", "parse_strtime", "(", "strtime", ")", ":", "if", "_ms_time_regex", ".", "match", "(", "strtime", ")", ":", "time_format", "=", "'%Y-%m-%dT%H:%M:%S.%fZ'", "else", ":", "time_format", "=", "'%Y-%m-%dT%H:%M:%SZ'", "return", "timeutils", ".", "parse_strtime", "(", "strtime", ",", "time_format", ")", "try", ":", "if", "(", "timestamp", "and", "expiry_time", ")", ":", "msg", "=", "_", "(", "'Request must include either Timestamp or Expires, but cannot contain both'", ")", "LOG", ".", "error", "(", "msg", ")", "raise", "exception", ".", "InvalidRequest", "(", "msg", ")", "elif", "expiry_time", ":", "query_time", "=", "parse_strtime", "(", "expiry_time", ")", "return", "timeutils", ".", "is_older_than", "(", "query_time", ",", "(", "-", "1", ")", ")", "elif", "timestamp", ":", "query_time", "=", "parse_strtime", "(", "timestamp", ")", "if", "(", "query_time", "and", "expires", ")", ":", "return", "(", "timeutils", ".", "is_older_than", "(", "query_time", ",", "expires", ")", "or", "timeutils", ".", "is_newer_than", "(", "query_time", ",", "expires", ")", ")", "return", "False", "except", "ValueError", ":", "LOG", ".", "info", "(", "_LI", "(", "'Timestamp is invalid.'", ")", ")", "return", "True" ]
checks the timestamp or expiry time included in an ec2 request and returns true if the request is expired .
train
false
10,103
def unbind_floating_ip(floating_ip, device): _execute('ip', 'addr', 'del', (str(floating_ip) + '/32'), 'dev', device, run_as_root=True, check_exit_code=[0, 2, 254])
[ "def", "unbind_floating_ip", "(", "floating_ip", ",", "device", ")", ":", "_execute", "(", "'ip'", ",", "'addr'", ",", "'del'", ",", "(", "str", "(", "floating_ip", ")", "+", "'/32'", ")", ",", "'dev'", ",", "device", ",", "run_as_root", "=", "True", ",", "check_exit_code", "=", "[", "0", ",", "2", ",", "254", "]", ")" ]
unbind a public ip from public interface .
train
false
10,106
def get_scale(x): scales = [20, 50, 100, 200, 400, 600, 800, 1000] for scale in scales: if (x <= scale): return scale return x
[ "def", "get_scale", "(", "x", ")", ":", "scales", "=", "[", "20", ",", "50", ",", "100", ",", "200", ",", "400", ",", "600", ",", "800", ",", "1000", "]", "for", "scale", "in", "scales", ":", "if", "(", "x", "<=", "scale", ")", ":", "return", "scale", "return", "x" ]
finds the lowest scale where x <= scale .
train
false
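The linear scan above is equivalent to a bisect over the sorted presets; a sketch:

import bisect

def get_scale_bisect(x, scales=(20, 50, 100, 200, 400, 600, 800, 1000)):
    i = bisect.bisect_left(scales, x)
    return scales[i] if i < len(scales) else x

print(get_scale_bisect(75))    # 100
print(get_scale_bisect(1500))  # 1500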
10,108
def set_items(widget, items): widget.clear() add_items(widget, items)
[ "def", "set_items", "(", "widget", ",", "items", ")", ":", "widget", ".", "clear", "(", ")", "add_items", "(", "widget", ",", "items", ")" ]
clear the existing widget contents and set the new items .
train
false
10,109
@dec.skipif_not_numpy def test_numpy_reset_array_undec(): _ip.ex('import numpy as np') _ip.ex('a = np.empty(2)') nt.assert_in('a', _ip.user_ns) _ip.magic('reset -f array') nt.assert_not_in('a', _ip.user_ns)
[ "@", "dec", ".", "skipif_not_numpy", "def", "test_numpy_reset_array_undec", "(", ")", ":", "_ip", ".", "ex", "(", "'import numpy as np'", ")", "_ip", ".", "ex", "(", "'a = np.empty(2)'", ")", "nt", ".", "assert_in", "(", "'a'", ",", "_ip", ".", "user_ns", ")", "_ip", ".", "magic", "(", "'reset -f array'", ")", "nt", ".", "assert_not_in", "(", "'a'", ",", "_ip", ".", "user_ns", ")" ]
test %reset array functionality .
train
false
10,110
def CDLLONGLEGGEDDOJI(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLLONGLEGGEDDOJI)
[ "def", "CDLLONGLEGGEDDOJI", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLLONGLEGGEDDOJI", ")" ]
long legged doji .
train
false
10,111
def test_step_represent_matrix(): step = core.Step.from_string(STEP_WITH_MATRIX2) assert_equals(step.represent_columns(), ' | a | a |\n | 2 | a |\n | | 67|\n')
[ "def", "test_step_represent_matrix", "(", ")", ":", "step", "=", "core", ".", "Step", ".", "from_string", "(", "STEP_WITH_MATRIX2", ")", "assert_equals", "(", "step", ".", "represent_columns", "(", ")", ",", "' | a | a |\\n | 2 | a |\\n | | 67|\\n'", ")" ]
step with a more suggestive representation for a matrix .
train
false
10,113
def F_from_ransac(x1, x2, model, maxiter=5000, match_theshold=1e-06): import ransac data = vstack((x1, x2)) (F, ransac_data) = ransac.ransac(data.T, model, 8, maxiter, match_theshold, 20, return_all=True) return (F, ransac_data['inliers'])
[ "def", "F_from_ransac", "(", "x1", ",", "x2", ",", "model", ",", "maxiter", "=", "5000", ",", "match_theshold", "=", "1e-06", ")", ":", "import", "ransac", "data", "=", "vstack", "(", "(", "x1", ",", "x2", ")", ")", "(", "F", ",", "ransac_data", ")", "=", "ransac", ".", "ransac", "(", "data", ".", "T", ",", "model", ",", "8", ",", "maxiter", ",", "match_theshold", ",", "20", ",", "return_all", "=", "True", ")", "return", "(", "F", ",", "ransac_data", "[", "'inliers'", "]", ")" ]
robust estimation of a fundamental matrix f from point correspondences using ransac .
train
false
10,114
def toplevel(func): synctaskletfunc = synctasklet(func) @utils.wrapping(func) def add_context_wrapper(*args, **kwds): __ndb_debug__ = utils.func_info(func) _state.clear_all_pending() ctx = make_default_context() try: set_context(ctx) return synctaskletfunc(*args, **kwds) finally: set_context(None) ctx.flush().check_success() eventloop.run() return add_context_wrapper
[ "def", "toplevel", "(", "func", ")", ":", "synctaskletfunc", "=", "synctasklet", "(", "func", ")", "@", "utils", ".", "wrapping", "(", "func", ")", "def", "add_context_wrapper", "(", "*", "args", ",", "**", "kwds", ")", ":", "__ndb_debug__", "=", "utils", ".", "func_info", "(", "func", ")", "_state", ".", "clear_all_pending", "(", ")", "ctx", "=", "make_default_context", "(", ")", "try", ":", "set_context", "(", "ctx", ")", "return", "synctaskletfunc", "(", "*", "args", ",", "**", "kwds", ")", "finally", ":", "set_context", "(", "None", ")", "ctx", ".", "flush", "(", ")", ".", "check_success", "(", ")", "eventloop", ".", "run", "(", ")", "return", "add_context_wrapper" ]
a sync tasklet that sets a fresh default context .
train
true
10,115
def config_auto_int_value(value, default): if ((value is None) or (isinstance(value, six.string_types) and (value.lower() == 'auto'))): return default try: value = int(value) except (TypeError, ValueError): raise ValueError(('Config option must be an integer or the string "auto", not "%s".' % value)) return value
[ "def", "config_auto_int_value", "(", "value", ",", "default", ")", ":", "if", "(", "(", "value", "is", "None", ")", "or", "(", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", "and", "(", "value", ".", "lower", "(", ")", "==", "'auto'", ")", ")", ")", ":", "return", "default", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "(", "'Config option must be an integer or the string \"auto\", not \"%s\".'", "%", "value", ")", ")", "return", "value" ]
returns default if value is none or auto .
train
false
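A minimal sketch of the coercion rules, assuming config_auto_int_value and its six import are in scope:
# Minimal usage sketch, assuming config_auto_int_value (and its six
# dependency) is importable.
assert config_auto_int_value(None, 8) == 8    # missing option -> default
assert config_auto_int_value('AUTO', 8) == 8  # 'auto' is case-insensitive
assert config_auto_int_value('12', 8) == 12   # strings are coerced to int
try:
    config_auto_int_value('twelve', 8)
except ValueError:
    pass  # anything else raises ValueError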
10,116
def getDOMImplementation(name=None, features=()): import os creator = None mod = well_known_implementations.get(name) if mod: mod = __import__(mod, {}, {}, ['getDOMImplementation']) return mod.getDOMImplementation() elif name: return registered[name]() elif ('PYTHON_DOM' in os.environ): return getDOMImplementation(name=os.environ['PYTHON_DOM']) if isinstance(features, str): features = _parse_feature_string(features) for creator in registered.values(): dom = creator() if _good_enough(dom, features): return dom for creator in well_known_implementations.keys(): try: dom = getDOMImplementation(name=creator) except Exception: continue if _good_enough(dom, features): return dom raise ImportError('no suitable DOM implementation found')
[ "def", "getDOMImplementation", "(", "name", "=", "None", ",", "features", "=", "(", ")", ")", ":", "import", "os", "creator", "=", "None", "mod", "=", "well_known_implementations", ".", "get", "(", "name", ")", "if", "mod", ":", "mod", "=", "__import__", "(", "mod", ",", "{", "}", ",", "{", "}", ",", "[", "'getDOMImplementation'", "]", ")", "return", "mod", ".", "getDOMImplementation", "(", ")", "elif", "name", ":", "return", "registered", "[", "name", "]", "(", ")", "elif", "(", "'PYTHON_DOM'", "in", "os", ".", "environ", ")", ":", "return", "getDOMImplementation", "(", "name", "=", "os", ".", "environ", "[", "'PYTHON_DOM'", "]", ")", "if", "isinstance", "(", "features", ",", "str", ")", ":", "features", "=", "_parse_feature_string", "(", "features", ")", "for", "creator", "in", "registered", ".", "values", "(", ")", ":", "dom", "=", "creator", "(", ")", "if", "_good_enough", "(", "dom", ",", "features", ")", ":", "return", "dom", "for", "creator", "in", "well_known_implementations", ".", "keys", "(", ")", ":", "try", ":", "dom", "=", "getDOMImplementation", "(", "name", "=", "creator", ")", "except", "Exception", ":", "continue", "if", "_good_enough", "(", "dom", ",", "features", ")", ":", "return", "dom", "raise", "ImportError", "(", "'no suitable DOM implementation found'", ")" ]
getdomimplementation(name = none, features = ()) -> return a suitable dom implementation .
train
false
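This matches the standard library's xml.dom.getDOMImplementation, so a short stdlib example shows typical use:
from xml.dom import getDOMImplementation

impl = getDOMImplementation()                  # any suitable implementation
doc = impl.createDocument(None, 'root', None)  # namespace, qualified name, doctype
print(doc.documentElement.tagName)             # -> root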
10,119
def get_references(file_name, encoding='utf-8'): text = '' if (file_name is not None): if os.path.exists(file_name): try: with codecs.open(file_name, 'r', encoding=encoding) as f: text = f.read() except: print traceback.format_exc() else: print ('Could not find reference file %s!' % file_name) return text
[ "def", "get_references", "(", "file_name", ",", "encoding", "=", "'utf-8'", ")", ":", "text", "=", "''", "if", "(", "file_name", "is", "not", "None", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "file_name", ")", ":", "try", ":", "with", "codecs", ".", "open", "(", "file_name", ",", "'r'", ",", "encoding", "=", "encoding", ")", "as", "f", ":", "text", "=", "f", ".", "read", "(", ")", "except", ":", "print", "traceback", ".", "format_exc", "(", ")", "else", ":", "print", "(", "'Could not find reference file %s!'", "%", "file_name", ")", "return", "text" ]
get footnote and general references from outside source .
train
false
10,121
@login_required def view_survey(request, survey_name): redirect_url = request.GET.get('redirect_url') return view_student_survey(request.user, survey_name, redirect_url=redirect_url)
[ "@", "login_required", "def", "view_survey", "(", "request", ",", "survey_name", ")", ":", "redirect_url", "=", "request", ".", "GET", ".", "get", "(", "'redirect_url'", ")", "return", "view_student_survey", "(", "request", ".", "user", ",", "survey_name", ",", "redirect_url", "=", "redirect_url", ")" ]
view to render the survey to the end user .
train
false
10,123
def mapto_v6(addr): try: inet_pton(socket.AF_INET, addr) return '::ffff:{}'.format(addr) except socket.error: try: inet_pton(socket.AF_INET6, addr) return addr except socket.error: log.debug('%s is not a valid IP address.', addr) return None
[ "def", "mapto_v6", "(", "addr", ")", ":", "try", ":", "inet_pton", "(", "socket", ".", "AF_INET", ",", "addr", ")", "return", "'::ffff:{}'", ".", "format", "(", "addr", ")", "except", "socket", ".", "error", ":", "try", ":", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "addr", ")", "return", "addr", "except", "socket", ".", "error", ":", "log", ".", "debug", "(", "'%s is not a valid IP address.'", ",", "addr", ")", "return", "None" ]
map an ipv4 address to an ipv6 one .
train
false
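A minimal usage sketch, assuming mapto_v6 and the inet_pton/log names it references are in scope:
# Minimal usage sketch, assuming mapto_v6 and its inet_pton/log
# dependencies are in scope.
assert mapto_v6('192.0.2.1') == '::ffff:192.0.2.1'  # IPv4 -> mapped IPv6
assert mapto_v6('2001:db8::1') == '2001:db8::1'     # IPv6 passes through
assert mapto_v6('not-an-ip') is None                # invalid input -> None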
10,124
def wrap_socket(sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE, ssl_version=PROTOCOL_SSLv23, ca_certs=None, do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None): return SSLSocket(sock, keyfile=keyfile, certfile=certfile, server_side=server_side, cert_reqs=cert_reqs, ssl_version=ssl_version, ca_certs=ca_certs, do_handshake_on_connect=do_handshake_on_connect, suppress_ragged_eofs=suppress_ragged_eofs, ciphers=ciphers)
[ "def", "wrap_socket", "(", "sock", ",", "keyfile", "=", "None", ",", "certfile", "=", "None", ",", "server_side", "=", "False", ",", "cert_reqs", "=", "CERT_NONE", ",", "ssl_version", "=", "PROTOCOL_SSLv23", ",", "ca_certs", "=", "None", ",", "do_handshake_on_connect", "=", "True", ",", "suppress_ragged_eofs", "=", "True", ",", "ciphers", "=", "None", ")", ":", "return", "SSLSocket", "(", "sock", ",", "keyfile", "=", "keyfile", ",", "certfile", "=", "certfile", ",", "server_side", "=", "server_side", ",", "cert_reqs", "=", "cert_reqs", ",", "ssl_version", "=", "ssl_version", ",", "ca_certs", "=", "ca_certs", ",", "do_handshake_on_connect", "=", "do_handshake_on_connect", ",", "suppress_ragged_eofs", "=", "suppress_ragged_eofs", ",", "ciphers", "=", "ciphers", ")" ]
wrap an existing socket and return a new :class:sslsocket instance .
train
false
10,125
def tight_layout(pad=1.2, h_pad=None, w_pad=None, fig=None): import matplotlib.pyplot as plt fig = (plt.gcf() if (fig is None) else fig) fig.canvas.draw() try: fig.tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad) except Exception: try: fig.set_tight_layout(dict(pad=pad, h_pad=h_pad, w_pad=w_pad)) except Exception: warn('Matplotlib function "tight_layout" is not supported. Skipping subplot adjustment.')
[ "def", "tight_layout", "(", "pad", "=", "1.2", ",", "h_pad", "=", "None", ",", "w_pad", "=", "None", ",", "fig", "=", "None", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "fig", "=", "(", "plt", ".", "gcf", "(", ")", "if", "(", "fig", "is", "None", ")", "else", "fig", ")", "fig", ".", "canvas", ".", "draw", "(", ")", "try", ":", "fig", ".", "tight_layout", "(", "pad", "=", "pad", ",", "h_pad", "=", "h_pad", ",", "w_pad", "=", "w_pad", ")", "except", "Exception", ":", "try", ":", "fig", ".", "set_tight_layout", "(", "dict", "(", "pad", "=", "pad", ",", "h_pad", "=", "h_pad", ",", "w_pad", "=", "w_pad", ")", ")", "except", "Exception", ":", "warn", "(", "'Matplotlib function \"tight_layout\" is not supported. Skipping subplot adjustment.'", ")" ]
automatically adjust subplot parameters to give specified padding .
train
false
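What the helper wraps can be reproduced with public matplotlib API; a minimal sketch:
import matplotlib
matplotlib.use('Agg')  # non-interactive backend for the sketch
import matplotlib.pyplot as plt

fig, axes = plt.subplots(2, 2)
for ax in axes.flat:
    ax.set_title('panel')
fig.canvas.draw()
fig.tight_layout(pad=1.2)  # the call the helper attempts first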
10,127
@raises(ValueError) def test_raises_value_error_non_2dim(): gth_solve(np.array([0.4, 0.6]))
[ "@", "raises", "(", "ValueError", ")", "def", "test_raises_value_error_non_2dim", "(", ")", ":", "gth_solve", "(", "np", ".", "array", "(", "[", "0.4", ",", "0.6", "]", ")", ")" ]
test with non 2dim input .
train
false
10,128
def get_axis_properties(axis): props = {} label1On = axis._major_tick_kw.get('label1On', True) if isinstance(axis, matplotlib.axis.XAxis): if label1On: props['position'] = 'bottom' else: props['position'] = 'top' elif isinstance(axis, matplotlib.axis.YAxis): if label1On: props['position'] = 'left' else: props['position'] = 'right' else: raise ValueError('{0} should be an Axis instance'.format(axis)) locator = axis.get_major_locator() props['nticks'] = len(locator()) if isinstance(locator, ticker.FixedLocator): props['tickvalues'] = list(locator()) else: props['tickvalues'] = None formatter = axis.get_major_formatter() if isinstance(formatter, ticker.NullFormatter): props['tickformat'] = '' elif isinstance(formatter, ticker.FixedFormatter): props['tickformat'] = list(formatter.seq) elif (not any((label.get_visible() for label in axis.get_ticklabels()))): props['tickformat'] = '' else: props['tickformat'] = None props['scale'] = axis.get_scale() labels = axis.get_ticklabels() if labels: props['fontsize'] = labels[0].get_fontsize() else: props['fontsize'] = None props['grid'] = get_grid_style(axis) props['visible'] = axis.get_visible() return props
[ "def", "get_axis_properties", "(", "axis", ")", ":", "props", "=", "{", "}", "label1On", "=", "axis", ".", "_major_tick_kw", ".", "get", "(", "'label1On'", ",", "True", ")", "if", "isinstance", "(", "axis", ",", "matplotlib", ".", "axis", ".", "XAxis", ")", ":", "if", "label1On", ":", "props", "[", "'position'", "]", "=", "'bottom'", "else", ":", "props", "[", "'position'", "]", "=", "'top'", "elif", "isinstance", "(", "axis", ",", "matplotlib", ".", "axis", ".", "YAxis", ")", ":", "if", "label1On", ":", "props", "[", "'position'", "]", "=", "'left'", "else", ":", "props", "[", "'position'", "]", "=", "'right'", "else", ":", "raise", "ValueError", "(", "'{0} should be an Axis instance'", ".", "format", "(", "axis", ")", ")", "locator", "=", "axis", ".", "get_major_locator", "(", ")", "props", "[", "'nticks'", "]", "=", "len", "(", "locator", "(", ")", ")", "if", "isinstance", "(", "locator", ",", "ticker", ".", "FixedLocator", ")", ":", "props", "[", "'tickvalues'", "]", "=", "list", "(", "locator", "(", ")", ")", "else", ":", "props", "[", "'tickvalues'", "]", "=", "None", "formatter", "=", "axis", ".", "get_major_formatter", "(", ")", "if", "isinstance", "(", "formatter", ",", "ticker", ".", "NullFormatter", ")", ":", "props", "[", "'tickformat'", "]", "=", "''", "elif", "isinstance", "(", "formatter", ",", "ticker", ".", "FixedFormatter", ")", ":", "props", "[", "'tickformat'", "]", "=", "list", "(", "formatter", ".", "seq", ")", "elif", "(", "not", "any", "(", "(", "label", ".", "get_visible", "(", ")", "for", "label", "in", "axis", ".", "get_ticklabels", "(", ")", ")", ")", ")", ":", "props", "[", "'tickformat'", "]", "=", "''", "else", ":", "props", "[", "'tickformat'", "]", "=", "None", "props", "[", "'scale'", "]", "=", "axis", ".", "get_scale", "(", ")", "labels", "=", "axis", ".", "get_ticklabels", "(", ")", "if", "labels", ":", "props", "[", "'fontsize'", "]", "=", "labels", "[", "0", "]", ".", "get_fontsize", "(", ")", "else", ":", "props", "[", "'fontsize'", "]", "=", "None", "props", "[", "'grid'", "]", "=", "get_grid_style", "(", "axis", ")", "props", "[", "'visible'", "]", "=", "axis", ".", "get_visible", "(", ")", "return", "props" ]
return the property dictionary for a matplotlib axis instance .
train
true
10,132
def _ci(arr, ci): from scipy import stats (mean, sigma) = (arr.mean(0), stats.sem(arr, 0)) return np.asarray([stats.t.interval(ci, arr.shape[0], loc=mean_, scale=sigma_) for (mean_, sigma_) in zip(mean, sigma)]).T
[ "def", "_ci", "(", "arr", ",", "ci", ")", ":", "from", "scipy", "import", "stats", "(", "mean", ",", "sigma", ")", "=", "(", "arr", ".", "mean", "(", "0", ")", ",", "stats", ".", "sem", "(", "arr", ",", "0", ")", ")", "return", "np", ".", "asarray", "(", "[", "stats", ".", "t", ".", "interval", "(", "ci", ",", "arr", ".", "shape", "[", "0", "]", ",", "loc", "=", "mean_", ",", "scale", "=", "sigma_", ")", "for", "(", "mean_", ",", "sigma_", ")", "in", "zip", "(", "mean", ",", "sigma", ")", "]", ")", ".", "T" ]
calculate the ci% parametric confidence interval for arr .
train
false
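A minimal usage sketch, assuming _ci and scipy are available; rows are observations, columns are variables, and the two rows of the result are the lower and upper bounds:
# Minimal usage sketch, assuming _ci and scipy are available.
import numpy as np

rng = np.random.RandomState(0)
arr = rng.randn(100, 3)         # 100 observations of 3 variables
low, high = _ci(arr, 0.95)      # each bound has shape (3,)
assert np.all(low < arr.mean(0)) and np.all(arr.mean(0) < high)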
10,134
def PearsonMedianSkewness(xs): median = Median(xs) mean = RawMoment(xs, 1) var = CentralMoment(xs, 2) std = math.sqrt(var) gp = ((3 * (mean - median)) / std) return gp
[ "def", "PearsonMedianSkewness", "(", "xs", ")", ":", "median", "=", "Median", "(", "xs", ")", "mean", "=", "RawMoment", "(", "xs", ",", "1", ")", "var", "=", "CentralMoment", "(", "xs", ",", "2", ")", "std", "=", "math", ".", "sqrt", "(", "var", ")", "gp", "=", "(", "(", "3", "*", "(", "mean", "-", "median", ")", ")", "/", "std", ")", "return", "gp" ]
computes the pearson median skewness .
train
false
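A self-contained numpy sketch of the same statistic; the Median/RawMoment/CentralMoment helpers above are assumed to come from the surrounding ThinkStats-style module:
# Self-contained numpy equivalent of the statistic above.
import numpy as np

def pearson_median_skewness(xs):
    xs = np.asarray(xs, dtype=float)
    return 3 * (xs.mean() - np.median(xs)) / xs.std()

print(pearson_median_skewness([1, 2, 2, 3, 10]))  # right-skewed -> positive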
10,135
def _retry_exception_async(reactor, f, steps=((0.1,) * 10)): saved_failure = [None] saved_result = [None] def handle_success(result): saved_result[0] = result return True def handle_failure(failure): Message.log(message_type=u'flocker:provision:libcloud:retry_exception:got_exception') write_failure(failure) saved_failure[0] = failure return False def make_call(): d = maybeDeferred(f) d = DeferredContext(d) d.addCallbacks(handle_success, errback=handle_failure) return d.result action = start_action(action_type=u'flocker:provision:libcloud:retry_exception', function=function_serializer(f)) with action.context(): d = loop_until(reactor, make_call, steps) d = DeferredContext(d) d.addCallbacks((lambda _: saved_result[0]), errback=(lambda _: saved_failure[0])) return d.addActionFinish()
[ "def", "_retry_exception_async", "(", "reactor", ",", "f", ",", "steps", "=", "(", "(", "0.1", ",", ")", "*", "10", ")", ")", ":", "saved_failure", "=", "[", "None", "]", "saved_result", "=", "[", "None", "]", "def", "handle_success", "(", "result", ")", ":", "saved_result", "[", "0", "]", "=", "result", "return", "True", "def", "handle_failure", "(", "failure", ")", ":", "Message", ".", "log", "(", "message_type", "=", "u'flocker:provision:libcloud:retry_exception:got_exception'", ")", "write_failure", "(", "failure", ")", "saved_failure", "[", "0", "]", "=", "failure", "return", "False", "def", "make_call", "(", ")", ":", "d", "=", "maybeDeferred", "(", "f", ")", "d", "=", "DeferredContext", "(", "d", ")", "d", ".", "addCallbacks", "(", "handle_success", ",", "errback", "=", "handle_failure", ")", "return", "d", ".", "result", "action", "=", "start_action", "(", "action_type", "=", "u'flocker:provision:libcloud:retry_exception'", ",", "function", "=", "function_serializer", "(", "f", ")", ")", "with", "action", ".", "context", "(", ")", ":", "d", "=", "loop_until", "(", "reactor", ",", "make_call", ",", "steps", ")", "d", "=", "DeferredContext", "(", "d", ")", "d", ".", "addCallbacks", "(", "(", "lambda", "_", ":", "saved_result", "[", "0", "]", ")", ",", "errback", "=", "(", "lambda", "_", ":", "saved_failure", "[", "0", "]", ")", ")", "return", "d", ".", "addActionFinish", "(", ")" ]
retry a function if it raises an exception .
train
false
10,136
def fractional_matrix_power(A, t): A = _asarray_square(A) import scipy.linalg._matfuncs_inv_ssq return scipy.linalg._matfuncs_inv_ssq._fractional_matrix_power(A, t)
[ "def", "fractional_matrix_power", "(", "A", ",", "t", ")", ":", "A", "=", "_asarray_square", "(", "A", ")", "import", "scipy", ".", "linalg", ".", "_matfuncs_inv_ssq", "return", "scipy", ".", "linalg", ".", "_matfuncs_inv_ssq", ".", "_fractional_matrix_power", "(", "A", ",", "t", ")" ]
compute the fractional power of a matrix .
train
false
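The public entry point is scipy.linalg.fractional_matrix_power; with t = 0.5 it yields a matrix square root:
import numpy as np
from scipy.linalg import fractional_matrix_power

A = np.array([[1.0, 0.0], [0.0, 4.0]])
B = fractional_matrix_power(A, 0.5)   # t = 0.5 -> matrix square root
assert np.allclose(np.dot(B, B), A)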
10,137
def removeSVGFile(svgFilePath): if archive.getEndsWithList(svgFilePath, ['_bottom.svg', '_carve.svg', '_chop.svg', '_cleave.svg', '_scale.svg', '_vectorwrite.svg']): os.remove(svgFilePath) print ('removeGeneratedFiles deleted ' + svgFilePath)
[ "def", "removeSVGFile", "(", "svgFilePath", ")", ":", "if", "archive", ".", "getEndsWithList", "(", "svgFilePath", ",", "[", "'_bottom.svg'", ",", "'_carve.svg'", ",", "'_chop.svg'", ",", "'_cleave.svg'", ",", "'_scale.svg'", ",", "'_vectorwrite.svg'", "]", ")", ":", "os", ".", "remove", "(", "svgFilePath", ")", "print", "(", "'removeGeneratedFiles deleted '", "+", "svgFilePath", ")" ]
remove the svg file if it is a generated svg file .
train
false
10,138
def _add_current_user_id(graph, user): if graph: graph.current_user_id = None if user.is_authenticated(): profile = try_get_profile(user) facebook_id = get_user_attribute(user, profile, 'facebook_id') if facebook_id: graph.current_user_id = facebook_id
[ "def", "_add_current_user_id", "(", "graph", ",", "user", ")", ":", "if", "graph", ":", "graph", ".", "current_user_id", "=", "None", "if", "user", ".", "is_authenticated", "(", ")", ":", "profile", "=", "try_get_profile", "(", "user", ")", "facebook_id", "=", "get_user_attribute", "(", "user", ",", "profile", ",", "'facebook_id'", ")", "if", "facebook_id", ":", "graph", ".", "current_user_id", "=", "facebook_id" ]
set the current user id .
train
false
10,139
def filldedent(s, w=70): return ('\n' + fill(dedent(str(s)).strip('\n'), width=w))
[ "def", "filldedent", "(", "s", ",", "w", "=", "70", ")", ":", "return", "(", "'\\n'", "+", "fill", "(", "dedent", "(", "str", "(", "s", ")", ")", ".", "strip", "(", "'\\n'", ")", ",", "width", "=", "w", ")", ")" ]
strips leading and trailing empty lines from a copy of s, then dedents, fills, and returns it .
train
false
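A minimal usage sketch, assuming filldedent is in scope (it composes textwrap.dedent and textwrap.fill):
# Minimal usage sketch, assuming filldedent is in scope.
msg = filldedent('''
    This indented, multi-line message is dedented, stripped of
    blank edge lines, and re-wrapped to the requested width.
    ''', w=40)
print(msg)  # a leading newline, then lines no wider than 40 columns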
10,140
def _is_1(expr): try: v = opt.get_scalar_constant_value(expr) return numpy.allclose(v, 1) except tensor.NotScalarConstantError: return False
[ "def", "_is_1", "(", "expr", ")", ":", "try", ":", "v", "=", "opt", ".", "get_scalar_constant_value", "(", "expr", ")", "return", "numpy", ".", "allclose", "(", "v", ",", "1", ")", "except", "tensor", ".", "NotScalarConstantError", ":", "return", "False" ]
returns bool true iff expr is a constant close to 1 .
train
false
10,141
def get_preview_fragment(request, descriptor, context): module = _load_preview_module(request, descriptor) preview_view = (AUTHOR_VIEW if has_author_view(module) else STUDENT_VIEW) try: fragment = module.render(preview_view, context) except Exception as exc: log.warning('Unable to render %s for %r', preview_view, module, exc_info=True) fragment = Fragment(render_to_string('html_error.html', {'message': str(exc)})) return fragment
[ "def", "get_preview_fragment", "(", "request", ",", "descriptor", ",", "context", ")", ":", "module", "=", "_load_preview_module", "(", "request", ",", "descriptor", ")", "preview_view", "=", "(", "AUTHOR_VIEW", "if", "has_author_view", "(", "module", ")", "else", "STUDENT_VIEW", ")", "try", ":", "fragment", "=", "module", ".", "render", "(", "preview_view", ",", "context", ")", "except", "Exception", "as", "exc", ":", "log", ".", "warning", "(", "'Unable to render %s for %r'", ",", "preview_view", ",", "module", ",", "exc_info", "=", "True", ")", "fragment", "=", "Fragment", "(", "render_to_string", "(", "'html_error.html'", ",", "{", "'message'", ":", "str", "(", "exc", ")", "}", ")", ")", "return", "fragment" ]
returns the html returned by the xmodules student_view or author_view .
train
false
10,142
def test_ensure_list(): schema = vol.Schema(cv.ensure_list) assert ([] == schema(None)) assert ([1] == schema(1)) assert ([1] == schema([1])) assert (['1'] == schema('1')) assert (['1'] == schema(['1'])) assert ([{'1': '2'}] == schema({'1': '2'}))
[ "def", "test_ensure_list", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "ensure_list", ")", "assert", "(", "[", "]", "==", "schema", "(", "None", ")", ")", "assert", "(", "[", "1", "]", "==", "schema", "(", "1", ")", ")", "assert", "(", "[", "1", "]", "==", "schema", "(", "[", "1", "]", ")", ")", "assert", "(", "[", "'1'", "]", "==", "schema", "(", "'1'", ")", ")", "assert", "(", "[", "'1'", "]", "==", "schema", "(", "[", "'1'", "]", ")", ")", "assert", "(", "[", "{", "'1'", ":", "'2'", "}", "]", "==", "schema", "(", "{", "'1'", ":", "'2'", "}", ")", ")" ]
test ensure_list .
train
false
10,143
def first_diff(a, b): i = (-1) for i in xrange(0, len(a)): if (a[i] != b[i]): return i if (i == 255): return i
[ "def", "first_diff", "(", "a", ",", "b", ")", ":", "i", "=", "(", "-", "1", ")", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "a", ")", ")", ":", "if", "(", "a", "[", "i", "]", "!=", "b", "[", "i", "]", ")", ":", "return", "i", "if", "(", "i", "==", "255", ")", ":", "return", "i" ]
returns the position of the first differing character in the strings a and b .
train
false
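A minimal usage sketch, assuming the corrected first_diff is in scope:
# Minimal usage sketch, assuming the corrected first_diff is in scope.
assert first_diff('abcd', 'abXd') == 2     # first mismatch at index 2
assert first_diff('same', 'same') is None  # no difference found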
10,145
def _check_window_params(data, window_length): if (window_length < 1): raise WindowLengthNotPositive(window_length=window_length) if (window_length > data.shape[0]): raise WindowLengthTooLong(nrows=data.shape[0], window_length=window_length)
[ "def", "_check_window_params", "(", "data", ",", "window_length", ")", ":", "if", "(", "window_length", "<", "1", ")", ":", "raise", "WindowLengthNotPositive", "(", "window_length", "=", "window_length", ")", "if", "(", "window_length", ">", "data", ".", "shape", "[", "0", "]", ")", ":", "raise", "WindowLengthTooLong", "(", "nrows", "=", "data", ".", "shape", "[", "0", "]", ",", "window_length", "=", "window_length", ")" ]
check that a window of length window_length is well-defined on data .
train
true
10,146
def test_multiple_subordinate_steps_are_run(): @step('I run two subordinate steps') def two_subordinate_steps(step): step.behave_as('\n When I run the first sub-step\n And I run the second sub-step\n ') global first_ran global second_ran first_ran = False second_ran = False @step('I run the first sub-step$') def increment(step): global first_ran first_ran = True @step('I run the second sub-step') def increment_twice(step): global second_ran second_ran = True runnable_step = Step.from_string('Given I run two subordinate steps') runnable_step.run(True) assert_equals((first_ran, second_ran), (True, True)) del first_ran del second_ran
[ "def", "test_multiple_subordinate_steps_are_run", "(", ")", ":", "@", "step", "(", "'I run two subordinate steps'", ")", "def", "two_subordinate_steps", "(", "step", ")", ":", "step", ".", "behave_as", "(", "'\\n When I run the first sub-step\\n And I run the second sub-step\\n '", ")", "global", "first_ran", "global", "second_ran", "first_ran", "=", "False", "second_ran", "=", "False", "@", "step", "(", "'I run the first sub-step$'", ")", "def", "increment", "(", "step", ")", ":", "global", "first_ran", "first_ran", "=", "True", "@", "step", "(", "'I run the second sub-step'", ")", "def", "increment_twice", "(", "step", ")", ":", "global", "second_ran", "second_ran", "=", "True", "runnable_step", "=", "Step", ".", "from_string", "(", "'Given I run two subordinate steps'", ")", "runnable_step", ".", "run", "(", "True", ")", "assert_equals", "(", "(", "first_ran", ",", "second_ran", ")", ",", "(", "True", ",", "True", ")", ")", "del", "first_ran", "del", "second_ran" ]
when a step definition calls two subordinate step definitions, both should be run .
train
false
10,147
def default_listener(col_attr, default): @event.listens_for(col_attr, 'init_scalar', retval=True, propagate=True) def init_scalar(target, value, dict_): if default.is_callable: value = default.arg(None) elif default.is_scalar: value = default.arg else: raise NotImplementedError("Can't invoke pre-default for a SQL-level column default") dict_[col_attr.key] = value return value
[ "def", "default_listener", "(", "col_attr", ",", "default", ")", ":", "@", "event", ".", "listens_for", "(", "col_attr", ",", "'init_scalar'", ",", "retval", "=", "True", ",", "propagate", "=", "True", ")", "def", "init_scalar", "(", "target", ",", "value", ",", "dict_", ")", ":", "if", "default", ".", "is_callable", ":", "value", "=", "default", ".", "arg", "(", "None", ")", "elif", "default", ".", "is_scalar", ":", "value", "=", "default", ".", "arg", "else", ":", "raise", "NotImplementedError", "(", "\"Can't invoke pre-default for a SQL-level column default\"", ")", "dict_", "[", "col_attr", ".", "key", "]", "=", "value", "return", "value" ]
establish a default-setting listener .
train
true
10,148
def get_dataset(name, split_name, dataset_dir, file_pattern=None, reader=None): if (name not in datasets_map): raise ValueError(('Name of dataset unknown %s' % name)) return datasets_map[name].get_split(split_name, dataset_dir, file_pattern, reader)
[ "def", "get_dataset", "(", "name", ",", "split_name", ",", "dataset_dir", ",", "file_pattern", "=", "None", ",", "reader", "=", "None", ")", ":", "if", "(", "name", "not", "in", "datasets_map", ")", ":", "raise", "ValueError", "(", "(", "'Name of dataset unknown %s'", "%", "name", ")", ")", "return", "datasets_map", "[", "name", "]", ".", "get_split", "(", "split_name", ",", "dataset_dir", ",", "file_pattern", ",", "reader", ")" ]
given a dataset name and a split_name, returns a dataset .
train
false
10,149
def _verify_bsgs(group, base, gens): from sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer = group for i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if (current_stabilizer.order() != candidate.order()): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if (current_stabilizer.order() != 1): return False return True
[ "def", "_verify_bsgs", "(", "group", ",", "base", ",", "gens", ")", ":", "from", "sympy", ".", "combinatorics", ".", "perm_groups", "import", "PermutationGroup", "strong_gens_distr", "=", "_distribute_gens_by_base", "(", "base", ",", "gens", ")", "current_stabilizer", "=", "group", "for", "i", "in", "range", "(", "len", "(", "base", ")", ")", ":", "candidate", "=", "PermutationGroup", "(", "strong_gens_distr", "[", "i", "]", ")", "if", "(", "current_stabilizer", ".", "order", "(", ")", "!=", "candidate", ".", "order", "(", ")", ")", ":", "return", "False", "current_stabilizer", "=", "current_stabilizer", ".", "stabilizer", "(", "base", "[", "i", "]", ")", "if", "(", "current_stabilizer", ".", "order", "(", ")", "!=", "1", ")", ":", "return", "False", "return", "True" ]
verify the correctness of a base and strong generating set .
train
false
10,150
def cornacchia(a, b, m): sols = set() a1 = igcdex(a, m)[0] v = sqrt_mod(((- b) * a1), m, all_roots=True) if (not v): return None for t in v: if (t < (m // 2)): continue (u, r) = (t, m) while True: (u, r) = (r, (u % r)) if ((a * (r ** 2)) < m): break m1 = (m - (a * (r ** 2))) if ((m1 % b) == 0): m1 = (m1 // b) (s, _exact) = integer_nthroot(m1, 2) if _exact: if ((a == b) and (r < s)): (r, s) = (s, r) sols.add((int(r), int(s))) return sols
[ "def", "cornacchia", "(", "a", ",", "b", ",", "m", ")", ":", "sols", "=", "set", "(", ")", "a1", "=", "igcdex", "(", "a", ",", "m", ")", "[", "0", "]", "v", "=", "sqrt_mod", "(", "(", "(", "-", "b", ")", "*", "a1", ")", ",", "m", ",", "all_roots", "=", "True", ")", "if", "(", "not", "v", ")", ":", "return", "None", "for", "t", "in", "v", ":", "if", "(", "t", "<", "(", "m", "//", "2", ")", ")", ":", "continue", "(", "u", ",", "r", ")", "=", "(", "t", ",", "m", ")", "while", "True", ":", "(", "u", ",", "r", ")", "=", "(", "r", ",", "(", "u", "%", "r", ")", ")", "if", "(", "(", "a", "*", "(", "r", "**", "2", ")", ")", "<", "m", ")", ":", "break", "m1", "=", "(", "m", "-", "(", "a", "*", "(", "r", "**", "2", ")", ")", ")", "if", "(", "(", "m1", "%", "b", ")", "==", "0", ")", ":", "m1", "=", "(", "m1", "//", "b", ")", "(", "s", ",", "_exact", ")", "=", "integer_nthroot", "(", "m1", ",", "2", ")", "if", "_exact", ":", "if", "(", "(", "a", "==", "b", ")", "and", "(", "r", "<", "s", ")", ")", ":", "(", "r", ",", "s", ")", "=", "(", "s", ",", "r", ")", "sols", ".", "add", "(", "(", "int", "(", "r", ")", ",", "int", "(", "s", ")", ")", ")", "return", "sols" ]
solves ax^2 + by^2 = m where gcd(a, b) = 1 = gcd(a, m) and a, b > 0 .
train
false
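The returned pairs can be checked directly against the equation; a sketch assuming cornacchia and the sympy helpers it calls (igcdex, sqrt_mod, integer_nthroot) are importable, with values taken from sympy's doctest for this routine:
# Verification sketch, assuming cornacchia and its sympy helpers
# (igcdex, sqrt_mod, integer_nthroot) are importable.
a, b, m = 2, 3, 35
sols = cornacchia(a, b, m)  # {(2, 3), (4, 1)}
for x, y in sols:
    assert a * x ** 2 + b * y ** 2 == m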
10,151
def cubehelix_palette(n_colors=6, start=0, rot=0.4, gamma=1.0, hue=0.8, light=0.85, dark=0.15, reverse=False, as_cmap=False): cdict = mpl._cm.cubehelix(gamma, start, rot, hue) cmap = mpl.colors.LinearSegmentedColormap('cubehelix', cdict) x = np.linspace(light, dark, n_colors) pal = cmap(x)[:, :3].tolist() if reverse: pal = pal[::(-1)] if as_cmap: x_256 = np.linspace(light, dark, 256) if reverse: x_256 = x_256[::(-1)] pal_256 = cmap(x_256) cmap = mpl.colors.ListedColormap(pal_256) return cmap else: return _ColorPalette(pal)
[ "def", "cubehelix_palette", "(", "n_colors", "=", "6", ",", "start", "=", "0", ",", "rot", "=", "0.4", ",", "gamma", "=", "1.0", ",", "hue", "=", "0.8", ",", "light", "=", "0.85", ",", "dark", "=", "0.15", ",", "reverse", "=", "False", ",", "as_cmap", "=", "False", ")", ":", "cdict", "=", "mpl", ".", "_cm", ".", "cubehelix", "(", "gamma", ",", "start", ",", "rot", ",", "hue", ")", "cmap", "=", "mpl", ".", "colors", ".", "LinearSegmentedColormap", "(", "'cubehelix'", ",", "cdict", ")", "x", "=", "np", ".", "linspace", "(", "light", ",", "dark", ",", "n_colors", ")", "pal", "=", "cmap", "(", "x", ")", "[", ":", ",", ":", "3", "]", ".", "tolist", "(", ")", "if", "reverse", ":", "pal", "=", "pal", "[", ":", ":", "(", "-", "1", ")", "]", "if", "as_cmap", ":", "x_256", "=", "np", ".", "linspace", "(", "light", ",", "dark", ",", "256", ")", "if", "reverse", ":", "x_256", "=", "x_256", "[", ":", ":", "(", "-", "1", ")", "]", "pal_256", "=", "cmap", "(", "x_256", ")", "cmap", "=", "mpl", ".", "colors", ".", "ListedColormap", "(", "pal_256", ")", "return", "cmap", "else", ":", "return", "_ColorPalette", "(", "pal", ")" ]
make a sequential palette from the cubehelix system .
train
true
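This function is public in seaborn as seaborn.cubehelix_palette; a short usage example:
import seaborn as sns

pal = sns.cubehelix_palette(8, start=0.5, rot=-0.75)  # list of 8 RGB tuples
cmap = sns.cubehelix_palette(as_cmap=True)            # matplotlib colormap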
10,152
def coset_enumeration_r(fp_grp, Y): C = CosetTable(fp_grp, Y) R = fp_grp.relators() A_dict = C.A_dict A_dict_inv = C.A_dict_inv p = C.p for w in Y: C.scan_and_fill(0, w) alpha = 0 while (alpha < C.n): if (p[alpha] == alpha): for w in R: C.scan_and_fill(alpha, w) if (p[alpha] < alpha): break if (p[alpha] == alpha): for x in A_dict: if (C.table[alpha][A_dict[x]] is None): C.define(alpha, x) alpha += 1 return C
[ "def", "coset_enumeration_r", "(", "fp_grp", ",", "Y", ")", ":", "C", "=", "CosetTable", "(", "fp_grp", ",", "Y", ")", "R", "=", "fp_grp", ".", "relators", "(", ")", "A_dict", "=", "C", ".", "A_dict", "A_dict_inv", "=", "C", ".", "A_dict_inv", "p", "=", "C", ".", "p", "for", "w", "in", "Y", ":", "C", ".", "scan_and_fill", "(", "0", ",", "w", ")", "alpha", "=", "0", "while", "(", "alpha", "<", "C", ".", "n", ")", ":", "if", "(", "p", "[", "alpha", "]", "==", "alpha", ")", ":", "for", "w", "in", "R", ":", "C", ".", "scan_and_fill", "(", "alpha", ",", "w", ")", "if", "(", "p", "[", "alpha", "]", "<", "alpha", ")", ":", "break", "if", "(", "p", "[", "alpha", "]", "==", "alpha", ")", ":", "for", "x", "in", "A_dict", ":", "if", "(", "C", ".", "table", "[", "alpha", "]", "[", "A_dict", "[", "x", "]", "]", "is", "None", ")", ":", "C", ".", "define", "(", "alpha", ",", "x", ")", "alpha", "+=", "1", "return", "C" ]
this is the easier of the two implemented methods of coset enumeration .
train
false
10,156
def lopen_loc(x): lineno = (x._lopen_lineno if hasattr(x, '_lopen_lineno') else x.lineno) col = (x._lopen_col if hasattr(x, '_lopen_col') else x.col_offset) return (lineno, col)
[ "def", "lopen_loc", "(", "x", ")", ":", "lineno", "=", "(", "x", ".", "_lopen_lineno", "if", "hasattr", "(", "x", ",", "'_lopen_lineno'", ")", "else", "x", ".", "lineno", ")", "col", "=", "(", "x", ".", "_lopen_col", "if", "hasattr", "(", "x", ",", "'_lopen_col'", ")", "else", "x", ".", "col_offset", ")", "return", "(", "lineno", ",", "col", ")" ]
extracts the line and column number for a node that may have an opening parenthesis .
train
false
10,157
def update_dir_prior(prior, N, logphat, rho): dprior = np.copy(prior) gradf = (N * ((psi(np.sum(prior)) - psi(prior)) + logphat)) c = (N * polygamma(1, np.sum(prior))) q = ((- N) * polygamma(1, prior)) b = (np.sum((gradf / q)) / ((1 / c) + np.sum((1 / q)))) dprior = ((- (gradf - b)) / q) if all((((rho * dprior) + prior) > 0)): prior += (rho * dprior) else: logger.warning('updated prior not positive') return prior
[ "def", "update_dir_prior", "(", "prior", ",", "N", ",", "logphat", ",", "rho", ")", ":", "dprior", "=", "np", ".", "copy", "(", "prior", ")", "gradf", "=", "(", "N", "*", "(", "(", "psi", "(", "np", ".", "sum", "(", "prior", ")", ")", "-", "psi", "(", "prior", ")", ")", "+", "logphat", ")", ")", "c", "=", "(", "N", "*", "polygamma", "(", "1", ",", "np", ".", "sum", "(", "prior", ")", ")", ")", "q", "=", "(", "(", "-", "N", ")", "*", "polygamma", "(", "1", ",", "prior", ")", ")", "b", "=", "(", "np", ".", "sum", "(", "(", "gradf", "/", "q", ")", ")", "/", "(", "(", "1", "/", "c", ")", "+", "np", ".", "sum", "(", "(", "1", "/", "q", ")", ")", ")", ")", "dprior", "=", "(", "(", "-", "(", "gradf", "-", "b", ")", ")", "/", "q", ")", "if", "all", "(", "(", "(", "(", "rho", "*", "dprior", ")", "+", "prior", ")", ">", "0", ")", ")", ":", "prior", "+=", "(", "rho", "*", "dprior", ")", "else", ":", "logger", ".", "warning", "(", "'updated prior not positive'", ")", "return", "prior" ]
updates a given prior using newtons method .
train
false
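A numerical sketch with illustrative values, assuming update_dir_prior plus numpy and scipy.special's psi/polygamma are in scope, as in gensim:
# Numerical sketch with illustrative values, assuming update_dir_prior
# plus numpy and scipy.special's psi/polygamma are in scope (as in gensim).
import numpy as np

prior = np.array([0.5, 0.5])
logphat = np.log(np.array([0.3, 0.7]))  # mean log of observed topic weights
updated = update_dir_prior(prior, N=100, logphat=logphat, rho=0.05)
assert updated.shape == (2,) and np.all(updated > 0)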
10,160
def _CheckLimit(limit): return _CheckInteger(limit, 'limit', zero_ok=False, upper_bound=MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH)
[ "def", "_CheckLimit", "(", "limit", ")", ":", "return", "_CheckInteger", "(", "limit", ",", "'limit'", ",", "zero_ok", "=", "False", ",", "upper_bound", "=", "MAXIMUM_DOCUMENTS_RETURNED_PER_SEARCH", ")" ]
checks the limit of documents to return is an integer within range .
train
false