Dataset schema (one row per column, with observed value or length ranges):

| column | type | observed range |
| --- | --- | --- |
| id_within_dataset | int64 | 1 to 55.5k |
| snippet | string | lengths 19 to 14.2k |
| tokens | list | lengths 6 to 1.63k |
| nl | string | lengths 6 to 352 |
| split_within_dataset | string | 1 distinct value |
| is_duplicated | bool | 2 classes |
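Each record below lists, in order: id_within_dataset, snippet, tokens, nl, split_within_dataset, and is_duplicated. As a minimal sketch of how such a dump could be consumed with the Hugging Face `datasets` library (the dataset path below is a hypothetical placeholder, not this dump's real name):

```python
# Minimal sketch: iterate the columns shown above with the `datasets` library.
# "user/code-nl-pairs" is a hypothetical placeholder path, not the real dataset.
from datasets import load_dataset

ds = load_dataset("user/code-nl-pairs", split="train")

# Drop rows flagged as duplicates, then inspect a few snippet/summary pairs.
unique = ds.filter(lambda row: not row["is_duplicated"])
for row in unique.select(range(3)):
    print(row["id_within_dataset"], "-", row["nl"])
    print(row["snippet"])
```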
53,083
def makeImage(argdata, c):
    size = len(argdata)
    img = Image.new('RGB', (size, size), 'black')
    pixels = img.load()
    for i in range(img.size[0]):
        for j in range(img.size[1]):
            pixels[(j, i)] = ((255, 255, 255) if (argdata[i][j] == '+') else (0, 0, 0))
    img = img.resize(((size * 10), (size * 10)))
    img.save(('qrcode%d.png' % c))
    return img
[ "def", "makeImage", "(", "argdata", ",", "c", ")", ":", "size", "=", "len", "(", "argdata", ")", "img", "=", "Image", ".", "new", "(", "'RGB'", ",", "(", "size", ",", "size", ")", ",", "'black'", ")", "pixels", "=", "img", ".", "load", "(", ")", "for", "i", "in", "range", "(", "img", ".", "size", "[", "0", "]", ")", ":", "for", "j", "in", "range", "(", "img", ".", "size", "[", "1", "]", ")", ":", "pixels", "[", "(", "j", ",", "i", ")", "]", "=", "(", "(", "255", ",", "255", ",", "255", ")", "if", "(", "argdata", "[", "i", "]", "[", "j", "]", "==", "'+'", ")", "else", "(", "0", ",", "0", ",", "0", ")", ")", "img", "=", "img", ".", "resize", "(", "(", "(", "size", "*", "10", ")", ",", "(", "size", "*", "10", ")", ")", ")", "img", ".", "save", "(", "(", "'qrcode%d.png'", "%", "c", ")", ")", "return", "img" ]
turn a code into an image .
train
false
53,084
def print_xen_object(obj_type, obj, indent_level=0, spaces_per_indent=4):
    if (not CONF.verbose):
        return
    uuid = obj['uuid']
    try:
        name_label = obj['name_label']
    except KeyError:
        name_label = ''
    msg = ("%s (%s) '%s'" % (obj_type, uuid, name_label))
    indent = ((' ' * spaces_per_indent) * indent_level)
    print ''.join([indent, msg])
[ "def", "print_xen_object", "(", "obj_type", ",", "obj", ",", "indent_level", "=", "0", ",", "spaces_per_indent", "=", "4", ")", ":", "if", "(", "not", "CONF", ".", "verbose", ")", ":", "return", "uuid", "=", "obj", "[", "'uuid'", "]", "try", ":", "name_label", "=", "obj", "[", "'name_label'", "]", "except", "KeyError", ":", "name_label", "=", "''", "msg", "=", "(", "\"%s (%s) '%s'\"", "%", "(", "obj_type", ",", "uuid", ",", "name_label", ")", ")", "indent", "=", "(", "(", "' '", "*", "spaces_per_indent", ")", "*", "indent_level", ")", "print", "''", ".", "join", "(", "[", "indent", ",", "msg", "]", ")" ]
pretty-print a xen object .
train
false
53,085
def All(other, arrexpr, operator=operators.eq):
    return arrexpr.all(other, operator)
[ "def", "All", "(", "other", ",", "arrexpr", ",", "operator", "=", "operators", ".", "eq", ")", ":", "return", "arrexpr", ".", "all", "(", "other", ",", "operator", ")" ]
a synonym for the :meth: .
train
false
53,086
def ObjectSpecification(direct, cls):
    return Provides(cls, direct)
[ "def", "ObjectSpecification", "(", "direct", ",", "cls", ")", ":", "return", "Provides", "(", "cls", ",", "direct", ")" ]
provide object specifications . these combine information for the object and for its classes .
train
false
53,087
def test_arg_of_sigmoid_bad():
    X = T.matrix()
    Y = T.nnet.softmax(X)
    try:
        Z = arg_of_sigmoid(Y)
    except TypeError:
        return
    assert False
[ "def", "test_arg_of_sigmoid_bad", "(", ")", ":", "X", "=", "T", ".", "matrix", "(", ")", "Y", "=", "T", ".", "nnet", ".", "softmax", "(", "X", ")", "try", ":", "Z", "=", "arg_of_sigmoid", "(", "Y", ")", "except", "TypeError", ":", "return", "assert", "False" ]
tests that arg_of_sigmoid raises an error when given a bad input .
train
false
53,091
def clear_pricing_data():
    invalidate_pricing_cache()
[ "def", "clear_pricing_data", "(", ")", ":", "invalidate_pricing_cache", "(", ")" ]
invalidate pricing cache for all the drivers .
train
false
53,092
def to_lines(chunks):
    leftovers = []
    for chunk in chunks:
        start = 0
        while (start < len(chunk)):
            end = (chunk.find('\n', start) + 1)
            if (end == 0):
                leftovers.append(chunk[start:])
                break
            if leftovers:
                leftovers.append(chunk[start:end])
                (yield ''.join(leftovers))
                leftovers = []
            else:
                (yield chunk[start:end])
            start = end
    if leftovers:
        (yield ''.join(leftovers))
[ "def", "to_lines", "(", "chunks", ")", ":", "leftovers", "=", "[", "]", "for", "chunk", "in", "chunks", ":", "start", "=", "0", "while", "(", "start", "<", "len", "(", "chunk", ")", ")", ":", "end", "=", "(", "chunk", ".", "find", "(", "'\\n'", ",", "start", ")", "+", "1", ")", "if", "(", "end", "==", "0", ")", ":", "leftovers", ".", "append", "(", "chunk", "[", "start", ":", "]", ")", "break", "if", "leftovers", ":", "leftovers", ".", "append", "(", "chunk", "[", "start", ":", "end", "]", ")", "(", "yield", "''", ".", "join", "(", "leftovers", ")", ")", "leftovers", "=", "[", "]", "else", ":", "(", "yield", "chunk", "[", "start", ":", "end", "]", ")", "start", "=", "end", "if", "leftovers", ":", "(", "yield", "''", ".", "join", "(", "leftovers", ")", ")" ]
take in data as a sequence of bytes .
train
false
53,093
def get_status_from_instructor_task(instructor_task):
    status = {}
    if (instructor_task is not None):
        status['task_id'] = instructor_task.task_id
        status['task_state'] = instructor_task.task_state
        status['in_progress'] = (instructor_task.task_state not in READY_STATES)
        if (instructor_task.task_output is not None):
            status['task_progress'] = json.loads(instructor_task.task_output)
    return status
[ "def", "get_status_from_instructor_task", "(", "instructor_task", ")", ":", "status", "=", "{", "}", "if", "(", "instructor_task", "is", "not", "None", ")", ":", "status", "[", "'task_id'", "]", "=", "instructor_task", ".", "task_id", "status", "[", "'task_state'", "]", "=", "instructor_task", ".", "task_state", "status", "[", "'in_progress'", "]", "=", "(", "instructor_task", ".", "task_state", "not", "in", "READY_STATES", ")", "if", "(", "instructor_task", ".", "task_output", "is", "not", "None", ")", ":", "status", "[", "'task_progress'", "]", "=", "json", ".", "loads", "(", "instructor_task", ".", "task_output", ")", "return", "status" ]
get the status for a given instructortask entry .
train
false
53,095
def get_decoder(encoding, *args, **kwargs):
    def _get_decoder_class():
        if (encoding == AMF0):
            try:
                from cpyamf import amf0
            except ImportError:
                from pyamf import amf0
            return amf0.Decoder
        elif (encoding == AMF3):
            try:
                from cpyamf import amf3
            except ImportError:
                from pyamf import amf3
            return amf3.Decoder
        raise ValueError(('Unknown encoding %r' % (encoding,)))
    return _get_decoder_class()(*args, **kwargs)
[ "def", "get_decoder", "(", "encoding", ",", "*", "args", ",", "**", "kwargs", ")", ":", "def", "_get_decoder_class", "(", ")", ":", "if", "(", "encoding", "==", "AMF0", ")", ":", "try", ":", "from", "cpyamf", "import", "amf0", "except", "ImportError", ":", "from", "pyamf", "import", "amf0", "return", "amf0", ".", "Decoder", "elif", "(", "encoding", "==", "AMF3", ")", ":", "try", ":", "from", "cpyamf", "import", "amf3", "except", "ImportError", ":", "from", "pyamf", "import", "amf3", "return", "amf3", ".", "Decoder", "raise", "ValueError", "(", "(", "'Unknown encoding %r'", "%", "(", "encoding", ",", ")", ")", ")", "return", "_get_decoder_class", "(", ")", "(", "*", "args", ",", "**", "kwargs", ")" ]
returns a l{codec .
train
true
53,097
@pytest.mark.skipif(u'not HAS_SCIPY')
def test_scipy_poisson_limit():
    assert_allclose(funcs._scipy_kraft_burrows_nousek(5.0, 2.5, 0.99), (0, 10.67), rtol=0.001)
    conf = funcs.poisson_conf_interval([5.0, 6.0], u'kraft-burrows-nousek', background=[2.5, 2.0], conflevel=[0.99, 0.9])
    assert_allclose(conf[:, 0], (0, 10.67), rtol=0.001)
    assert_allclose(conf[:, 1], (0.81, 8.99), rtol=0.005)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "u'not HAS_SCIPY'", ")", "def", "test_scipy_poisson_limit", "(", ")", ":", "assert_allclose", "(", "funcs", ".", "_scipy_kraft_burrows_nousek", "(", "5.0", ",", "2.5", ",", "0.99", ")", ",", "(", "0", ",", "10.67", ")", ",", "rtol", "=", "0.001", ")", "conf", "=", "funcs", ".", "poisson_conf_interval", "(", "[", "5.0", ",", "6.0", "]", ",", "u'kraft-burrows-nousek'", ",", "background", "=", "[", "2.5", ",", "2.0", "]", ",", "conflevel", "=", "[", "0.99", ",", "0.9", "]", ")", "assert_allclose", "(", "conf", "[", ":", ",", "0", "]", ",", "(", "0", ",", "10.67", ")", ",", "rtol", "=", "0.001", ")", "assert_allclose", "(", "conf", "[", ":", ",", "1", "]", ",", "(", "0.81", ",", "8.99", ")", ",", "rtol", "=", "0.005", ")" ]
test that the lower-level routine gives the same number .
train
false
53,098
def hash_path(account, container=None, object=None, raw_digest=False):
    if (object and (not container)):
        raise ValueError('container is required if object is provided')
    paths = [account]
    if container:
        paths.append(container)
    if object:
        paths.append(object)
    if raw_digest:
        return md5((((HASH_PATH_PREFIX + '/') + '/'.join(paths)) + HASH_PATH_SUFFIX)).digest()
    else:
        return md5((((HASH_PATH_PREFIX + '/') + '/'.join(paths)) + HASH_PATH_SUFFIX)).hexdigest()
[ "def", "hash_path", "(", "account", ",", "container", "=", "None", ",", "object", "=", "None", ",", "raw_digest", "=", "False", ")", ":", "if", "(", "object", "and", "(", "not", "container", ")", ")", ":", "raise", "ValueError", "(", "'container is required if object is provided'", ")", "paths", "=", "[", "account", "]", "if", "container", ":", "paths", ".", "append", "(", "container", ")", "if", "object", ":", "paths", ".", "append", "(", "object", ")", "if", "raw_digest", ":", "return", "md5", "(", "(", "(", "(", "HASH_PATH_PREFIX", "+", "'/'", ")", "+", "'/'", ".", "join", "(", "paths", ")", ")", "+", "HASH_PATH_SUFFIX", ")", ")", ".", "digest", "(", ")", "else", ":", "return", "md5", "(", "(", "(", "(", "HASH_PATH_PREFIX", "+", "'/'", ")", "+", "'/'", ".", "join", "(", "paths", ")", ")", "+", "HASH_PATH_SUFFIX", ")", ")", ".", "hexdigest", "(", ")" ]
get the canonical hash for an account/container/object .
train
false
53,099
def flags2segs(Flags, window):
    preFlag = 0
    curFlag = 0
    numOfSegments = 0
    curVal = Flags[curFlag]
    segsList = []
    classes = []
    while (curFlag < (len(Flags) - 1)):
        stop = 0
        preFlag = curFlag
        preVal = curVal
        while (stop == 0):
            curFlag = (curFlag + 1)
            tempVal = Flags[curFlag]
            if ((tempVal != curVal) | (curFlag == (len(Flags) - 1))):
                numOfSegments = (numOfSegments + 1)
                stop = 1
                curSegment = curVal
                curVal = Flags[curFlag]
        segsList.append((curFlag * window))
        classes.append(preVal)
    segs = numpy.zeros((len(segsList), 2))
    for i in range(len(segsList)):
        if (i > 0):
            segs[(i, 0)] = segsList[(i - 1)]
        segs[(i, 1)] = segsList[i]
    return (segs, classes)
[ "def", "flags2segs", "(", "Flags", ",", "window", ")", ":", "preFlag", "=", "0", "curFlag", "=", "0", "numOfSegments", "=", "0", "curVal", "=", "Flags", "[", "curFlag", "]", "segsList", "=", "[", "]", "classes", "=", "[", "]", "while", "(", "curFlag", "<", "(", "len", "(", "Flags", ")", "-", "1", ")", ")", ":", "stop", "=", "0", "preFlag", "=", "curFlag", "preVal", "=", "curVal", "while", "(", "stop", "==", "0", ")", ":", "curFlag", "=", "(", "curFlag", "+", "1", ")", "tempVal", "=", "Flags", "[", "curFlag", "]", "if", "(", "(", "tempVal", "!=", "curVal", ")", "|", "(", "curFlag", "==", "(", "len", "(", "Flags", ")", "-", "1", ")", ")", ")", ":", "numOfSegments", "=", "(", "numOfSegments", "+", "1", ")", "stop", "=", "1", "curSegment", "=", "curVal", "curVal", "=", "Flags", "[", "curFlag", "]", "segsList", ".", "append", "(", "(", "curFlag", "*", "window", ")", ")", "classes", ".", "append", "(", "preVal", ")", "segs", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "segsList", ")", ",", "2", ")", ")", "for", "i", "in", "range", "(", "len", "(", "segsList", ")", ")", ":", "if", "(", "i", ">", "0", ")", ":", "segs", "[", "(", "i", ",", "0", ")", "]", "=", "segsList", "[", "(", "i", "-", "1", ")", "]", "segs", "[", "(", "i", ",", "1", ")", "]", "=", "segsList", "[", "i", "]", "return", "(", "segs", ",", "classes", ")" ]
arguments: - flags: a sequence of class flags - window: window duration returns: - segs: a sequence of segments limits: segs[i .
train
false
53,100
def is_renamed(kev):
    return (kev.fflags & select.KQ_NOTE_RENAME)
[ "def", "is_renamed", "(", "kev", ")", ":", "return", "(", "kev", ".", "fflags", "&", "select", ".", "KQ_NOTE_RENAME", ")" ]
determines whether the given kevent represents movement .
train
false
53,102
def get_vertical_bar(pos, size):
    vertical = ColorBarVisual(pos=pos, size=size, label_str='iterations to escape', cmap=colormap, orientation='left')
    vertical.label.font_size = 15
    vertical.label.color = 'white'
    vertical.clim = (0, MAX_ITERATIONS)
    vertical.ticks[0].font_size = 10
    vertical.ticks[1].font_size = 10
    vertical.ticks[0].color = 'white'
    vertical.ticks[1].color = 'white'
    vertical.border_width = 1
    vertical.border_color = Color('#ababab')
    return vertical
[ "def", "get_vertical_bar", "(", "pos", ",", "size", ")", ":", "vertical", "=", "ColorBarVisual", "(", "pos", "=", "pos", ",", "size", "=", "size", ",", "label_str", "=", "'iterations to escape'", ",", "cmap", "=", "colormap", ",", "orientation", "=", "'left'", ")", "vertical", ".", "label", ".", "font_size", "=", "15", "vertical", ".", "label", ".", "color", "=", "'white'", "vertical", ".", "clim", "=", "(", "0", ",", "MAX_ITERATIONS", ")", "vertical", ".", "ticks", "[", "0", "]", ".", "font_size", "=", "10", "vertical", ".", "ticks", "[", "1", "]", ".", "font_size", "=", "10", "vertical", ".", "ticks", "[", "0", "]", ".", "color", "=", "'white'", "vertical", ".", "ticks", "[", "1", "]", ".", "color", "=", "'white'", "vertical", ".", "border_width", "=", "1", "vertical", ".", "border_color", "=", "Color", "(", "'#ababab'", ")", "return", "vertical" ]
constructs the vertical bar that represents the color values for the mandelbrot set . returns a vispy .
train
false
53,104
@event.listens_for(Event, 'after_insert')
def create_version_info(mapper, conn, target):
    version = Version(event_id=target.id)
    target.version = version
[ "@", "event", ".", "listens_for", "(", "Event", ",", "'after_insert'", ")", "def", "create_version_info", "(", "mapper", ",", "conn", ",", "target", ")", ":", "version", "=", "Version", "(", "event_id", "=", "target", ".", "id", ")", "target", ".", "version", "=", "version" ]
create version instance after event created .
train
false
53,105
def qsimplify_pauli(e):
    if isinstance(e, Operator):
        return e
    if isinstance(e, (Add, Pow, exp)):
        t = type(e)
        return t(*(qsimplify_pauli(arg) for arg in e.args))
    if isinstance(e, Mul):
        (c, nc) = e.args_cnc()
        nc_s = []
        while nc:
            curr = nc.pop(0)
            while (len(nc) and isinstance(curr, SigmaOpBase) and isinstance(nc[0], SigmaOpBase) and (curr.name == nc[0].name)):
                x = nc.pop(0)
                y = _qsimplify_pauli_product(curr, x)
                (c1, nc1) = y.args_cnc()
                curr = Mul(*nc1)
                c = (c + c1)
            nc_s.append(curr)
        return (Mul(*c) * Mul(*nc_s))
    return e
[ "def", "qsimplify_pauli", "(", "e", ")", ":", "if", "isinstance", "(", "e", ",", "Operator", ")", ":", "return", "e", "if", "isinstance", "(", "e", ",", "(", "Add", ",", "Pow", ",", "exp", ")", ")", ":", "t", "=", "type", "(", "e", ")", "return", "t", "(", "*", "(", "qsimplify_pauli", "(", "arg", ")", "for", "arg", "in", "e", ".", "args", ")", ")", "if", "isinstance", "(", "e", ",", "Mul", ")", ":", "(", "c", ",", "nc", ")", "=", "e", ".", "args_cnc", "(", ")", "nc_s", "=", "[", "]", "while", "nc", ":", "curr", "=", "nc", ".", "pop", "(", "0", ")", "while", "(", "len", "(", "nc", ")", "and", "isinstance", "(", "curr", ",", "SigmaOpBase", ")", "and", "isinstance", "(", "nc", "[", "0", "]", ",", "SigmaOpBase", ")", "and", "(", "curr", ".", "name", "==", "nc", "[", "0", "]", ".", "name", ")", ")", ":", "x", "=", "nc", ".", "pop", "(", "0", ")", "y", "=", "_qsimplify_pauli_product", "(", "curr", ",", "x", ")", "(", "c1", ",", "nc1", ")", "=", "y", ".", "args_cnc", "(", ")", "curr", "=", "Mul", "(", "*", "nc1", ")", "c", "=", "(", "c", "+", "c1", ")", "nc_s", ".", "append", "(", "curr", ")", "return", "(", "Mul", "(", "*", "c", ")", "*", "Mul", "(", "*", "nc_s", ")", ")", "return", "e" ]
simplify an expression that includes products of pauli operators .
train
false
53,106
def write_julian(fid, kind, data):
    assert (len(data) == 3)
    data_size = 4
    jd = np.sum(jcal2jd(*data))
    data = np.array(jd, dtype='>i4')
    _write(fid, data, kind, data_size, FIFF.FIFFT_JULIAN, '>i4')
[ "def", "write_julian", "(", "fid", ",", "kind", ",", "data", ")", ":", "assert", "(", "len", "(", "data", ")", "==", "3", ")", "data_size", "=", "4", "jd", "=", "np", ".", "sum", "(", "jcal2jd", "(", "*", "data", ")", ")", "data", "=", "np", ".", "array", "(", "jd", ",", "dtype", "=", "'>i4'", ")", "_write", "(", "fid", ",", "data", ",", "kind", ",", "data_size", ",", "FIFF", ".", "FIFFT_JULIAN", ",", "'>i4'", ")" ]
write a julian-formatted date to a fif file .
train
false
53,107
def DelTree(l, dn, scope=ldap.SCOPE_ONELEVEL):
    leafs_deleter = DeleteLeafs(l)
    leafs_deleter.startSearch(dn, scope)
    leafs_deleter.processResults()
    deleted_entries = leafs_deleter.deletedEntries
    non_leaf_entries = leafs_deleter.nonLeafEntries[:]
    while non_leaf_entries:
        dn = non_leaf_entries.pop()
        print deleted_entries, len(non_leaf_entries), dn
        leafs_deleter.startSearch(dn, ldap.SCOPE_SUBTREE)
        leafs_deleter.processResults()
        deleted_entries = (deleted_entries + leafs_deleter.deletedEntries)
        non_leaf_entries.extend(leafs_deleter.nonLeafEntries)
    return
[ "def", "DelTree", "(", "l", ",", "dn", ",", "scope", "=", "ldap", ".", "SCOPE_ONELEVEL", ")", ":", "leafs_deleter", "=", "DeleteLeafs", "(", "l", ")", "leafs_deleter", ".", "startSearch", "(", "dn", ",", "scope", ")", "leafs_deleter", ".", "processResults", "(", ")", "deleted_entries", "=", "leafs_deleter", ".", "deletedEntries", "non_leaf_entries", "=", "leafs_deleter", ".", "nonLeafEntries", "[", ":", "]", "while", "non_leaf_entries", ":", "dn", "=", "non_leaf_entries", ".", "pop", "(", ")", "print", "deleted_entries", ",", "len", "(", "non_leaf_entries", ")", ",", "dn", "leafs_deleter", ".", "startSearch", "(", "dn", ",", "ldap", ".", "SCOPE_SUBTREE", ")", "leafs_deleter", ".", "processResults", "(", ")", "deleted_entries", "=", "(", "deleted_entries", "+", "leafs_deleter", ".", "deletedEntries", ")", "non_leaf_entries", ".", "extend", "(", "leafs_deleter", ".", "nonLeafEntries", ")", "return" ]
recursively delete entries below or including entry with name dn .
train
false
53,108
def _buffer_recv_worker(rt_client, nchan):
    try:
        for raw_buffer in rt_client.raw_buffers(nchan):
            rt_client._push_raw_buffer(raw_buffer)
    except RuntimeError as err:
        rt_client._recv_thread = None
        print(('Buffer receive thread stopped: %s' % err))
[ "def", "_buffer_recv_worker", "(", "rt_client", ",", "nchan", ")", ":", "try", ":", "for", "raw_buffer", "in", "rt_client", ".", "raw_buffers", "(", "nchan", ")", ":", "rt_client", ".", "_push_raw_buffer", "(", "raw_buffer", ")", "except", "RuntimeError", "as", "err", ":", "rt_client", ".", "_recv_thread", "=", "None", "print", "(", "(", "'Buffer receive thread stopped: %s'", "%", "err", ")", ")" ]
worker thread that constantly receives buffers .
train
false
53,110
@pytest.mark.django_db
def test_project_save_no_code(project0):
    project0.code = ''
    with pytest.raises(ValidationError):
        project0.save()
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_project_save_no_code", "(", "project0", ")", ":", "project0", ".", "code", "=", "''", "with", "pytest", ".", "raises", "(", "ValidationError", ")", ":", "project0", ".", "save", "(", ")" ]
test that an existing project cannot have its code removed .
train
false
53,111
def _divide_with_ceil(a, b):
    if (a % b):
        return ((a // b) + 1)
    return (a // b)
[ "def", "_divide_with_ceil", "(", "a", ",", "b", ")", ":", "if", "(", "a", "%", "b", ")", ":", "return", "(", "(", "a", "//", "b", ")", "+", "1", ")", "return", "(", "a", "//", "b", ")" ]
returns a divided by b , rounded up when there is a remainder .
train
false
53,112
def _get_well_known_file():
    WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json'
    default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)
    if (default_config_dir is None):
        if (os.name == 'nt'):
            try:
                default_config_dir = os.path.join(os.environ['APPDATA'], _CLOUDSDK_CONFIG_DIRECTORY)
            except KeyError:
                drive = os.environ.get('SystemDrive', 'C:')
                default_config_dir = os.path.join(drive, '\\', _CLOUDSDK_CONFIG_DIRECTORY)
        else:
            default_config_dir = os.path.join(os.path.expanduser('~'), '.config', _CLOUDSDK_CONFIG_DIRECTORY)
    return os.path.join(default_config_dir, WELL_KNOWN_CREDENTIALS_FILE)
[ "def", "_get_well_known_file", "(", ")", ":", "WELL_KNOWN_CREDENTIALS_FILE", "=", "'application_default_credentials.json'", "default_config_dir", "=", "os", ".", "getenv", "(", "_CLOUDSDK_CONFIG_ENV_VAR", ")", "if", "(", "default_config_dir", "is", "None", ")", ":", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "try", ":", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "environ", "[", "'APPDATA'", "]", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "except", "KeyError", ":", "drive", "=", "os", ".", "environ", ".", "get", "(", "'SystemDrive'", ",", "'C:'", ")", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "drive", ",", "'\\\\'", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "else", ":", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ",", "'.config'", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "return", "os", ".", "path", ".", "join", "(", "default_config_dir", ",", "WELL_KNOWN_CREDENTIALS_FILE", ")" ]
get the well known file produced by command gcloud auth login .
train
true
53,113
def get_backup_files(backup_info, selected_kinds=None):
    if backup_info.blob_files:
        return backup_info.blob_files
    else:
        kinds_backup_files = backup_info.get_kind_backup_files(selected_kinds)
        return list(itertools.chain(*(kind_backup_files.files for kind_backup_files in kinds_backup_files)))
[ "def", "get_backup_files", "(", "backup_info", ",", "selected_kinds", "=", "None", ")", ":", "if", "backup_info", ".", "blob_files", ":", "return", "backup_info", ".", "blob_files", "else", ":", "kinds_backup_files", "=", "backup_info", ".", "get_kind_backup_files", "(", "selected_kinds", ")", "return", "list", "(", "itertools", ".", "chain", "(", "*", "(", "kind_backup_files", ".", "files", "for", "kind_backup_files", "in", "kinds_backup_files", ")", ")", ")" ]
returns the backup filenames for selected kinds or all if none/empty .
train
false
53,116
def _get_collection_change_list(property_name, new_value):
    return [{'cmd': collection_domain.CMD_EDIT_COLLECTION_PROPERTY, 'property_name': property_name, 'new_value': new_value}]
[ "def", "_get_collection_change_list", "(", "property_name", ",", "new_value", ")", ":", "return", "[", "{", "'cmd'", ":", "collection_domain", ".", "CMD_EDIT_COLLECTION_PROPERTY", ",", "'property_name'", ":", "property_name", ",", "'new_value'", ":", "new_value", "}", "]" ]
generates a change list for a single collection property change .
train
false
53,117
def _scale_dimensions(width, height, longest_side=settings.THUMBNAIL_SIZE):
    if ((width < longest_side) and (height < longest_side)):
        return (width, height)
    if (width > height):
        new_width = longest_side
        new_height = ((new_width * height) / width)
        return (new_width, new_height)
    new_height = longest_side
    new_width = ((new_height * width) / height)
    return (new_width, new_height)
[ "def", "_scale_dimensions", "(", "width", ",", "height", ",", "longest_side", "=", "settings", ".", "THUMBNAIL_SIZE", ")", ":", "if", "(", "(", "width", "<", "longest_side", ")", "and", "(", "height", "<", "longest_side", ")", ")", ":", "return", "(", "width", ",", "height", ")", "if", "(", "width", ">", "height", ")", ":", "new_width", "=", "longest_side", "new_height", "=", "(", "(", "new_width", "*", "height", ")", "/", "width", ")", "return", "(", "new_width", ",", "new_height", ")", "new_height", "=", "longest_side", "new_width", "=", "(", "(", "new_height", "*", "width", ")", "/", "height", ")", "return", "(", "new_width", ",", "new_height", ")" ]
returns a tuple of dimensions scaled to fit within longest_side .
train
false
53,118
def get_db_info():
    info = file_io.read(constants.DB_INFO_LOC)
    return yaml.load(info)
[ "def", "get_db_info", "(", ")", ":", "info", "=", "file_io", ".", "read", "(", "constants", ".", "DB_INFO_LOC", ")", "return", "yaml", ".", "load", "(", "info", ")" ]
get information on the database being used .
train
false
53,119
def list_history(request):
    history = History.objects
    if (not request.user.is_superuser):
        history = history.filter(submitter=request.user)
    history = history.order_by('-submission_date')
    return render('editor/list_history.mako', request, {'history': history})
[ "def", "list_history", "(", "request", ")", ":", "history", "=", "History", ".", "objects", "if", "(", "not", "request", ".", "user", ".", "is_superuser", ")", ":", "history", "=", "history", ".", "filter", "(", "submitter", "=", "request", ".", "user", ")", "history", "=", "history", ".", "order_by", "(", "'-submission_date'", ")", "return", "render", "(", "'editor/list_history.mako'", ",", "request", ",", "{", "'history'", ":", "history", "}", ")" ]
list the job submission history .
train
false
53,120
def is_subdomain(subdomain, base):
    return ((subdomain == base) or ((subdomain is not None) and subdomain.endswith(('.' + base))))
[ "def", "is_subdomain", "(", "subdomain", ",", "base", ")", ":", "return", "(", "(", "subdomain", "==", "base", ")", "or", "(", "(", "subdomain", "is", "not", "None", ")", "and", "subdomain", ".", "endswith", "(", "(", "'.'", "+", "base", ")", ")", ")", ")" ]
check if a domain is equal to or a subdomain of a base domain .
train
false
53,121
def add_master_course_staff_to_ccx_for_existing_ccx(apps, schema_editor):
    CustomCourseForEdX = apps.get_model(u'ccx', u'CustomCourseForEdX')
    list_ccx = CustomCourseForEdX.objects.all()
    for ccx in list_ccx:
        if ((not ccx.course_id) or ccx.course_id.deprecated):
            continue
        ccx_locator = CCXLocator.from_course_locator(ccx.course_id, unicode(ccx.id))
        try:
            course = get_course_by_id(ccx.course_id)
            add_master_course_staff_to_ccx(course, ccx_locator, ccx.display_name, send_email=False)
        except Http404:
            log.warning(u'Unable to add instructors and staff of master course %s to ccx %s.', ccx.course_id, ccx_locator)
[ "def", "add_master_course_staff_to_ccx_for_existing_ccx", "(", "apps", ",", "schema_editor", ")", ":", "CustomCourseForEdX", "=", "apps", ".", "get_model", "(", "u'ccx'", ",", "u'CustomCourseForEdX'", ")", "list_ccx", "=", "CustomCourseForEdX", ".", "objects", ".", "all", "(", ")", "for", "ccx", "in", "list_ccx", ":", "if", "(", "(", "not", "ccx", ".", "course_id", ")", "or", "ccx", ".", "course_id", ".", "deprecated", ")", ":", "continue", "ccx_locator", "=", "CCXLocator", ".", "from_course_locator", "(", "ccx", ".", "course_id", ",", "unicode", "(", "ccx", ".", "id", ")", ")", "try", ":", "course", "=", "get_course_by_id", "(", "ccx", ".", "course_id", ")", "add_master_course_staff_to_ccx", "(", "course", ",", "ccx_locator", ",", "ccx", ".", "display_name", ",", "send_email", "=", "False", ")", "except", "Http404", ":", "log", ".", "warning", "(", "u'Unable to add instructors and staff of master course %s to ccx %s.'", ",", "ccx", ".", "course_id", ",", "ccx_locator", ")" ]
add all staff and admin of master course to respective ccx(s) .
train
false
53,123
def _infer_compression(filepath_or_buffer, compression):
    if (compression is None):
        return None
    is_path = isinstance(filepath_or_buffer, compat.string_types)
    if ((compression == 'infer') and (not is_path)):
        return None
    if (compression == 'infer'):
        for (compression, extension) in _compression_to_extension.items():
            if filepath_or_buffer.endswith(extension):
                return compression
        return None
    if (compression in _compression_to_extension):
        return compression
    msg = 'Unrecognized compression type: {}'.format(compression)
    valid = (['infer', None] + sorted(_compression_to_extension))
    msg += '\nValid compression types are {}'.format(valid)
    raise ValueError(msg)
[ "def", "_infer_compression", "(", "filepath_or_buffer", ",", "compression", ")", ":", "if", "(", "compression", "is", "None", ")", ":", "return", "None", "is_path", "=", "isinstance", "(", "filepath_or_buffer", ",", "compat", ".", "string_types", ")", "if", "(", "(", "compression", "==", "'infer'", ")", "and", "(", "not", "is_path", ")", ")", ":", "return", "None", "if", "(", "compression", "==", "'infer'", ")", ":", "for", "(", "compression", ",", "extension", ")", "in", "_compression_to_extension", ".", "items", "(", ")", ":", "if", "filepath_or_buffer", ".", "endswith", "(", "extension", ")", ":", "return", "compression", "return", "None", "if", "(", "compression", "in", "_compression_to_extension", ")", ":", "return", "compression", "msg", "=", "'Unrecognized compression type: {}'", ".", "format", "(", "compression", ")", "valid", "=", "(", "[", "'infer'", ",", "None", "]", "+", "sorted", "(", "_compression_to_extension", ")", ")", "msg", "+=", "'\\nValid compression types are {}'", ".", "format", "(", "valid", ")", "raise", "ValueError", "(", "msg", ")" ]
get the compression method for filepath_or_buffer .
train
false
53,124
def test_hermite_finite(Chart, datas):
    chart = Chart(interpolate='hermite', interpolation_parameters={'type': 'finite_difference'})
    chart = make_data(chart, datas)
    assert chart.render()
[ "def", "test_hermite_finite", "(", "Chart", ",", "datas", ")", ":", "chart", "=", "Chart", "(", "interpolate", "=", "'hermite'", ",", "interpolation_parameters", "=", "{", "'type'", ":", "'finite_difference'", "}", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "assert", "chart", ".", "render", "(", ")" ]
test hermite finite difference interpolation .
train
false
53,125
def get_audiff_gradient(f, be, tensors):
    op_tree = f(be, *tensors)
    ad = Autodiff(op_tree, be)
    return ad
[ "def", "get_audiff_gradient", "(", "f", ",", "be", ",", "tensors", ")", ":", "op_tree", "=", "f", "(", "be", ",", "*", "tensors", ")", "ad", "=", "Autodiff", "(", "op_tree", ",", "be", ")", "return", "ad" ]
get autodiff gradient w .
train
false
53,126
def test_peak_finder():
    x = [0, 2, 5, 0, 6, (-1)]
    (peak_inds, peak_mags) = peak_finder(x)
    assert_array_equal(peak_inds, [2, 4])
[ "def", "test_peak_finder", "(", ")", ":", "x", "=", "[", "0", ",", "2", ",", "5", ",", "0", ",", "6", ",", "(", "-", "1", ")", "]", "(", "peak_inds", ",", "peak_mags", ")", "=", "peak_finder", "(", "x", ")", "assert_array_equal", "(", "peak_inds", ",", "[", "2", ",", "4", "]", ")" ]
test the peak detection method .
train
false
53,127
@csrf_exempt
def data_json(request):
    json_data = []
    for resource in ResourceBase.objects.all():
        record = {}
        record['title'] = resource.title
        record['description'] = resource.abstract
        record['keyword'] = resource.keyword_csv.split(',')
        record['modified'] = resource.csw_insert_date.isoformat()
        record['publisher'] = resource.poc.organization
        record['contactPoint'] = resource.poc.name_long
        record['mbox'] = resource.poc.email
        record['identifier'] = resource.uuid
        if resource.is_published:
            record['accessLevel'] = 'public'
        else:
            record['accessLevel'] = 'non-public'
        record['distribution'] = []
        for link in resource.link_set.all():
            record['distribution'].append({'accessURL': link.url, 'format': link.mime})
        json_data.append(record)
    return HttpResponse(json.dumps(json_data), 'application/json')
[ "@", "csrf_exempt", "def", "data_json", "(", "request", ")", ":", "json_data", "=", "[", "]", "for", "resource", "in", "ResourceBase", ".", "objects", ".", "all", "(", ")", ":", "record", "=", "{", "}", "record", "[", "'title'", "]", "=", "resource", ".", "title", "record", "[", "'description'", "]", "=", "resource", ".", "abstract", "record", "[", "'keyword'", "]", "=", "resource", ".", "keyword_csv", ".", "split", "(", "','", ")", "record", "[", "'modified'", "]", "=", "resource", ".", "csw_insert_date", ".", "isoformat", "(", ")", "record", "[", "'publisher'", "]", "=", "resource", ".", "poc", ".", "organization", "record", "[", "'contactPoint'", "]", "=", "resource", ".", "poc", ".", "name_long", "record", "[", "'mbox'", "]", "=", "resource", ".", "poc", ".", "email", "record", "[", "'identifier'", "]", "=", "resource", ".", "uuid", "if", "resource", ".", "is_published", ":", "record", "[", "'accessLevel'", "]", "=", "'public'", "else", ":", "record", "[", "'accessLevel'", "]", "=", "'non-public'", "record", "[", "'distribution'", "]", "=", "[", "]", "for", "link", "in", "resource", ".", "link_set", ".", "all", "(", ")", ":", "record", "[", "'distribution'", "]", ".", "append", "(", "{", "'accessURL'", ":", "link", ".", "url", ",", "'format'", ":", "link", ".", "mime", "}", ")", "json_data", ".", "append", "(", "record", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "json_data", ")", ",", "'application/json'", ")" ]
return data .
train
false
53,128
def volume_admin_metadata_delete(context, volume_id, key):
    return IMPL.volume_admin_metadata_delete(context, volume_id, key)
[ "def", "volume_admin_metadata_delete", "(", "context", ",", "volume_id", ",", "key", ")", ":", "return", "IMPL", ".", "volume_admin_metadata_delete", "(", "context", ",", "volume_id", ",", "key", ")" ]
delete the given metadata item .
train
false
53,130
@jit(nopython=True, cache=True)
def _lemke_howson_capping(payoff_matrices, tableaux, bases, init_pivot, max_iter, capping):
    (m, n) = (tableaux[1].shape[0], tableaux[0].shape[0])
    init_pivot_curr = init_pivot
    max_iter_curr = max_iter
    total_num_iter = 0
    for k in range(((m + n) - 1)):
        capping_curr = min(max_iter_curr, capping)
        initialize_tableaux(payoff_matrices, tableaux, bases)
        (converged, num_iter) = lemke_howson_tbl(tableaux, bases, init_pivot_curr, capping_curr)
        total_num_iter += num_iter
        if (converged or (total_num_iter >= max_iter)):
            return (converged, total_num_iter, init_pivot_curr)
        init_pivot_curr += 1
        if (init_pivot_curr >= (m + n)):
            init_pivot_curr -= (m + n)
        max_iter_curr -= num_iter
    initialize_tableaux(payoff_matrices, tableaux, bases)
    (converged, num_iter) = lemke_howson_tbl(tableaux, bases, init_pivot_curr, max_iter_curr)
    total_num_iter += num_iter
    return (converged, total_num_iter, init_pivot_curr)
[ "@", "jit", "(", "nopython", "=", "True", ",", "cache", "=", "True", ")", "def", "_lemke_howson_capping", "(", "payoff_matrices", ",", "tableaux", ",", "bases", ",", "init_pivot", ",", "max_iter", ",", "capping", ")", ":", "(", "m", ",", "n", ")", "=", "(", "tableaux", "[", "1", "]", ".", "shape", "[", "0", "]", ",", "tableaux", "[", "0", "]", ".", "shape", "[", "0", "]", ")", "init_pivot_curr", "=", "init_pivot", "max_iter_curr", "=", "max_iter", "total_num_iter", "=", "0", "for", "k", "in", "range", "(", "(", "(", "m", "+", "n", ")", "-", "1", ")", ")", ":", "capping_curr", "=", "min", "(", "max_iter_curr", ",", "capping", ")", "initialize_tableaux", "(", "payoff_matrices", ",", "tableaux", ",", "bases", ")", "(", "converged", ",", "num_iter", ")", "=", "lemke_howson_tbl", "(", "tableaux", ",", "bases", ",", "init_pivot_curr", ",", "capping_curr", ")", "total_num_iter", "+=", "num_iter", "if", "(", "converged", "or", "(", "total_num_iter", ">=", "max_iter", ")", ")", ":", "return", "(", "converged", ",", "total_num_iter", ",", "init_pivot_curr", ")", "init_pivot_curr", "+=", "1", "if", "(", "init_pivot_curr", ">=", "(", "m", "+", "n", ")", ")", ":", "init_pivot_curr", "-=", "(", "m", "+", "n", ")", "max_iter_curr", "-=", "num_iter", "initialize_tableaux", "(", "payoff_matrices", ",", "tableaux", ",", "bases", ")", "(", "converged", ",", "num_iter", ")", "=", "lemke_howson_tbl", "(", "tableaux", ",", "bases", ",", "init_pivot_curr", ",", "max_iter_curr", ")", "total_num_iter", "+=", "num_iter", "return", "(", "converged", ",", "total_num_iter", ",", "init_pivot_curr", ")" ]
execute the lemke-howson algorithm with the heuristics proposed by codenotti et al .
train
false
53,132
def sources_remove(source_uri, ruby=None, runas=None, gem_bin=None):
    return _gem(['sources', '--remove', source_uri], ruby, gem_bin=gem_bin, runas=runas)
[ "def", "sources_remove", "(", "source_uri", ",", "ruby", "=", "None", ",", "runas", "=", "None", ",", "gem_bin", "=", "None", ")", ":", "return", "_gem", "(", "[", "'sources'", ",", "'--remove'", ",", "source_uri", "]", ",", "ruby", ",", "gem_bin", "=", "gem_bin", ",", "runas", "=", "runas", ")" ]
remove a gem source .
train
true
53,133
def extract_email_id(email):
    from email.utils import parseaddr
    (fullname, email_id) = parseaddr(email)
    if (isinstance(email_id, basestring) and (not isinstance(email_id, unicode))):
        email_id = email_id.decode(u'utf-8', u'ignore')
    return email_id
[ "def", "extract_email_id", "(", "email", ")", ":", "from", "email", ".", "utils", "import", "parseaddr", "(", "fullname", ",", "email_id", ")", "=", "parseaddr", "(", "email", ")", "if", "(", "isinstance", "(", "email_id", ",", "basestring", ")", "and", "(", "not", "isinstance", "(", "email_id", ",", "unicode", ")", ")", ")", ":", "email_id", "=", "email_id", ".", "decode", "(", "u'utf-8'", ",", "u'ignore'", ")", "return", "email_id" ]
fetch only the email part of the email address .
train
false
53,134
def str2list(string):
    if ((',' in string) or ('|' in string)):
        return string.replace('|', ',').split(',')
    return string
[ "def", "str2list", "(", "string", ")", ":", "if", "(", "(", "','", "in", "string", ")", "or", "(", "'|'", "in", "string", ")", ")", ":", "return", "string", ".", "replace", "(", "'|'", ",", "','", ")", ".", "split", "(", "','", ")", "return", "string" ]
this function takes a string and returns either this string or a list of the comma-or-pipe separated elements from the string .
train
false
53,135
def docker_client(environment, version=None, tls_config=None, host=None, tls_version=None):
    try:
        kwargs = kwargs_from_env(environment=environment, ssl_version=tls_version)
    except TLSParameterError:
        raise UserError(u'TLS configuration is invalid - make sure your DOCKER_TLS_VERIFY and DOCKER_CERT_PATH are set correctly.\nYou might need to run `eval "$(docker-machine env default)"`')
    if host:
        kwargs[u'base_url'] = host
    if tls_config:
        kwargs[u'tls'] = tls_config
    if version:
        kwargs[u'version'] = version
    timeout = environment.get(u'COMPOSE_HTTP_TIMEOUT')
    if timeout:
        kwargs[u'timeout'] = int(timeout)
    else:
        kwargs[u'timeout'] = HTTP_TIMEOUT
    kwargs[u'user_agent'] = generate_user_agent()
    return APIClient(**kwargs)
[ "def", "docker_client", "(", "environment", ",", "version", "=", "None", ",", "tls_config", "=", "None", ",", "host", "=", "None", ",", "tls_version", "=", "None", ")", ":", "try", ":", "kwargs", "=", "kwargs_from_env", "(", "environment", "=", "environment", ",", "ssl_version", "=", "tls_version", ")", "except", "TLSParameterError", ":", "raise", "UserError", "(", "u'TLS configuration is invalid - make sure your DOCKER_TLS_VERIFY and DOCKER_CERT_PATH are set correctly.\\nYou might need to run `eval \"$(docker-machine env default)\"`'", ")", "if", "host", ":", "kwargs", "[", "u'base_url'", "]", "=", "host", "if", "tls_config", ":", "kwargs", "[", "u'tls'", "]", "=", "tls_config", "if", "version", ":", "kwargs", "[", "u'version'", "]", "=", "version", "timeout", "=", "environment", ".", "get", "(", "u'COMPOSE_HTTP_TIMEOUT'", ")", "if", "timeout", ":", "kwargs", "[", "u'timeout'", "]", "=", "int", "(", "timeout", ")", "else", ":", "kwargs", "[", "u'timeout'", "]", "=", "HTTP_TIMEOUT", "kwargs", "[", "u'user_agent'", "]", "=", "generate_user_agent", "(", ")", "return", "APIClient", "(", "**", "kwargs", ")" ]
returns a docker-py client configured using environment variables according to the same logic as the official docker client .
train
false
53,138
def get_number_of_serial_ports(flavor, image_meta):
    def get_number(obj, property):
        num_ports = obj.get(property)
        if (num_ports is not None):
            try:
                num_ports = int(num_ports)
            except ValueError:
                raise exception.ImageSerialPortNumberInvalid(num_ports=num_ports, property=property)
        return num_ports
    flavor_num_ports = get_number(flavor.extra_specs, 'hw:serial_port_count')
    image_num_ports = image_meta.properties.get('hw_serial_port_count', None)
    if ((flavor_num_ports and image_num_ports) is not None):
        if (image_num_ports > flavor_num_ports):
            raise exception.ImageSerialPortNumberExceedFlavorValue()
        return image_num_ports
    return (flavor_num_ports or image_num_ports or 1)
[ "def", "get_number_of_serial_ports", "(", "flavor", ",", "image_meta", ")", ":", "def", "get_number", "(", "obj", ",", "property", ")", ":", "num_ports", "=", "obj", ".", "get", "(", "property", ")", "if", "(", "num_ports", "is", "not", "None", ")", ":", "try", ":", "num_ports", "=", "int", "(", "num_ports", ")", "except", "ValueError", ":", "raise", "exception", ".", "ImageSerialPortNumberInvalid", "(", "num_ports", "=", "num_ports", ",", "property", "=", "property", ")", "return", "num_ports", "flavor_num_ports", "=", "get_number", "(", "flavor", ".", "extra_specs", ",", "'hw:serial_port_count'", ")", "image_num_ports", "=", "image_meta", ".", "properties", ".", "get", "(", "'hw_serial_port_count'", ",", "None", ")", "if", "(", "(", "flavor_num_ports", "and", "image_num_ports", ")", "is", "not", "None", ")", ":", "if", "(", "image_num_ports", ">", "flavor_num_ports", ")", ":", "raise", "exception", ".", "ImageSerialPortNumberExceedFlavorValue", "(", ")", "return", "image_num_ports", "return", "(", "flavor_num_ports", "or", "image_num_ports", "or", "1", ")" ]
get the number of serial consoles from the flavor or image .
train
false
53,139
def format_default_translations_config(additional_languages):
    if (not additional_languages):
        return SAMPLE_CONF[u'TRANSLATIONS']
    lang_paths = [u' DEFAULT_LANG: "",']
    for lang in sorted(additional_languages):
        lang_paths.append(u' "{0}": "./{0}",'.format(lang))
    return u'{{\n{0}\n}}'.format(u'\n'.join(lang_paths))
[ "def", "format_default_translations_config", "(", "additional_languages", ")", ":", "if", "(", "not", "additional_languages", ")", ":", "return", "SAMPLE_CONF", "[", "u'TRANSLATIONS'", "]", "lang_paths", "=", "[", "u' DEFAULT_LANG: \"\",'", "]", "for", "lang", "in", "sorted", "(", "additional_languages", ")", ":", "lang_paths", ".", "append", "(", "u' \"{0}\": \"./{0}\",'", ".", "format", "(", "lang", ")", ")", "return", "u'{{\\n{0}\\n}}'", ".", "format", "(", "u'\\n'", ".", "join", "(", "lang_paths", ")", ")" ]
adapt translations setting for all additional languages .
train
false
53,141
@verbose
def _read_source_spaces_from_tree(fid, tree, patch_stats=False, verbose=None):
    spaces = dir_tree_find(tree, FIFF.FIFFB_MNE_SOURCE_SPACE)
    if (len(spaces) == 0):
        raise ValueError('No source spaces found')
    src = list()
    for s in spaces:
        logger.info(' Reading a source space...')
        this = _read_one_source_space(fid, s)
        logger.info(' [done]')
        if patch_stats:
            _complete_source_space_info(this)
        src.append(this)
    logger.info((' %d source spaces read' % len(spaces)))
    return SourceSpaces(src)
[ "@", "verbose", "def", "_read_source_spaces_from_tree", "(", "fid", ",", "tree", ",", "patch_stats", "=", "False", ",", "verbose", "=", "None", ")", ":", "spaces", "=", "dir_tree_find", "(", "tree", ",", "FIFF", ".", "FIFFB_MNE_SOURCE_SPACE", ")", "if", "(", "len", "(", "spaces", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'No source spaces found'", ")", "src", "=", "list", "(", ")", "for", "s", "in", "spaces", ":", "logger", ".", "info", "(", "' Reading a source space...'", ")", "this", "=", "_read_one_source_space", "(", "fid", ",", "s", ")", "logger", ".", "info", "(", "' [done]'", ")", "if", "patch_stats", ":", "_complete_source_space_info", "(", "this", ")", "src", ".", "append", "(", "this", ")", "logger", ".", "info", "(", "(", "' %d source spaces read'", "%", "len", "(", "spaces", ")", ")", ")", "return", "SourceSpaces", "(", "src", ")" ]
read the source spaces from a fif file .
train
false
53,142
def running(ctid_or_name):
    return (status(ctid_or_name).split(' ')[4] == 'running')
[ "def", "running", "(", "ctid_or_name", ")", ":", "return", "(", "status", "(", "ctid_or_name", ")", ".", "split", "(", "' '", ")", "[", "4", "]", "==", "'running'", ")" ]
check whether the named container is running .
train
false
53,143
def miso_lfilter_old(ar, ma, x, useic=False):
    ma = np.asarray(ma)
    ar = np.asarray(ar)
    inp2 = signal.convolve(x, ma[:, ::(-1)])[:, ((x.shape[1] + 1) // 2)]
    inp = signal.correlate(x, ma[::(-1), :])[:, ((x.shape[1] + 1) // 2)]
    assert_almost_equal(inp2, inp)
    nobs = x.shape[0]
    if useic:
        return (signal.lfilter([1], ar, inp, zi=signal.lfiltic(np.array([1.0, 0.0]), ar, useic))[0][:nobs], inp[:nobs])
    else:
        return (signal.lfilter([1], ar, inp)[:nobs], inp[:nobs])
[ "def", "miso_lfilter_old", "(", "ar", ",", "ma", ",", "x", ",", "useic", "=", "False", ")", ":", "ma", "=", "np", ".", "asarray", "(", "ma", ")", "ar", "=", "np", ".", "asarray", "(", "ar", ")", "inp2", "=", "signal", ".", "convolve", "(", "x", ",", "ma", "[", ":", ",", ":", ":", "(", "-", "1", ")", "]", ")", "[", ":", ",", "(", "(", "x", ".", "shape", "[", "1", "]", "+", "1", ")", "//", "2", ")", "]", "inp", "=", "signal", ".", "correlate", "(", "x", ",", "ma", "[", ":", ":", "(", "-", "1", ")", ",", ":", "]", ")", "[", ":", ",", "(", "(", "x", ".", "shape", "[", "1", "]", "+", "1", ")", "//", "2", ")", "]", "assert_almost_equal", "(", "inp2", ",", "inp", ")", "nobs", "=", "x", ".", "shape", "[", "0", "]", "if", "useic", ":", "return", "(", "signal", ".", "lfilter", "(", "[", "1", "]", ",", "ar", ",", "inp", ",", "zi", "=", "signal", ".", "lfiltic", "(", "np", ".", "array", "(", "[", "1.0", ",", "0.0", "]", ")", ",", "ar", ",", "useic", ")", ")", "[", "0", "]", "[", ":", "nobs", "]", ",", "inp", "[", ":", "nobs", "]", ")", "else", ":", "return", "(", "signal", ".", "lfilter", "(", "[", "1", "]", ",", "ar", ",", "inp", ")", "[", ":", "nobs", "]", ",", "inp", "[", ":", "nobs", "]", ")" ]
use nd convolution to merge inputs .
train
false
53,144
def print_julia_code(expr, **settings):
    print(julia_code(expr, **settings))
[ "def", "print_julia_code", "(", "expr", ",", "**", "settings", ")", ":", "print", "(", "julia_code", "(", "expr", ",", "**", "settings", ")", ")" ]
prints the julia representation of the given expression .
train
false
53,146
def lenet(images, num_classes=10, is_training=False, dropout_keep_prob=0.5, prediction_fn=slim.softmax, scope='LeNet'):
    end_points = {}
    with tf.variable_scope(scope, 'LeNet', [images, num_classes]):
        net = slim.conv2d(images, 32, [5, 5], scope='conv1')
        net = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
        net = slim.conv2d(net, 64, [5, 5], scope='conv2')
        net = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
        net = slim.flatten(net)
        end_points['Flatten'] = net
        net = slim.fully_connected(net, 1024, scope='fc3')
        net = slim.dropout(net, dropout_keep_prob, is_training=is_training, scope='dropout3')
        logits = slim.fully_connected(net, num_classes, activation_fn=None, scope='fc4')
    end_points['Logits'] = logits
    end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
    return (logits, end_points)
[ "def", "lenet", "(", "images", ",", "num_classes", "=", "10", ",", "is_training", "=", "False", ",", "dropout_keep_prob", "=", "0.5", ",", "prediction_fn", "=", "slim", ".", "softmax", ",", "scope", "=", "'LeNet'", ")", ":", "end_points", "=", "{", "}", "with", "tf", ".", "variable_scope", "(", "scope", ",", "'LeNet'", ",", "[", "images", ",", "num_classes", "]", ")", ":", "net", "=", "slim", ".", "conv2d", "(", "images", ",", "32", ",", "[", "5", ",", "5", "]", ",", "scope", "=", "'conv1'", ")", "net", "=", "slim", ".", "max_pool2d", "(", "net", ",", "[", "2", ",", "2", "]", ",", "2", ",", "scope", "=", "'pool1'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "64", ",", "[", "5", ",", "5", "]", ",", "scope", "=", "'conv2'", ")", "net", "=", "slim", ".", "max_pool2d", "(", "net", ",", "[", "2", ",", "2", "]", ",", "2", ",", "scope", "=", "'pool2'", ")", "net", "=", "slim", ".", "flatten", "(", "net", ")", "end_points", "[", "'Flatten'", "]", "=", "net", "net", "=", "slim", ".", "fully_connected", "(", "net", ",", "1024", ",", "scope", "=", "'fc3'", ")", "net", "=", "slim", ".", "dropout", "(", "net", ",", "dropout_keep_prob", ",", "is_training", "=", "is_training", ",", "scope", "=", "'dropout3'", ")", "logits", "=", "slim", ".", "fully_connected", "(", "net", ",", "num_classes", ",", "activation_fn", "=", "None", ",", "scope", "=", "'fc4'", ")", "end_points", "[", "'Logits'", "]", "=", "logits", "end_points", "[", "'Predictions'", "]", "=", "prediction_fn", "(", "logits", ",", "scope", "=", "'Predictions'", ")", "return", "(", "logits", ",", "end_points", ")" ]
creates a variant of the lenet model .
train
true
53,147
def Rayleigh(name, sigma):
    return rv(name, RayleighDistribution, (sigma,))
[ "def", "Rayleigh", "(", "name", ",", "sigma", ")", ":", "return", "rv", "(", "name", ",", "RayleighDistribution", ",", "(", "sigma", ",", ")", ")" ]
create a continuous random variable with a rayleigh distribution .
train
false
53,148
def load_pklz_traceback(crash_filepath):
    try:
        data = loadcrash(crash_filepath)
    except TraitError as te:
        return str(te)
    except:
        raise
    else:
        return '\n'.join(data['traceback'])
[ "def", "load_pklz_traceback", "(", "crash_filepath", ")", ":", "try", ":", "data", "=", "loadcrash", "(", "crash_filepath", ")", "except", "TraitError", "as", "te", ":", "return", "str", "(", "te", ")", "except", ":", "raise", "else", ":", "return", "'\\n'", ".", "join", "(", "data", "[", "'traceback'", "]", ")" ]
return the traceback message in the given crash file .
train
false
53,149
def _import_mpl():
    try:
        import matplotlib.pyplot as plt
    except:
        raise ImportError('Matplotlib is not found.')
    return plt
[ "def", "_import_mpl", "(", ")", ":", "try", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "except", ":", "raise", "ImportError", "(", "'Matplotlib is not found.'", ")", "return", "plt" ]
this function is not needed outside this utils module .
train
false
53,150
def is_valid_vpnv4_prefix(prefix):
    if (not isinstance(prefix, str)):
        return False
    tokens = prefix.split(':', 2)
    if (len(tokens) != 3):
        return False
    if (not is_valid_route_dist(':'.join([tokens[0], tokens[1]]))):
        return False
    return is_valid_ipv4_prefix(tokens[2])
[ "def", "is_valid_vpnv4_prefix", "(", "prefix", ")", ":", "if", "(", "not", "isinstance", "(", "prefix", ",", "str", ")", ")", ":", "return", "False", "tokens", "=", "prefix", ".", "split", "(", "':'", ",", "2", ")", "if", "(", "len", "(", "tokens", ")", "!=", "3", ")", ":", "return", "False", "if", "(", "not", "is_valid_route_dist", "(", "':'", ".", "join", "(", "[", "tokens", "[", "0", "]", ",", "tokens", "[", "1", "]", "]", ")", ")", ")", ":", "return", "False", "return", "is_valid_ipv4_prefix", "(", "tokens", "[", "2", "]", ")" ]
returns true if given prefix is a string represent vpnv4 prefix .
train
true
53,152
def _get_current_client():
    if settings.USE_POSTGRES:
        return None
    return CLIENT_POOL.acquire()
[ "def", "_get_current_client", "(", ")", ":", "if", "settings", ".", "USE_POSTGRES", ":", "return", "None", "return", "CLIENT_POOL", ".", "acquire", "(", ")" ]
get the current mongodb client from the pool .
train
false
53,153
def reload_(name):
    term(name)
[ "def", "reload_", "(", "name", ")", ":", "term", "(", "name", ")" ]
reload the named service .
train
false
53,155
def send_mails(mails):
    try:
        connection = get_connection()
        connection.send_messages([mail for mail in mails if (mail is not None)])
    except SMTPException as error:
        LOGGER.error(u'Failed to send email: %s', error)
        report_error(error, sys.exc_info())
[ "def", "send_mails", "(", "mails", ")", ":", "try", ":", "connection", "=", "get_connection", "(", ")", "connection", ".", "send_messages", "(", "[", "mail", "for", "mail", "in", "mails", "if", "(", "mail", "is", "not", "None", ")", "]", ")", "except", "SMTPException", "as", "error", ":", "LOGGER", ".", "error", "(", "u'Failed to send email: %s'", ",", "error", ")", "report_error", "(", "error", ",", "sys", ".", "exc_info", "(", ")", ")" ]
sends multiple mails in a single connection .
train
false
53,156
@handle_response_format
@treeio_login_required
def task_edit(request, task_id, response_format='html'):
    task = get_object_or_404(Task, pk=task_id)
    if (not request.user.profile.has_permission(task, mode='w')):
        return user_denied(request, message="You don't have access to this Task")
    if request.POST:
        if ('cancel' not in request.POST):
            form = TaskForm(request.user.profile, None, None, None, request.POST, instance=task)
            if form.is_valid():
                task = form.save()
                return HttpResponseRedirect(reverse('projects_task_view', args=[task.id]))
        else:
            return HttpResponseRedirect(reverse('projects_task_view', args=[task.id]))
    else:
        form = TaskForm(request.user.profile, None, None, None, instance=task)
    context = _get_default_context(request)
    context.update({'form': form, 'task': task})
    return render_to_response('projects/task_edit', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "task_edit", "(", "request", ",", "task_id", ",", "response_format", "=", "'html'", ")", ":", "task", "=", "get_object_or_404", "(", "Task", ",", "pk", "=", "task_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "task", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Task\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "TaskForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "None", ",", "None", ",", "request", ".", "POST", ",", "instance", "=", "task", ")", "if", "form", ".", "is_valid", "(", ")", ":", "task", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_task_view'", ",", "args", "=", "[", "task", ".", "id", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_task_view'", ",", "args", "=", "[", "task", ".", "id", "]", ")", ")", "else", ":", "form", "=", "TaskForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "None", ",", "None", ",", "instance", "=", "task", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'form'", ":", "form", ",", "'task'", ":", "task", "}", ")", "return", "render_to_response", "(", "'projects/task_edit'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
task edit page .
train
false
53,158
def dots(it, label='', hide=None, every=1):
    count = 0
    if (not hide):
        STREAM.write(label)
    for (i, item) in enumerate(it):
        if (not hide):
            if ((i % every) == 0):
                STREAM.write(DOTS_CHAR)
                sys.stderr.flush()
        count += 1
        (yield item)
    STREAM.write('\n')
    STREAM.flush()
[ "def", "dots", "(", "it", ",", "label", "=", "''", ",", "hide", "=", "None", ",", "every", "=", "1", ")", ":", "count", "=", "0", "if", "(", "not", "hide", ")", ":", "STREAM", ".", "write", "(", "label", ")", "for", "(", "i", ",", "item", ")", "in", "enumerate", "(", "it", ")", ":", "if", "(", "not", "hide", ")", ":", "if", "(", "(", "i", "%", "every", ")", "==", "0", ")", ":", "STREAM", ".", "write", "(", "DOTS_CHAR", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "count", "+=", "1", "(", "yield", "item", ")", "STREAM", ".", "write", "(", "'\\n'", ")", "STREAM", ".", "flush", "(", ")" ]
progress iterator .
train
true
53,159
def do_slice(value, slices, fill_with=None):
    seq = list(value)
    length = len(seq)
    items_per_slice = (length // slices)
    slices_with_extra = (length % slices)
    offset = 0
    for slice_number in range(slices):
        start = (offset + (slice_number * items_per_slice))
        if (slice_number < slices_with_extra):
            offset += 1
        end = (offset + ((slice_number + 1) * items_per_slice))
        tmp = seq[start:end]
        if ((fill_with is not None) and (slice_number >= slices_with_extra)):
            tmp.append(fill_with)
        (yield tmp)
[ "def", "do_slice", "(", "value", ",", "slices", ",", "fill_with", "=", "None", ")", ":", "seq", "=", "list", "(", "value", ")", "length", "=", "len", "(", "seq", ")", "items_per_slice", "=", "(", "length", "//", "slices", ")", "slices_with_extra", "=", "(", "length", "%", "slices", ")", "offset", "=", "0", "for", "slice_number", "in", "range", "(", "slices", ")", ":", "start", "=", "(", "offset", "+", "(", "slice_number", "*", "items_per_slice", ")", ")", "if", "(", "slice_number", "<", "slices_with_extra", ")", ":", "offset", "+=", "1", "end", "=", "(", "offset", "+", "(", "(", "slice_number", "+", "1", ")", "*", "items_per_slice", ")", ")", "tmp", "=", "seq", "[", "start", ":", "end", "]", "if", "(", "(", "fill_with", "is", "not", "None", ")", "and", "(", "slice_number", ">=", "slices_with_extra", ")", ")", ":", "tmp", ".", "append", "(", "fill_with", ")", "(", "yield", "tmp", ")" ]
slice an iterator and return a list of lists containing those items .
train
true
53,162
def _analyze_indexed_fields(indexed_fields):
    result = {}
    for field_name in indexed_fields:
        if (not isinstance(field_name, basestring)):
            raise TypeError(('Field names must be strings; got %r' % (field_name,)))
        if ('.' not in field_name):
            if (field_name in result):
                raise ValueError(('Duplicate field name %s' % field_name))
            result[field_name] = None
        else:
            (head, tail) = field_name.split('.', 1)
            if (head not in result):
                result[head] = [tail]
            elif (result[head] is None):
                raise ValueError(('Field name %s conflicts with ancestor %s' % (field_name, head)))
            else:
                result[head].append(tail)
    return result
[ "def", "_analyze_indexed_fields", "(", "indexed_fields", ")", ":", "result", "=", "{", "}", "for", "field_name", "in", "indexed_fields", ":", "if", "(", "not", "isinstance", "(", "field_name", ",", "basestring", ")", ")", ":", "raise", "TypeError", "(", "(", "'Field names must be strings; got %r'", "%", "(", "field_name", ",", ")", ")", ")", "if", "(", "'.'", "not", "in", "field_name", ")", ":", "if", "(", "field_name", "in", "result", ")", ":", "raise", "ValueError", "(", "(", "'Duplicate field name %s'", "%", "field_name", ")", ")", "result", "[", "field_name", "]", "=", "None", "else", ":", "(", "head", ",", "tail", ")", "=", "field_name", ".", "split", "(", "'.'", ",", "1", ")", "if", "(", "head", "not", "in", "result", ")", ":", "result", "[", "head", "]", "=", "[", "tail", "]", "elif", "(", "result", "[", "head", "]", "is", "None", ")", ":", "raise", "ValueError", "(", "(", "'Field name %s conflicts with ancestor %s'", "%", "(", "field_name", ",", "head", ")", ")", ")", "else", ":", "result", "[", "head", "]", ".", "append", "(", "tail", ")", "return", "result" ]
internal helper to check a list of indexed fields .
train
true
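a worked example for the helper above (illustrative input): dotted names are grouped under their head segment, plain names map to None, and a plain name that is also used as a head raises ValueError:

print(_analyze_indexed_fields(['a', 'b.c', 'b.d']))
# -> {'a': None, 'b': ['c', 'd']}

# _analyze_indexed_fields(['b', 'b.c'])
# -> ValueError: Field name b.c conflicts with ancestor b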
53,164
def test_truncate_purified_field_xss(): s = 'safe <script>alert("omg")</script>' t = PurifiedTranslation(localized_string=s) env = jingo.get_env() actual = env.from_string('{{ s|truncate(100) }}').render({'s': t}) assert (actual == 'safe &lt;script&gt;alert("omg")&lt;/script&gt;') actual = env.from_string('{{ s|truncate(5) }}').render({'s': t}) assert (actual == 'safe ...')
[ "def", "test_truncate_purified_field_xss", "(", ")", ":", "s", "=", "'safe <script>alert(\"omg\")</script>'", "t", "=", "PurifiedTranslation", "(", "localized_string", "=", "s", ")", "env", "=", "jingo", ".", "get_env", "(", ")", "actual", "=", "env", ".", "from_string", "(", "'{{ s|truncate(100) }}'", ")", ".", "render", "(", "{", "'s'", ":", "t", "}", ")", "assert", "(", "actual", "==", "'safe &lt;script&gt;alert(\"omg\")&lt;/script&gt;'", ")", "actual", "=", "env", ".", "from_string", "(", "'{{ s|truncate(5) }}'", ")", ".", "render", "(", "{", "'s'", ":", "t", "}", ")", "assert", "(", "actual", "==", "'safe ...'", ")" ]
truncating should not introduce xss issues .
train
false
53,165
def primitive_insert(course, num, tab_type, name): validate_args(num, tab_type) new_tab = CourseTab.from_json({u'type': unicode(tab_type), u'name': unicode(name)}) tabs = course.tabs tabs.insert(num, new_tab) modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command)
[ "def", "primitive_insert", "(", "course", ",", "num", ",", "tab_type", ",", "name", ")", ":", "validate_args", "(", "num", ",", "tab_type", ")", "new_tab", "=", "CourseTab", ".", "from_json", "(", "{", "u'type'", ":", "unicode", "(", "tab_type", ")", ",", "u'name'", ":", "unicode", "(", "name", ")", "}", ")", "tabs", "=", "course", ".", "tabs", "tabs", ".", "insert", "(", "num", ",", "new_tab", ")", "modulestore", "(", ")", ".", "update_item", "(", "course", ",", "ModuleStoreEnum", ".", "UserID", ".", "primitive_command", ")" ]
inserts a new tab at the given number .
train
false
53,166
def fake_db_cluster(**updates): db_cluster = cluster_basic_fields() for (name, field) in objects.Cluster.fields.items(): if (name in db_cluster): continue if (field.default != fields.UnspecifiedDefault): db_cluster[name] = field.default elif field.nullable: db_cluster[name] = None else: raise Exception(('fake_db_cluster needs help with %s.' % name)) if updates: db_cluster.update(updates) return db_cluster
[ "def", "fake_db_cluster", "(", "**", "updates", ")", ":", "db_cluster", "=", "cluster_basic_fields", "(", ")", "for", "(", "name", ",", "field", ")", "in", "objects", ".", "Cluster", ".", "fields", ".", "items", "(", ")", ":", "if", "(", "name", "in", "db_cluster", ")", ":", "continue", "if", "(", "field", ".", "default", "!=", "fields", ".", "UnspecifiedDefault", ")", ":", "db_cluster", "[", "name", "]", "=", "field", ".", "default", "elif", "field", ".", "nullable", ":", "db_cluster", "[", "name", "]", "=", "None", "else", ":", "raise", "Exception", "(", "(", "'fake_db_cluster needs help with %s.'", "%", "name", ")", ")", "if", "updates", ":", "db_cluster", ".", "update", "(", "updates", ")", "return", "db_cluster" ]
helper method for fake_cluster_orm .
train
false
53,168
def discoverable(dev): if (dev not in address_()): raise CommandExecutionError('Invalid dev passed to bluetooth.discoverable') cmd = 'hciconfig {0} iscan'.format(dev) __salt__['cmd.run'](cmd).splitlines() cmd = 'hciconfig {0}'.format(dev) out = __salt__['cmd.run'](cmd) if ('UP RUNNING ISCAN' in out): return True return False
[ "def", "discoverable", "(", "dev", ")", ":", "if", "(", "dev", "not", "in", "address_", "(", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Invalid dev passed to bluetooth.discoverable'", ")", "cmd", "=", "'hciconfig {0} iscan'", ".", "format", "(", "dev", ")", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "cmd", "=", "'hciconfig {0}'", ".", "format", "(", "dev", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", "if", "(", "'UP RUNNING ISCAN'", "in", "out", ")", ":", "return", "True", "return", "False" ]
enable this bluetooth device to be discoverable .
train
true
53,169
def url_quote_plus(s, charset='utf-8', safe=''): if isinstance(s, unicode): s = s.encode(charset) elif (not isinstance(s, str)): s = str(s) return _quote_plus(s, safe=safe)
[ "def", "url_quote_plus", "(", "s", ",", "charset", "=", "'utf-8'", ",", "safe", "=", "''", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "s", "=", "s", ".", "encode", "(", "charset", ")", "elif", "(", "not", "isinstance", "(", "s", ",", "str", ")", ")", ":", "s", "=", "str", "(", "s", ")", "return", "_quote_plus", "(", "s", ",", "safe", "=", "safe", ")" ]
url encode a single string with the given encoding and convert whitespace to "+" .
train
true
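a minimal usage sketch, assuming _quote_plus is urllib's quote_plus (the snippet's imports are not shown); unicode input is utf-8 encoded before percent-escaping, and spaces become '+':

print(url_quote_plus(u'hello world & more'))  # -> 'hello+world+%26+more'
print(url_quote_plus(u'caf\xe9'))             # -> 'caf%C3%A9'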
53,170
@app.route('/threads/<public_id>', methods=['DELETE']) def thread_api_delete(public_id): raise NotImplementedError
[ "@", "app", ".", "route", "(", "'/threads/<public_id>'", ",", "methods", "=", "[", "'DELETE'", "]", ")", "def", "thread_api_delete", "(", "public_id", ")", ":", "raise", "NotImplementedError" ]
moves the thread to the trash .
train
false
53,171
def application_unproxied(environ, start_response): if hasattr(threading.current_thread(), 'uid'): del threading.current_thread().uid if hasattr(threading.current_thread(), 'dbname'): del threading.current_thread().dbname with odoo.api.Environment.manage(): for handler in [wsgi_xmlrpc, odoo.http.root]: result = handler(environ, start_response) if (result is None): continue return result response = 'No handler found.\n' start_response('404 Not Found', [('Content-Type', 'text/plain'), ('Content-Length', str(len(response)))]) return [response]
[ "def", "application_unproxied", "(", "environ", ",", "start_response", ")", ":", "if", "hasattr", "(", "threading", ".", "current_thread", "(", ")", ",", "'uid'", ")", ":", "del", "threading", ".", "current_thread", "(", ")", ".", "uid", "if", "hasattr", "(", "threading", ".", "current_thread", "(", ")", ",", "'dbname'", ")", ":", "del", "threading", ".", "current_thread", "(", ")", ".", "dbname", "with", "odoo", ".", "api", ".", "Environment", ".", "manage", "(", ")", ":", "for", "handler", "in", "[", "wsgi_xmlrpc", ",", "odoo", ".", "http", ".", "root", "]", ":", "result", "=", "handler", "(", "environ", ",", "start_response", ")", "if", "(", "result", "is", "None", ")", ":", "continue", "return", "result", "response", "=", "'No handler found.\\n'", "start_response", "(", "'404 Not Found'", ",", "[", "(", "'Content-Type'", ",", "'text/plain'", ")", ",", "(", "'Content-Length'", ",", "str", "(", "len", "(", "response", ")", ")", ")", "]", ")", "return", "[", "response", "]" ]
wsgi entry point .
train
false
53,172
def download_without_progress(url): resp = requests.get(url) resp.raise_for_status() return BytesIO(resp.content)
[ "def", "download_without_progress", "(", "url", ")", ":", "resp", "=", "requests", ".", "get", "(", "url", ")", "resp", ".", "raise_for_status", "(", ")", "return", "BytesIO", "(", "resp", ".", "content", ")" ]
download data from a url .
train
true
53,173
def orthogonal(shape, scale=1.1, name=None, dim_ordering='th'): flat_shape = (shape[0], np.prod(shape[1:])) a = np.random.normal(0.0, 1.0, flat_shape) (u, _, v) = np.linalg.svd(a, full_matrices=False) q = (u if (u.shape == flat_shape) else v) q = q.reshape(shape) return K.variable((scale * q[:shape[0], :shape[1]]), name=name)
[ "def", "orthogonal", "(", "shape", ",", "scale", "=", "1.1", ",", "name", "=", "None", ",", "dim_ordering", "=", "'th'", ")", ":", "flat_shape", "=", "(", "shape", "[", "0", "]", ",", "np", ".", "prod", "(", "shape", "[", "1", ":", "]", ")", ")", "a", "=", "np", ".", "random", ".", "normal", "(", "0.0", ",", "1.0", ",", "flat_shape", ")", "(", "u", ",", "_", ",", "v", ")", "=", "np", ".", "linalg", ".", "svd", "(", "a", ",", "full_matrices", "=", "False", ")", "q", "=", "(", "u", "if", "(", "u", ".", "shape", "==", "flat_shape", ")", "else", "v", ")", "q", "=", "q", ".", "reshape", "(", "shape", ")", "return", "K", ".", "variable", "(", "(", "scale", "*", "q", "[", ":", "shape", "[", "0", "]", ",", ":", "shape", "[", "1", "]", "]", ")", ",", "name", "=", "name", ")" ]
orthogonal initializer .
train
false
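the same SVD recipe in plain numpy, as a sketch to check the orthogonality property without a Keras backend (orthogonal_np is a hypothetical name, not part of the snippet):

import numpy as np

def orthogonal_np(shape, scale=1.1):
    # same recipe: SVD of a gaussian matrix, keep the factor matching the shape
    flat_shape = (shape[0], int(np.prod(shape[1:])))
    a = np.random.normal(0.0, 1.0, flat_shape)
    u, _, v = np.linalg.svd(a, full_matrices=False)
    q = u if u.shape == flat_shape else v
    return scale * q.reshape(shape)

w = orthogonal_np((4, 4), scale=1.0)
print(np.allclose(w.dot(w.T), np.eye(4)))  # -> True for a square shape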
53,174
def AddExtensionFiles(params, options): added = {} for vd in params.VirtualDirs: for smp in vd.ScriptMaps: if ((smp.Module not in added) and smp.AddExtensionFile): _AddExtensionFile(smp.Module, vd.Name, vd.Description, smp, options) added[smp.Module] = True for fd in params.Filters: if ((fd.Path not in added) and fd.AddExtensionFile): _AddExtensionFile(fd.Path, fd.Name, fd.Description, fd, options) added[fd.Path] = True
[ "def", "AddExtensionFiles", "(", "params", ",", "options", ")", ":", "added", "=", "{", "}", "for", "vd", "in", "params", ".", "VirtualDirs", ":", "for", "smp", "in", "vd", ".", "ScriptMaps", ":", "if", "(", "(", "smp", ".", "Module", "not", "in", "added", ")", "and", "smp", ".", "AddExtensionFile", ")", ":", "_AddExtensionFile", "(", "smp", ".", "Module", ",", "vd", ".", "Name", ",", "vd", ".", "Description", ",", "smp", ",", "options", ")", "added", "[", "smp", ".", "Module", "]", "=", "True", "for", "fd", "in", "params", ".", "Filters", ":", "if", "(", "(", "fd", ".", "Path", "not", "in", "added", ")", "and", "fd", ".", "AddExtensionFile", ")", ":", "_AddExtensionFile", "(", "fd", ".", "Path", ",", "fd", ".", "Name", ",", "fd", ".", "Description", ",", "fd", ",", "options", ")", "added", "[", "fd", ".", "Path", "]", "=", "True" ]
register the modules used by the filters/extensions as a trusted extension module - required by the default iis6 security settings .
train
false
53,175
def printPreOrder(root): if (root is None): return print root.data printPreOrder(root.left) printPreOrder(root.right)
[ "def", "printPreOrder", "(", "root", ")", ":", "if", "(", "root", "is", "None", ")", ":", "return", "print", "root", ".", "data", "printPreOrder", "(", "root", ".", "left", ")", "printPreOrder", "(", "root", ".", "right", ")" ]
prints the tree in preorder .
train
false
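a self-contained usage sketch; Node is a minimal hypothetical class with the data/left/right attributes the snippet expects (the snippet's print statement is python 2):

class Node(object):
    def __init__(self, data, left=None, right=None):
        self.data = data
        self.left = left
        self.right = right

# builds the tree   1
#                  / \
#                 2   3
root = Node(1, Node(2), Node(3))
printPreOrder(root)  # prints 1, 2, 3 on separate lines (root, left, right)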
53,176
def _string_to_base64(string): utf8_encoded = string.encode('utf-8') return base64.urlsafe_b64encode(utf8_encoded)
[ "def", "_string_to_base64", "(", "string", ")", ":", "utf8_encoded", "=", "string", ".", "encode", "(", "'utf-8'", ")", "return", "base64", ".", "urlsafe_b64encode", "(", "utf8_encoded", ")" ]
encodes string to utf-8 and then base64 .
train
false
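a quick usage sketch; the utf-8 encoding happens before the url-safe base64 step:

print(_string_to_base64(u'hello'))  # -> 'aGVsbG8='
# non-ascii text is utf-8 encoded first, so the result encodes the utf-8 bytes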
53,177
def chunkify(sequence, chunksize): for i in range(0, len(sequence), chunksize): (yield sequence[i:(i + chunksize)])
[ "def", "chunkify", "(", "sequence", ",", "chunksize", ")", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "sequence", ")", ",", "chunksize", ")", ":", "(", "yield", "sequence", "[", "i", ":", "(", "i", "+", "chunksize", ")", "]", ")" ]
yield successive chunks from sequence .
train
false
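a usage sketch; the final chunk is simply shorter when the sequence length is not a multiple of chunksize:

print(list(chunkify([1, 2, 3, 4, 5], 2)))  # -> [[1, 2], [3, 4], [5]]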
53,178
def cdata(state, text, i, formats, user_data): name = state.tags[(-1)].name pat = cdata_close_pats[name] m = pat.search(text, i) fmt = formats[(u'title' if (name == u'title') else u'special')] if (m is None): return [((len(text) - i), fmt)] state.parse = IN_CLOSING_TAG num = (m.start() - i) add_tag_data(user_data, TagStart(m.start(), u'', name, True, True)) return [(num, fmt), (2, formats[u'end_tag']), ((len(m.group()) - 2), formats[u'tag_name'])]
[ "def", "cdata", "(", "state", ",", "text", ",", "i", ",", "formats", ",", "user_data", ")", ":", "name", "=", "state", ".", "tags", "[", "(", "-", "1", ")", "]", ".", "name", "pat", "=", "cdata_close_pats", "[", "name", "]", "m", "=", "pat", ".", "search", "(", "text", ",", "i", ")", "fmt", "=", "formats", "[", "(", "u'title'", "if", "(", "name", "==", "u'title'", ")", "else", "u'special'", ")", "]", "if", "(", "m", "is", "None", ")", ":", "return", "[", "(", "(", "len", "(", "text", ")", "-", "i", ")", ",", "fmt", ")", "]", "state", ".", "parse", "=", "IN_CLOSING_TAG", "num", "=", "(", "m", ".", "start", "(", ")", "-", "i", ")", "add_tag_data", "(", "user_data", ",", "TagStart", "(", "m", ".", "start", "(", ")", ",", "u''", ",", "name", ",", "True", ",", "True", ")", ")", "return", "[", "(", "num", ",", "fmt", ")", ",", "(", "2", ",", "formats", "[", "u'end_tag'", "]", ")", ",", "(", "(", "len", "(", "m", ".", "group", "(", ")", ")", "-", "2", ")", ",", "formats", "[", "u'tag_name'", "]", ")", "]" ]
cdata inside tags like <title> or <style> .
train
false
53,179
@facebook_required(canvas=True) def canvas(request, graph): signed_request_string = request.POST.get('signed_request') signed_request = {} if signed_request_string: signed_request = parse_signed_request(signed_request_string) likes = [] if graph: likes = graph.get('me/likes')['data'] context = {'signed_request': signed_request, 'likes': likes} return render(request, 'django_facebook/canvas.html', context)
[ "@", "facebook_required", "(", "canvas", "=", "True", ")", "def", "canvas", "(", "request", ",", "graph", ")", ":", "signed_request_string", "=", "request", ".", "POST", ".", "get", "(", "'signed_request'", ")", "signed_request", "=", "{", "}", "if", "signed_request_string", ":", "signed_request", "=", "parse_signed_request", "(", "signed_request_string", ")", "likes", "=", "[", "]", "if", "graph", ":", "likes", "=", "graph", ".", "get", "(", "'me/likes'", ")", "[", "'data'", "]", "context", "=", "{", "'signed_request'", ":", "signed_request", ",", "'likes'", ":", "likes", "}", "return", "render", "(", "request", ",", "'django_facebook/canvas.html'", ",", "context", ")" ]
example of a canvas page .
train
false
53,180
def test_aligned_mem_float(): a = arange(402, dtype=np.uint8) z = np.frombuffer(a.data, offset=2, count=100, dtype=float32) z.shape = (10, 10) eig(z, overwrite_a=True) eig(z.T, overwrite_a=True)
[ "def", "test_aligned_mem_float", "(", ")", ":", "a", "=", "arange", "(", "402", ",", "dtype", "=", "np", ".", "uint8", ")", "z", "=", "np", ".", "frombuffer", "(", "a", ".", "data", ",", "offset", "=", "2", ",", "count", "=", "100", ",", "dtype", "=", "float32", ")", "z", ".", "shape", "=", "(", "10", ",", "10", ")", "eig", "(", "z", ",", "overwrite_a", "=", "True", ")", "eig", "(", "z", ".", "T", ",", "overwrite_a", "=", "True", ")" ]
check linalg works with non-aligned memory .
train
false
53,181
def deepcopy_bound(name): def _deepcopy_method(x, memo): return type(x)(x.im_func, copy.deepcopy(x.im_self, memo), x.im_class) try: pre_dispatch = copy._deepcopy_dispatch copy._deepcopy_dispatch[types.MethodType] = _deepcopy_method ret = copy.deepcopy(name) finally: copy._deepcopy_dispatch = pre_dispatch return ret
[ "def", "deepcopy_bound", "(", "name", ")", ":", "def", "_deepcopy_method", "(", "x", ",", "memo", ")", ":", "return", "type", "(", "x", ")", "(", "x", ".", "im_func", ",", "copy", ".", "deepcopy", "(", "x", ".", "im_self", ",", "memo", ")", ",", "x", ".", "im_class", ")", "try", ":", "pre_dispatch", "=", "copy", ".", "_deepcopy_dispatch", "copy", ".", "_deepcopy_dispatch", "[", "types", ".", "MethodType", "]", "=", "_deepcopy_method", "ret", "=", "copy", ".", "deepcopy", "(", "name", ")", "finally", ":", "copy", ".", "_deepcopy_dispatch", "=", "pre_dispatch", "return", "ret" ]
compatibility helper function to allow copy.deepcopy of bound methods .
train
true
53,182
def addCollarShaft(collarThickness, derivation, negatives, positives, xmlElement): if (collarThickness <= 0.0): addShaft(derivation, negatives, positives) return connectionEnd = Vector3(0.0, 0.0, (derivation.pinionThickness + collarThickness)) collarDerivation = extrude.ExtrudeDerivation() collarDerivation.offsetPathDefault = [Vector3(0.0, 0.0, derivation.pinionThickness), connectionEnd] addCollarShaftSetDerivation(collarDerivation, collarThickness, derivation, negatives, positives, xmlElement)
[ "def", "addCollarShaft", "(", "collarThickness", ",", "derivation", ",", "negatives", ",", "positives", ",", "xmlElement", ")", ":", "if", "(", "collarThickness", "<=", "0.0", ")", ":", "addShaft", "(", "derivation", ",", "negatives", ",", "positives", ")", "return", "connectionEnd", "=", "Vector3", "(", "0.0", ",", "0.0", ",", "(", "derivation", ".", "pinionThickness", "+", "collarThickness", ")", ")", "collarDerivation", "=", "extrude", ".", "ExtrudeDerivation", "(", ")", "collarDerivation", ".", "offsetPathDefault", "=", "[", "Vector3", "(", "0.0", ",", "0.0", ",", "derivation", ".", "pinionThickness", ")", ",", "connectionEnd", "]", "addCollarShaftSetDerivation", "(", "collarDerivation", ",", "collarThickness", ",", "derivation", ",", "negatives", ",", "positives", ",", "xmlElement", ")" ]
add collar and shaft .
train
false
53,183
def parse_assigned_metadata(csvfile): with open(csvfile, 'rb') as f: for record in csv.reader(f): module = record[0] supported_by = record[1] status = [] if record[2]: status.append('stableinterface') if record[4]: status.append('deprecated') if record[5]: status.append('removed') if record[6]: status.append('tested') if ((not status) or record[3]): status.append('preview') (yield (module, {'version': '1.0', 'supported_by': supported_by, 'status': status}))
[ "def", "parse_assigned_metadata", "(", "csvfile", ")", ":", "with", "open", "(", "csvfile", ",", "'rb'", ")", "as", "f", ":", "for", "record", "in", "csv", ".", "reader", "(", "f", ")", ":", "module", "=", "record", "[", "0", "]", "supported_by", "=", "record", "[", "1", "]", "status", "=", "[", "]", "if", "record", "[", "2", "]", ":", "status", ".", "append", "(", "'stableinterface'", ")", "if", "record", "[", "4", "]", ":", "status", ".", "append", "(", "'deprecated'", ")", "if", "record", "[", "5", "]", ":", "status", ".", "append", "(", "'removed'", ")", "if", "record", "[", "6", "]", ":", "status", ".", "append", "(", "'tested'", ")", "if", "(", "(", "not", "status", ")", "or", "record", "[", "3", "]", ")", ":", "status", ".", "append", "(", "'preview'", ")", "(", "yield", "(", "module", ",", "{", "'version'", ":", "'1.0'", ",", "'supported_by'", ":", "supported_by", ",", "'status'", ":", "status", "}", ")", ")" ]
fields: :0: module name :1: supported_by string :2: stableinterface flag :3: preview flag :4: deprecated flag :5: removed flag :6: tested flag .
train
false
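a worked example of the column convention (file name and row values are hypothetical): a truthy column 2 yields stableinterface, and preview is added only when the status list is still empty or column 3 is set:

import csv

with open('metadata.csv', 'wb') as f:  # 'wb' to match the snippet's python 2 'rb'
    csv.writer(f).writerow(['ping', 'core', 'X', '', '', '', ''])

for module, meta in parse_assigned_metadata('metadata.csv'):
    print(module, meta)
# -> ('ping', {'version': '1.0', 'supported_by': 'core', 'status': ['stableinterface']})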
53,184
def get_includes(): root_dir = dirname(__file__) return [join(root_dir, 'graphics'), join(root_dir, 'tools', 'gles_compat'), join(root_dir, 'include')]
[ "def", "get_includes", "(", ")", ":", "root_dir", "=", "dirname", "(", "__file__", ")", "return", "[", "join", "(", "root_dir", ",", "'graphics'", ")", ",", "join", "(", "root_dir", ",", "'tools'", ",", "'gles_compat'", ")", ",", "join", "(", "root_dir", ",", "'include'", ")", "]" ]
retrieves the directories containing includes needed to build new cython modules with kivy as a dependency .
train
false
53,186
def load_crl(type, buffer): if isinstance(buffer, _text_type): buffer = buffer.encode('ascii') bio = _new_mem_buf(buffer) if (type == FILETYPE_PEM): crl = _lib.PEM_read_bio_X509_CRL(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) elif (type == FILETYPE_ASN1): crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) else: raise ValueError('type argument must be FILETYPE_PEM or FILETYPE_ASN1') if (crl == _ffi.NULL): _raise_current_error() result = CRL.__new__(CRL) result._crl = crl return result
[ "def", "load_crl", "(", "type", ",", "buffer", ")", ":", "if", "isinstance", "(", "buffer", ",", "_text_type", ")", ":", "buffer", "=", "buffer", ".", "encode", "(", "'ascii'", ")", "bio", "=", "_new_mem_buf", "(", "buffer", ")", "if", "(", "type", "==", "FILETYPE_PEM", ")", ":", "crl", "=", "_lib", ".", "PEM_read_bio_X509_CRL", "(", "bio", ",", "_ffi", ".", "NULL", ",", "_ffi", ".", "NULL", ",", "_ffi", ".", "NULL", ")", "elif", "(", "type", "==", "FILETYPE_ASN1", ")", ":", "crl", "=", "_lib", ".", "d2i_X509_CRL_bio", "(", "bio", ",", "_ffi", ".", "NULL", ")", "else", ":", "raise", "ValueError", "(", "'type argument must be FILETYPE_PEM or FILETYPE_ASN1'", ")", "if", "(", "crl", "==", "_ffi", ".", "NULL", ")", ":", "_raise_current_error", "(", ")", "result", "=", "CRL", ".", "__new__", "(", "CRL", ")", "result", ".", "_crl", "=", "crl", "return", "result" ]
load a certificate revocation list from a buffer .
train
true
53,187
def wait_for_build(obj, att=None, desired=None, callback=None, interval=None, attempts=None, verbose=None, verbose_atts=None): att = (att or 'status') desired = (desired or ['ACTIVE', 'ERROR', 'available', 'COMPLETED']) interval = (interval or 20) attempts = (attempts or 0) verbose_atts = (verbose_atts or 'progress') return wait_until(obj, att, desired, callback=callback, interval=interval, attempts=attempts, verbose=verbose, verbose_atts=verbose_atts)
[ "def", "wait_for_build", "(", "obj", ",", "att", "=", "None", ",", "desired", "=", "None", ",", "callback", "=", "None", ",", "interval", "=", "None", ",", "attempts", "=", "None", ",", "verbose", "=", "None", ",", "verbose_atts", "=", "None", ")", ":", "att", "=", "(", "att", "or", "'status'", ")", "desired", "=", "(", "desired", "or", "[", "'ACTIVE'", ",", "'ERROR'", ",", "'available'", ",", "'COMPLETED'", "]", ")", "interval", "=", "(", "interval", "or", "20", ")", "attempts", "=", "(", "attempts", "or", "0", ")", "verbose_atts", "=", "(", "verbose_atts", "or", "'progress'", ")", "return", "wait_until", "(", "obj", ",", "att", ",", "desired", ",", "callback", "=", "callback", ",", "interval", "=", "interval", ",", "attempts", "=", "attempts", ",", "verbose", "=", "verbose", ",", "verbose_atts", "=", "verbose_atts", ")" ]
designed to handle the most common use case for wait_until: an object whose status attribute will end up in either active or error state .
train
true
53,188
def get_default_flavor(): name = CONF.default_flavor return get_flavor_by_name(name)
[ "def", "get_default_flavor", "(", ")", ":", "name", "=", "CONF", ".", "default_flavor", "return", "get_flavor_by_name", "(", "name", ")" ]
get the default flavor .
train
false
53,190
def remove_option(file_name, section, option, separator='='): inifile = _Ini.get_ini_file(file_name, separator=separator) value = inifile.get(section, {}).pop(option, None) inifile.flush() return value
[ "def", "remove_option", "(", "file_name", ",", "section", ",", "option", ",", "separator", "=", "'='", ")", ":", "inifile", "=", "_Ini", ".", "get_ini_file", "(", "file_name", ",", "separator", "=", "separator", ")", "value", "=", "inifile", ".", "get", "(", "section", ",", "{", "}", ")", ".", "pop", "(", "option", ",", "None", ")", "inifile", ".", "flush", "(", ")", "return", "value" ]
remove a key/value pair from a section in an ini file .
train
false
53,192
def item_object_hook(dct): if (len(dct.keys()) > 1): return dct if ('S' in dct): return dct['S'] if ('N' in dct): return convert_num(dct['N']) if ('SS' in dct): return set(dct['SS']) if ('NS' in dct): return set(map(convert_num, dct['NS'])) if ('B' in dct): return convert_binary(dct['B']) if ('BS' in dct): return set(map(convert_binary, dct['BS'])) return dct
[ "def", "item_object_hook", "(", "dct", ")", ":", "if", "(", "len", "(", "dct", ".", "keys", "(", ")", ")", ">", "1", ")", ":", "return", "dct", "if", "(", "'S'", "in", "dct", ")", ":", "return", "dct", "[", "'S'", "]", "if", "(", "'N'", "in", "dct", ")", ":", "return", "convert_num", "(", "dct", "[", "'N'", "]", ")", "if", "(", "'SS'", "in", "dct", ")", ":", "return", "set", "(", "dct", "[", "'SS'", "]", ")", "if", "(", "'NS'", "in", "dct", ")", ":", "return", "set", "(", "map", "(", "convert_num", ",", "dct", "[", "'NS'", "]", ")", ")", "if", "(", "'B'", "in", "dct", ")", ":", "return", "convert_binary", "(", "dct", "[", "'B'", "]", ")", "if", "(", "'BS'", "in", "dct", ")", ":", "return", "set", "(", "map", "(", "convert_binary", ",", "dct", "[", "'BS'", "]", ")", ")", "return", "dct" ]
a custom object hook for use when decoding json item bodies .
train
true
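a hypothetical usage sketch with json.loads; convert_num and convert_binary are helpers defined elsewhere in the module, so only the string and string-set types are exercised here:

import json

body = '{"name": {"S": "widget"}, "tags": {"SS": ["a", "b"]}}'
print(json.loads(body, object_hook=item_object_hook))
# -> {u'name': u'widget', u'tags': set([u'a', u'b'])}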
53,193
def interkey_interval(): return ((random.lognormvariate(0.0, 0.5) * 30.0) / 1000.0) return (float(random.randrange(10, 50)) / 1000.0)
[ "def", "interkey_interval", "(", ")", ":", "return", "(", "(", "random", ".", "lognormvariate", "(", "0.0", ",", "0.5", ")", "*", "30.0", ")", "/", "1000.0", ")", "return", "(", "float", "(", "random", ".", "randrange", "(", "10", ",", "50", ")", ")", "/", "1000.0", ")" ]
a random inter-key interval, drawn in milliseconds and returned in seconds .
train
false
53,196
def csvread(infile): out_csv = [] errors = [] index = (-1) p = LineParser(csv=True) for line in infile: index += 1 try: values = p.feed(line) except ListQuoteError as e: values = [] e.line = line e.index = index errors.append(e) out_csv.append(values) if errors: e = CSVError("Parsing CSV failed. See 'errors' attribute.") e.csv = out_csv e.errors = errors raise e return out_csv
[ "def", "csvread", "(", "infile", ")", ":", "out_csv", "=", "[", "]", "errors", "=", "[", "]", "index", "=", "(", "-", "1", ")", "p", "=", "LineParser", "(", "csv", "=", "True", ")", "for", "line", "in", "infile", ":", "index", "+=", "1", "try", ":", "values", "=", "p", ".", "feed", "(", "line", ")", "except", "ListQuoteError", "as", "e", ":", "values", "=", "[", "]", "e", ".", "line", "=", "line", "e", ".", "index", "=", "index", "errors", ".", "append", "(", "e", ")", "out_csv", ".", "append", "(", "values", ")", "if", "errors", ":", "e", "=", "CSVError", "(", "\"Parsing CSV failed. See 'errors' attribute.\"", ")", "e", ".", "csv", "=", "out_csv", "e", ".", "errors", "=", "errors", "raise", "e", "return", "out_csv" ]
given an infile as an iterable of lines, parse each line as csv and collect any list-quote errors .
train
false
53,199
def bump_cache_for_product(product, shop=None): if (not shop): from shuup.core.models import ShopProduct for sp in ShopProduct.objects.filter(product_id=product.id): bump_cache_for_shop_product(sp) else: shop_product = product.get_shop_instance(shop=shop, allow_cache=False) bump_cache_for_shop_product(shop_product)
[ "def", "bump_cache_for_product", "(", "product", ",", "shop", "=", "None", ")", ":", "if", "(", "not", "shop", ")", ":", "from", "shuup", ".", "core", ".", "models", "import", "ShopProduct", "for", "sp", "in", "ShopProduct", ".", "objects", ".", "filter", "(", "product_id", "=", "product", ".", "id", ")", ":", "bump_cache_for_shop_product", "(", "sp", ")", "else", ":", "shop_product", "=", "product", ".", "get_shop_instance", "(", "shop", "=", "shop", ",", "allow_cache", "=", "False", ")", "bump_cache_for_shop_product", "(", "shop_product", ")" ]
bump cache for a product ; if shop is not given, caches for all the shop products of the product are bumped .
train
false
53,201
def add_port(zone, port, permanent=True): if (not get_masquerade(zone)): add_masquerade(zone) cmd = '--zone={0} --add-port={1}'.format(zone, port) if permanent: cmd += ' --permanent' return __firewall_cmd(cmd)
[ "def", "add_port", "(", "zone", ",", "port", ",", "permanent", "=", "True", ")", ":", "if", "(", "not", "get_masquerade", "(", "zone", ")", ")", ":", "add_masquerade", "(", "zone", ")", "cmd", "=", "'--zone={0} --add-port={1}'", ".", "format", "(", "zone", ",", "port", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "return", "__firewall_cmd", "(", "cmd", ")" ]
allow specific ports in a zone .
train
true
53,202
def _get_stack_events(h_client, stack_id, event_args): event_args['stack_id'] = stack_id event_args['resource_name'] = None try: events = h_client.events.list(**event_args) except exc.HTTPNotFound as ex: raise exc.CommandError(str(ex)) else: for event in events: event.stack_name = stack_id.split('/')[0] return events
[ "def", "_get_stack_events", "(", "h_client", ",", "stack_id", ",", "event_args", ")", ":", "event_args", "[", "'stack_id'", "]", "=", "stack_id", "event_args", "[", "'resource_name'", "]", "=", "None", "try", ":", "events", "=", "h_client", ".", "events", ".", "list", "(", "**", "event_args", ")", "except", "exc", ".", "HTTPNotFound", "as", "ex", ":", "raise", "exc", ".", "CommandError", "(", "str", "(", "ex", ")", ")", "else", ":", "for", "event", "in", "events", ":", "event", ".", "stack_name", "=", "stack_id", ".", "split", "(", "'/'", ")", "[", "0", "]", "return", "events" ]
get events for a stack .
train
false
53,203
def test_add_background_image(): import matplotlib.pyplot as plt rng = np.random.RandomState(0) (f, axs) = plt.subplots(1, 2) (x, y) = rng.randn(2, 10) im = rng.randn(10, 10) axs[0].scatter(x, y) axs[1].scatter(y, x) for ax in axs: ax.set_aspect(1) ax_im = add_background_image(f, im) assert_true((ax_im.get_aspect() == 'auto')) for ax in axs: assert_true((ax.get_aspect() == 1)) ax_im_asp = add_background_image(f, im, set_ratios='auto') assert_true((ax_im_asp.get_aspect() == 'auto')) for ax in axs: assert_true((ax.get_aspect() == 'auto')) assert_true((add_background_image(f, None) is None))
[ "def", "test_add_background_image", "(", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "(", "f", ",", "axs", ")", "=", "plt", ".", "subplots", "(", "1", ",", "2", ")", "(", "x", ",", "y", ")", "=", "rng", ".", "randn", "(", "2", ",", "10", ")", "im", "=", "rng", ".", "randn", "(", "10", ",", "10", ")", "axs", "[", "0", "]", ".", "scatter", "(", "x", ",", "y", ")", "axs", "[", "1", "]", ".", "scatter", "(", "y", ",", "x", ")", "for", "ax", "in", "axs", ":", "ax", ".", "set_aspect", "(", "1", ")", "ax_im", "=", "add_background_image", "(", "f", ",", "im", ")", "assert_true", "(", "(", "ax_im", ".", "get_aspect", "(", ")", "==", "'auto'", ")", ")", "for", "ax", "in", "axs", ":", "assert_true", "(", "(", "ax", ".", "get_aspect", "(", ")", "==", "1", ")", ")", "ax_im_asp", "=", "add_background_image", "(", "f", ",", "im", ",", "set_ratios", "=", "'auto'", ")", "assert_true", "(", "(", "ax_im_asp", ".", "get_aspect", "(", ")", "==", "'auto'", ")", ")", "for", "ax", "in", "axs", ":", "assert_true", "(", "(", "ax", ".", "get_aspect", "(", ")", "==", "'auto'", ")", ")", "assert_true", "(", "(", "add_background_image", "(", "f", ",", "None", ")", "is", "None", ")", ")" ]
test adding background image to a figure .
train
false
53,204
def CloneConfigurationForDeviceAndEmulator(target_dicts): if _HasIOSTarget(target_dicts): return _AddIOSDeviceConfigurations(target_dicts) return target_dicts
[ "def", "CloneConfigurationForDeviceAndEmulator", "(", "target_dicts", ")", ":", "if", "_HasIOSTarget", "(", "target_dicts", ")", ":", "return", "_AddIOSDeviceConfigurations", "(", "target_dicts", ")", "return", "target_dicts" ]
if |target_dicts| contains any ios targets, clone their configurations for both device and emulator builds .
train
false
53,206
def wait_for_fork(pid, raise_error=True, expected_exitcode=0): rc = 0 try: (pid, rc) = os.waitpid(pid, 0) rc = os.WEXITSTATUS(rc) if (rc != expected_exitcode): raise RuntimeError(('The exit code %d is not %d' % (rc, expected_exitcode))) except Exception: if raise_error: raise return rc
[ "def", "wait_for_fork", "(", "pid", ",", "raise_error", "=", "True", ",", "expected_exitcode", "=", "0", ")", ":", "rc", "=", "0", "try", ":", "(", "pid", ",", "rc", ")", "=", "os", ".", "waitpid", "(", "pid", ",", "0", ")", "rc", "=", "os", ".", "WEXITSTATUS", "(", "rc", ")", "if", "(", "rc", "!=", "expected_exitcode", ")", ":", "raise", "RuntimeError", "(", "(", "'The exit code %d is not %d'", "%", "(", "rc", ",", "expected_exitcode", ")", ")", ")", "except", "Exception", ":", "if", "raise_error", ":", "raise", "return", "rc" ]
wait for a process to complete: this function will wait for the given pid to complete .
train
false
53,207
def getRank(width): return int(math.floor((3.0 * math.log10(width))))
[ "def", "getRank", "(", "width", ")", ":", "return", "int", "(", "math", ".", "floor", "(", "(", "3.0", "*", "math", ".", "log10", "(", "width", ")", ")", ")", ")" ]
get the rank, which is 0 at width 1 and increases by three with every power of ten .
train
false
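a few worked values for the formula floor(3 * log10(width)):

print(getRank(1))    # 3 * log10(1)   = 0.0   -> 0
print(getRank(10))   # 3 * log10(10)  = 3.0   -> 3
print(getRank(50))   # 3 * 1.69897... = 5.09  -> 5
print(getRank(100))  # 3 * log10(100) = 6.0   -> 6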
53,211
def _get_kwargs(profile=None, **connection_args): if profile: prefix = (profile + ':keystone.') else: prefix = 'keystone.' def get(key, default=None): '\n look in connection_args first, then default to config file\n ' return connection_args.get(('connection_' + key), __salt__['config.get']((prefix + key), default)) user = get('user', 'admin') password = get('password', 'ADMIN') tenant = get('tenant', 'admin') tenant_id = get('tenant_id') auth_url = get('auth_url', 'http://127.0.0.1:35357/v2.0/') insecure = get('insecure', False) token = get('token') endpoint = get('endpoint', 'http://127.0.0.1:35357/v2.0') if token: kwargs = {'token': token, 'endpoint': endpoint} else: kwargs = {'username': user, 'password': password, 'tenant_name': tenant, 'tenant_id': tenant_id, 'auth_url': auth_url} if insecure: kwargs['insecure'] = True return kwargs
[ "def", "_get_kwargs", "(", "profile", "=", "None", ",", "**", "connection_args", ")", ":", "if", "profile", ":", "prefix", "=", "(", "profile", "+", "':keystone.'", ")", "else", ":", "prefix", "=", "'keystone.'", "def", "get", "(", "key", ",", "default", "=", "None", ")", ":", "return", "connection_args", ".", "get", "(", "(", "'connection_'", "+", "key", ")", ",", "__salt__", "[", "'config.get'", "]", "(", "(", "prefix", "+", "key", ")", ",", "default", ")", ")", "user", "=", "get", "(", "'user'", ",", "'admin'", ")", "password", "=", "get", "(", "'password'", ",", "'ADMIN'", ")", "tenant", "=", "get", "(", "'tenant'", ",", "'admin'", ")", "tenant_id", "=", "get", "(", "'tenant_id'", ")", "auth_url", "=", "get", "(", "'auth_url'", ",", "'http://127.0.0.1:35357/v2.0/'", ")", "insecure", "=", "get", "(", "'insecure'", ",", "False", ")", "token", "=", "get", "(", "'token'", ")", "endpoint", "=", "get", "(", "'endpoint'", ",", "'http://127.0.0.1:35357/v2.0'", ")", "if", "token", ":", "kwargs", "=", "{", "'token'", ":", "token", ",", "'endpoint'", ":", "endpoint", "}", "else", ":", "kwargs", "=", "{", "'username'", ":", "user", ",", "'password'", ":", "password", ",", "'tenant_name'", ":", "tenant", ",", "'tenant_id'", ":", "tenant_id", ",", "'auth_url'", ":", "auth_url", "}", "if", "insecure", ":", "kwargs", "[", "'insecure'", "]", "=", "True", "return", "kwargs" ]
get connection args .
train
true
53,212
def _make_df_square(table): if (not isinstance(table, pd.DataFrame)): return table if (table.shape[0] != table.shape[1]): ix = list((set(table.index) | set(table.columns))) table = table.reindex(ix, axis=0) table = table.reindex(ix, axis=1) table = table.reindex(table.columns) return table
[ "def", "_make_df_square", "(", "table", ")", ":", "if", "(", "not", "isinstance", "(", "table", ",", "pd", ".", "DataFrame", ")", ")", ":", "return", "table", "if", "(", "table", ".", "shape", "[", "0", "]", "!=", "table", ".", "shape", "[", "1", "]", ")", ":", "ix", "=", "list", "(", "(", "set", "(", "table", ".", "index", ")", "|", "set", "(", "table", ".", "columns", ")", ")", ")", "table", "=", "table", ".", "reindex", "(", "ix", ",", "axis", "=", "0", ")", "table", "=", "table", ".", "reindex", "(", "ix", ",", "axis", "=", "1", ")", "table", "=", "table", ".", "reindex", "(", "table", ".", "columns", ")", "return", "table" ]
reindex a pandas dataframe so that it becomes square .
train
false
53,213
def do_one(*brules): def do_one_brl(expr): yielded = False for brl in brules: for nexpr in brl(expr): yielded = True (yield nexpr) if yielded: return return do_one_brl
[ "def", "do_one", "(", "*", "brules", ")", ":", "def", "do_one_brl", "(", "expr", ")", ":", "yielded", "=", "False", "for", "brl", "in", "brules", ":", "for", "nexpr", "in", "brl", "(", "expr", ")", ":", "yielded", "=", "True", "(", "yield", "nexpr", ")", "if", "yielded", ":", "return", "return", "do_one_brl" ]
execute one of the branching rules .
train
false
53,214
def is_foreground_light(color): return (rgb_to_hsl(*parse_color(color)[:3])[2] < 17.9)
[ "def", "is_foreground_light", "(", "color", ")", ":", "return", "(", "rgb_to_hsl", "(", "*", "parse_color", "(", "color", ")", "[", ":", "3", "]", ")", "[", "2", "]", "<", "17.9", ")" ]
determine if the background color needs a light or dark foreground color .
train
false
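a hedged usage sketch, assuming the pygal-style helpers parse_color and rgb_to_hsl (with lightness on a 0-100 scale) are importable alongside the function:

print(is_foreground_light('#000000'))  # -> True: near-black background wants light text
print(is_foreground_light('#ffffff'))  # -> False: white background wants dark text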
53,217
def job_cancel(context, data_dict): _check_access(u'job_cancel', context, data_dict) id = _get_or_bust(data_dict, u'id') try: jobs.job_from_id(id).delete() log.info(u'Cancelled background job {}'.format(id)) except KeyError: raise NotFound
[ "def", "job_cancel", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "u'job_cancel'", ",", "context", ",", "data_dict", ")", "id", "=", "_get_or_bust", "(", "data_dict", ",", "u'id'", ")", "try", ":", "jobs", ".", "job_from_id", "(", "id", ")", ".", "delete", "(", ")", "log", ".", "info", "(", "u'Cancelled background job {}'", ".", "format", "(", "id", ")", ")", "except", "KeyError", ":", "raise", "NotFound" ]
cancel a queued background job .
train
false
53,218
def mutUniformInt(individual, low, up, indpb): size = len(individual) if (not isinstance(low, Sequence)): low = repeat(low, size) elif (len(low) < size): raise IndexError(('low must be at least the size of individual: %d < %d' % (len(low), size))) if (not isinstance(up, Sequence)): up = repeat(up, size) elif (len(up) < size): raise IndexError(('up must be at least the size of individual: %d < %d' % (len(up), size))) for (i, xl, xu) in zip(xrange(size), low, up): if (random.random() < indpb): individual[i] = random.randint(xl, xu) return (individual,)
[ "def", "mutUniformInt", "(", "individual", ",", "low", ",", "up", ",", "indpb", ")", ":", "size", "=", "len", "(", "individual", ")", "if", "(", "not", "isinstance", "(", "low", ",", "Sequence", ")", ")", ":", "low", "=", "repeat", "(", "low", ",", "size", ")", "elif", "(", "len", "(", "low", ")", "<", "size", ")", ":", "raise", "IndexError", "(", "(", "'low must be at least the size of individual: %d < %d'", "%", "(", "len", "(", "low", ")", ",", "size", ")", ")", ")", "if", "(", "not", "isinstance", "(", "up", ",", "Sequence", ")", ")", ":", "up", "=", "repeat", "(", "up", ",", "size", ")", "elif", "(", "len", "(", "up", ")", "<", "size", ")", ":", "raise", "IndexError", "(", "(", "'up must be at least the size of individual: %d < %d'", "%", "(", "len", "(", "up", ")", ",", "size", ")", ")", ")", "for", "(", "i", ",", "xl", ",", "xu", ")", "in", "zip", "(", "xrange", "(", "size", ")", ",", "low", ",", "up", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "indpb", ")", ":", "individual", "[", "i", "]", "=", "random", ".", "randint", "(", "xl", ",", "xu", ")", "return", "(", "individual", ",", ")" ]
mutate an individual by replacing attributes, with probability indpb, by an integer uniformly drawn between low and up .
train
false
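a deap-style usage sketch; with probability indpb each position is independently replaced by a uniform integer in [low, up], and the individual is mutated in place:

import random

random.seed(42)
individual = [5, 5, 5, 5, 5]
mutant, = mutUniformInt(individual, low=0, up=10, indpb=0.5)
print(mutant is individual)  # -> True: mutation happens in place
print(mutant)                # some positions replaced by random ints in [0, 10]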
53,219
def get_all_grouped_distances(dist_matrix_header, dist_matrix, mapping_header, mapping, field, within=True, suppress_symmetry_and_hollowness_check=False): distances = get_grouped_distances(dist_matrix_header, dist_matrix, mapping_header, mapping, field, within, suppress_symmetry_and_hollowness_check) results = [] for group in distances: for distance in group[2]: results.append(distance) return results
[ "def", "get_all_grouped_distances", "(", "dist_matrix_header", ",", "dist_matrix", ",", "mapping_header", ",", "mapping", ",", "field", ",", "within", "=", "True", ",", "suppress_symmetry_and_hollowness_check", "=", "False", ")", ":", "distances", "=", "get_grouped_distances", "(", "dist_matrix_header", ",", "dist_matrix", ",", "mapping_header", ",", "mapping", ",", "field", ",", "within", ",", "suppress_symmetry_and_hollowness_check", ")", "results", "=", "[", "]", "for", "group", "in", "distances", ":", "for", "distance", "in", "group", "[", "2", "]", ":", "results", ".", "append", "(", "distance", ")", "return", "results" ]
returns a list of distances for either samples within each of the field values or between each of the field values for the specified field .
train
false
53,221
def nl2p(s): return u'\n'.join(((u'<p>%s</p>' % p) for p in _par_re.split(s)))
[ "def", "nl2p", "(", "s", ")", ":", "return", "u'\\n'", ".", "join", "(", "(", "(", "u'<p>%s</p>'", "%", "p", ")", "for", "p", "in", "_par_re", ".", "split", "(", "s", ")", ")", ")" ]
add paragraphs to a text .
train
false
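a hedged usage sketch; _par_re is module state not shown in the snippet, assumed here to split on blank lines:

import re
_par_re = re.compile(u'\n{2,}')  # assumption about the module-level splitter

print(nl2p(u'first paragraph\n\nsecond paragraph'))
# -> u'<p>first paragraph</p>\n<p>second paragraph</p>'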