Column                 Type    Values
id_within_dataset      int64   1 to 55.5k
snippet                string  lengths 19 to 14.2k
tokens                 list    lengths 6 to 1.63k
nl                     string  lengths 6 to 352
split_within_dataset   string  1 distinct value
is_duplicated          bool    2 classes
48,688
def lfsr_autocorrelation(L, P, k):
    if not isinstance(L, list):
        raise TypeError('L (=%s) must be a list' % L)
    P = int(P)
    k = int(k)
    L0 = L[:P]
    L1 = L0 + L0[:k]
    L2 = [(-1) ** (L1[i].to_int() + L1[i + k].to_int()) for i in range(P)]
    tot = sum(L2)
    return Rational(tot, P)
this function computes the lfsr autocorrelation function .
train
false
48,691
def is_server_error(status):
    return 500 <= status <= 599
check if http status code is server error .
train
false
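A minimal behavior check for the predicate above, assuming it is in scope:

assert is_server_error(503)       # 5xx codes are server errors
assert not is_server_error(404)   # 4xx codes are not
assert not is_server_error(200)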
48,692
@py3compat.doctest_refactor_print
def extract_vars(*names, **kw):
    depth = kw.get('depth', 0)
    callerNS = sys._getframe(depth + 1).f_locals
    return dict((k, callerNS[k]) for k in names)
extract a set of variables by name from another frame .
train
true
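For illustration, a hypothetical caller showing what extract_vars returns; with the default depth of 0, sys._getframe(1) resolves to the immediate caller's frame:

def demo():
    a, b = 1, 'two'
    return extract_vars('a', 'b')

demo()  # expected: {'a': 1, 'b': 'two'}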
48,693
def current_metadata(items):
    assert items
    likelies = {}
    consensus = {}
    fields = ['artist', 'album', 'albumartist', 'year', 'disctotal',
              'mb_albumid', 'label', 'catalognum', 'country', 'media',
              'albumdisambig']
    for field in fields:
        values = [item[field] for item in items if item]
        likelies[field], freq = plurality(values)
        consensus[field] = (freq == len(values))
    if consensus['albumartist'] and likelies['albumartist']:
        likelies['artist'] = likelies['albumartist']
    return likelies, consensus
extract the likely current metadata for an album given a list of its items .
train
false
48,695
def p_function_definition_3(t):
    pass
function_definition : declarator compound_statement .
train
false
48,697
@ssl_required
def aaq_step2(request, product_key):
    return aaq(request, product_key=product_key, step=1)
step 2: the product is selected .
train
false
48,698
@handle_response_format
@treeio_login_required
def receivable_add(request, response_format='html'):
    if request.POST:
        if 'cancel' not in request.POST:
            receivable = Liability()
            form = ReceivableForm(request.user.profile, request.POST,
                                  instance=receivable)
            if form.is_valid():
                receivable = form.save(commit=False)
                receivable.target = receivable.account.owner
                convert(receivable, 'value')
                receivable.set_user_from_request(request)
                return HttpResponseRedirect(reverse('finance_receivable_view',
                                                    args=[receivable.id]))
        else:
            return HttpResponseRedirect(reverse('finance_index_receivables'))
    else:
        form = ReceivableForm(request.user.profile)
    return render_to_response('finance/receivable_add', {'form': form},
                              context_instance=RequestContext(request),
                              response_format=response_format)
new receivable form .
train
false
48,699
def format_remote_path(host, path):
    if host is None:
        return path
    return '%s:%s' % (safe_ip_format(host), path)
returns remote path in format acceptable for scp/rsync .
train
false
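A usage sketch, assuming safe_ip_format returns a plain hostname unchanged (its usual role is bracketing IPv6 literals):

format_remote_path(None, '/var/lib/image')     # '/var/lib/image' (local)
format_remote_path('node1', '/var/lib/image')  # 'node1:/var/lib/image'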
48,701
def typed_ordered_dict(key_type, value_type, default):
    return lambda setting: OrderedDict(
        (key_type(StringConverter(key)),
         value_type(StringConverter(value)) if value != '' else default)
        for (key, value) in OrderedDict(setting).items())
creates a function that converts a setting into an ordered dict with the given types .
train
false
48,702
def get_file_writer(file_handle, do_gzip, do_bzip):
    ofile = None
    if do_gzip and do_bzip:
        raise ValueError(u'Cannot specify both bzip and gzip compression!')
    if do_gzip:
        ofile = gzip.GzipFile(fileobj=file_handle, mode=u'w')
    elif do_bzip:
        ofile = bz2file.open(file_handle, mode=u'w')
    else:
        ofile = file_handle
    return ofile
generate and return a file object with specified compression .
train
false
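A minimal usage sketch; gzip is stdlib, while bz2file is assumed to be the third-party module the snippet references:

import gzip

with open('out.gz', 'wb') as handle:
    writer = get_file_writer(handle, do_gzip=True, do_bzip=False)
    writer.write(b'some data')
    writer.close()  # flush the gzip stream before the handle closes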
48,703
def get_unique(content_type, object_pk, name=None, request=None,
               ip=None, user_agent=None, user=None):
    if request:
        if request.user.is_authenticated():
            user = request.user
            ip = user_agent = None
        else:
            user = None
            ip = request.META.get('REMOTE_ADDR', '')
            user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]
    hash_text = '\n'.join(unicode(x).encode('utf-8')
                          for x in (content_type.pk, object_pk, name or '',
                                    ip, user_agent, (user and user.pk) or 'None'))
    unique_hash = hashlib.md5(hash_text).hexdigest()
    return (user, ip, user_agent, unique_hash)
extract a set of unique identifiers from the request .
train
false
48,704
def test_cache_deactivated_private_browsing(config_stub, tmpdir):
    config_stub.data = {'storage': {'cache-size': 1024},
                        'general': {'private-browsing': True}}
    disk_cache = cache.DiskCache(str(tmpdir))
    metadata = QNetworkCacheMetaData()
    metadata.setUrl(QUrl('http://www.example.com/'))
    assert metadata.isValid()
    assert disk_cache.prepare(metadata) is None
test if cache is deactivated in private-browsing mode .
train
false
48,705
def _FieldSkipper():
    WIRETYPE_TO_SKIPPER = [
        _SkipVarint, _SkipFixed64, _SkipLengthDelimited, _SkipGroup,
        _EndGroup, _SkipFixed32, _RaiseInvalidWireType, _RaiseInvalidWireType]
    wiretype_mask = wire_format.TAG_TYPE_MASK
    local_ord = ord

    def SkipField(buffer, pos, end, tag_bytes):
        """Skips a field with the specified tag.

        |pos| should point to the byte immediately after the tag.

        Returns:
          The new position (after the tag value), or -1 if the tag is an
          end-group tag (in which case the calling loop should break).
        """
        wire_type = local_ord(tag_bytes[0]) & wiretype_mask
        return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

    return SkipField
constructs the skipfield function .
train
true
48,706
def transform_params(parameters):
    transformed_parameters = {}
    for (key, value) in iteritems(parameters):
        if isinstance(value, (list, tuple, set)):
            value = [convert_boolean(param) for param in value]
            transformed_parameters[format_name(key)] = value
        elif value is not None:
            transformed_parameters[format_name(key)] = convert_boolean(value)
    return transformed_parameters
transform parameters .
train
false
48,707
def _checkNetmask(netmask, masklen):
    num = long(netmask)
    bits = masklen
    while (num & 1) == 0:
        num = num >> 1
        bits -= 1
        if bits == 0:
            break
    while bits > 0:
        if (num & 1) == 0:
            raise ValueError("Netmask %s can't be expressed as an prefix."
                             % hex(netmask))
        num = num >> 1
        bits -= 1
checks if a netmask is expressible as a prefixlen .
train
false
48,708
def find_element_by_sizzle(driver, sizzle_selector):
    elements = driver.find_elements_by_sizzle(sizzle_selector)
    if elements:
        return elements[0]
    else:
        raise NoSuchElementException(
            'Unable to locate element by Sizzle: {selector}'.format(
                selector=sizzle_selector))
finds an element by sizzle selector .
train
false
48,709
def garbagecollect(func):
    def inner(*args, **kwargs):
        result = func(*args, **kwargs)
        gc.collect()
        return result
    return inner
decorate a function to invoke the garbage collector after each execution .
train
false
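Usage follows the ordinary decorator pattern; each call to the wrapped function triggers a gc.collect() after it returns:

import gc

@garbagecollect
def build_big_list(n):
    return list(range(n))

data = build_big_list(10)  # collection runs right after the call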
48,710
def _latexFromHtml(col, latex):
    latex = re.sub('<br( /)?>|<div>', '\n', latex)
    latex = stripHTML(latex)
    return latex
convert entities and fix newlines .
train
false
48,711
def randcplx(offset=-1):
    return (_randrat() + I * _randrat()) + I * (1 + offset)
polys is not good with real coefficients .
train
false
48,712
@with_sitl
def test_227(connpath):
    vehicle = connect(connpath, wait_ready=True)

    def assert_commands(count):
        vehicle.commands.download()
        vehicle.commands.wait_ready()
        assert_equals(len(vehicle.commands), count)

    assert_commands(0)
    vehicle.commands.add(
        Command(0, 0, 0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
                mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
                0, 0, 0, 0, 0, 0, 10, 10, 10))
    vehicle.flush()
    assert_commands(1)
tests race condition when downloading items .
train
false
48,713
def kulsinski(u, v):
    u = _validate_vector(u)
    v = _validate_vector(v)
    n = float(len(u))
    (nff, nft, ntf, ntt) = _nbool_correspond_all(u, v)
    return ((ntf + nft) - ntt + n) / ((ntf + nft) + n)
computes the kulsinski dissimilarity between two boolean 1-d arrays .
train
false
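A worked instance of the formula: for u = [True, False, True] and v = [True, True, False], the counts are ntt = 1 (both true), ntf = 1, nft = 1, n = 3, so the dissimilarity is ((1 + 1) - 1 + 3) / ((1 + 1) + 3) = 0.8:

ntt, ntf, nft, n = 1, 1, 1, 3.0
assert ((ntf + nft) - ntt + n) / ((ntf + nft) + n) == 0.8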
48,714
def get_swap(transformed_list):
    if not all(isinstance(device, DriverSwapBlockDevice) or
               ('swap_size' in device)
               for device in transformed_list):
        return None
    try:
        return transformed_list.pop()
    except IndexError:
        return None
get the swap device out of the list context .
train
false
48,717
def _read_ustring(fid, n_bytes):
    return np.fromfile(fid, '>B', n_bytes)
read unsigned character string .
train
false
48,718
@task
def sdist(ctx, deploy=False, remove_dist=False):
    clean(ctx, remove_dist, create_dirs=True)
    ctx.run('python setup.py sdist' + (' register upload' if deploy else ''))
    announce()
create source distribution .
train
false
48,719
def testLinkLimit(net, bw):
    info('*** Testing network %.2f Mbps bandwidth limit\n' % bw)
    net.iperf()
run bandwidth limit test .
train
false
48,720
@hug.get()
def hello(request):
    return 'Hello World!'
return a friendly http greeting .
train
false
48,721
def apply_features(feature_func, toks, labeled=None):
    if labeled is None:
        labeled = toks and isinstance(toks[0], (tuple, list))
    if labeled:
        def lazy_func(labeled_token):
            return (feature_func(labeled_token[0]), labeled_token[1])
        return LazyMap(lazy_func, toks)
    else:
        return LazyMap(feature_func, toks)
use the lazymap class to construct a lazy list-like object that is analogous to map .
train
false
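A sketch with a toy feature extractor; when the tokens are (token, label) pairs, the feature function sees only the token and the label passes through lazily:

def feats(word):
    return {'len': len(word), 'first': word[0]}

toks = [('dog', 'N'), ('ran', 'V')]
lazy = apply_features(feats, toks)  # labeled inferred from the tuple input
list(lazy)[0]                       # ({'len': 3, 'first': 'd'}, 'N')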
48,722
def ZigZagDecode(value):
    if not (value & 1):
        return value >> 1
    return (value >> 1) ^ (~0)
inverse of zigzagencode() .
train
false
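The function halves the value and restores the sign from the low bit, undoing the standard zigzag mapping 0, 1, 2, 3, 4 -> 0, -1, 1, -2, 2. A quick check, assuming it is in scope:

for encoded, expected in [(0, 0), (1, -1), (2, 1), (3, -2), (4, 2)]:
    assert ZigZagDecode(encoded) == expected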
48,724
@log_call
def task_get_all(context, filters=None, marker=None, limit=None,
                 sort_key='created_at', sort_dir='desc'):
    _task_soft_delete(context)
    filters = filters or {}
    tasks = DATA['tasks'].values()
    tasks = _filter_tasks(tasks, filters, context)
    tasks = _sort_tasks(tasks, sort_key, sort_dir)
    tasks = _paginate_tasks(context, tasks, marker, limit,
                            filters.get('deleted'))
    filtered_tasks = []
    for task in tasks:
        filtered_tasks.append(_format_task_from_db(task, task_info_ref=None))
    return filtered_tasks
get all tasks that match zero or more filters .
train
false
48,725
@task(name='unit')
def unittest(ctx, args='', options=''):
    pytest(ctx, args, options)
run unit tests .
train
false
48,726
def deconstructible(*args, **kwargs):
    path = kwargs.pop('path', None)

    def decorator(klass):
        def __new__(cls, *args, **kwargs):
            obj = super(klass, cls).__new__(cls)
            obj._constructor_args = (args, kwargs)
            return obj

        def deconstruct(obj):
            """
            Returns a 3-tuple of class import path, positional arguments,
            and keyword arguments.
            """
            if path:
                (module_name, _, name) = path.rpartition('.')
            else:
                module_name = obj.__module__
                name = obj.__class__.__name__
            module = import_module(module_name)
            if not hasattr(module, name):
                raise ValueError(
                    'Could not find object %s in %s.\n'
                    'Please note that you cannot serialize things like inner '
                    'classes. Please move the object into the main module '
                    'body to use migrations.\n'
                    'For more information, see '
                    'https://docs.djangoproject.com/en/%s/topics/migrations/'
                    '#serializing-values'
                    % (name, module_name, get_docs_version()))
            return (
                path or '%s.%s' % (obj.__class__.__module__, name),
                obj._constructor_args[0],
                obj._constructor_args[1],
            )

        klass.__new__ = staticmethod(__new__)
        klass.deconstruct = deconstruct
        return klass

    if not args:
        return decorator
    return decorator(*args, **kwargs)
class decorator that allows the decorated class to be serialized by the migrations subsystem .
train
false
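A sketch of the round trip this enables in Django migrations, assuming the class is defined at module top level (the Multiplier class here is hypothetical):

@deconstructible
class Multiplier(object):
    def __init__(self, factor):
        self.factor = factor

path, args, kwargs = Multiplier(3).deconstruct()
# path is Multiplier's import path, args == (3,), kwargs == {}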
48,728
def scharr_v(image, mask=None):
    assert_nD(image, 2)
    image = img_as_float(image)
    result = convolve(image, VSCHARR_WEIGHTS)
    return _mask_filter_result(result, mask)
find the vertical edges of an image using the scharr transform .
train
false
48,729
def check_secret(form_instance, secret):
    return make_secret(form_instance) == secret
returns true if received secret matches expected secret for form_instance .
train
false
48,731
def csrf_failure(request, reason='', template_name=CSRF_FAILURE_TEMPLATE_NAME):
    from django.middleware.csrf import REASON_NO_REFERER, REASON_NO_CSRF_COOKIE
    c = {
        'title': _('Forbidden'),
        'main': _('CSRF verification failed. Request aborted.'),
        'reason': reason,
        'no_referer': reason == REASON_NO_REFERER,
        'no_referer1': _(
            "You are seeing this message because this HTTPS site requires a "
            "'Referer header' to be sent by your Web browser, but none was "
            "sent. This header is required for security reasons, to ensure "
            "that your browser is not being hijacked by third parties."),
        'no_referer2': _(
            "If you have configured your browser to disable 'Referer' "
            "headers, please re-enable them, at least for this site, or for "
            "HTTPS connections, or for 'same-origin' requests."),
        'no_cookie': reason == REASON_NO_CSRF_COOKIE,
        'no_cookie1': _(
            'You are seeing this message because this site requires a CSRF '
            'cookie when submitting forms. This cookie is required for '
            'security reasons, to ensure that your browser is not being '
            'hijacked by third parties.'),
        'no_cookie2': _(
            "If you have configured your browser to disable cookies, please "
            "re-enable them, at least for this site, or for 'same-origin' "
            "requests."),
        'DEBUG': settings.DEBUG,
        'docs_version': get_docs_version(),
        'more': _('More information is available with DEBUG=True.'),
    }
    try:
        t = loader.get_template(template_name)
    except TemplateDoesNotExist:
        if template_name == CSRF_FAILURE_TEMPLATE_NAME:
            t = Engine().from_string(CSRF_FAILURE_TEMPLATE)
            c = Context(c)
        else:
            raise
    return HttpResponseForbidden(t.render(c), content_type='text/html')
default view used when request fails csrf protection .
train
false
48,732
def test_join_inplace():
    s = tensor.lscalar()
    x = tensor.vector('x')
    z = tensor.zeros((s,))
    join = Join(view=0)
    c = join(0, x, z, z)
    f = theano.function([theano.In(x, borrow=True), s],
                        theano.Out(c, borrow=True))
    data = numpy.array([3, 4, 5], dtype=theano.config.floatX)
    print(f(data, 0))
    if theano.config.mode not in ['DebugMode', 'DEBUG_MODE']:
        assert f(data, 0) is data
    assert numpy.allclose(f(data, 0), [3, 4, 5])
test that join works inplace .
train
false
48,733
def label(object, index=None):
    object = Carbon.File.FSRef(object)
    object_alias = object.FSNewAliasMinimal()
    if index is None:
        return _getlabel(object_alias)
    if index < 0 or index > 7:
        index = 0
    return _setlabel(object_alias, index)
label: set or get the label of the item .
train
false
48,734
def extract_modes(spans):
    rangelen = sorted([(x['to'] - x['from']) + 1 for x in spans])
    deflen = sorted(rangelen, key=rangelen.count)[-1]
    reprs = [str2fmt(x['str']) for x in spans]
    deffmt = sorted(reprs, key=reprs.count)[-1]
    return (deflen, deffmt)
extract the most common span lengths and representation formats .
train
false
48,735
def _idnaText(octets):
    try:
        import idna
    except ImportError:
        return octets.decode('idna')
    else:
        return idna.decode(octets)
convert some idna-encoded octets into some human-readable text .
train
false
48,736
def inputhook_wx3():
    try:
        app = wx.GetApp()
        if app is not None:
            assert wx.Thread_IsMain()
            if not callable(signal.getsignal(signal.SIGINT)):
                signal.signal(signal.SIGINT, signal.default_int_handler)
            evtloop = wx.EventLoop()
            ea = wx.EventLoopActivator(evtloop)
            t = clock()
            while not stdin_ready():
                while evtloop.Pending():
                    t = clock()
                    evtloop.Dispatch()
                app.ProcessIdle()
                used_time = clock() - t
                if used_time > 10.0:
                    time.sleep(1.0)
                elif used_time > 0.1:
                    time.sleep(0.05)
                else:
                    time.sleep(0.001)
            del ea
    except KeyboardInterrupt:
        pass
    return 0
run the wx event loop by processing pending events only .
train
true
48,739
def primary_key(name='id'):
    return field(name, INTEGER, index=PRIMARY, optional=False)
returns an auto-incremented integer primary key field named "id" .
train
false
48,740
def check_entry_points(dist, attr, value):
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as e:
        raise DistutilsSetupError(e)
verify that entry_points map is parseable .
train
true
48,744
def _host_find(context, session, src_aggregate, host_ref):
    uuid = session.host.get_uuid(host_ref)
    for (compute_host, host_uuid) in src_aggregate.metadetails.items():
        if host_uuid == uuid:
            return compute_host
    raise exception.NoValidHost(
        reason='Host %(host_uuid)s could not be found from aggregate '
               'metadata: %(metadata)s.'
               % {'host_uuid': uuid, 'metadata': src_aggregate.metadetails})
return the host from the xenapi host reference .
train
false
48,745
def SplitIntoComponents(str_in):
    if str_in.startswith(('"', "'")):
        return shlex.split(str_in)
    else:
        components = str_in.split(' ', 1)
        if len(components) > 1:
            return [components[0]] + SplitIntoComponents(components[1])
        else:
            return components
splits strings into space-separated components .
train
true
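Tracing the recursion on a mixed string: unquoted words split off one at a time, and once a component starts with a quote the remainder is handed to shlex:

SplitIntoComponents('ls -l "my file"')
# ['ls'] + ['-l'] + shlex.split('"my file"')  ->  ['ls', '-l', 'my file']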
48,746
def cpu_online_map():
    cpus = []
    for line in open('/proc/cpuinfo', 'r').readlines():
        if line.startswith('processor'):
            cpus.append(line.split()[2])
    return cpus
check out the available cpu online map .
train
false
48,747
def get_connection_info(user=True):
    (section, keypath) = reg_info(user)
    url = None
    try:
        hive = _winreg.ConnectRegistry(None, section)
        key = _winreg.OpenKey(hive, keypath + '\\api')
        for i in range(0, _winreg.QueryInfoKey(key)[1]):
            (name, value, val_type) = _winreg.EnumValue(key, i)
            if name == 'url':
                url = value
        _winreg.CloseKey(key)
    except WindowsError:
        pass
    finally:
        _winreg.CloseKey(hive)
    if user and not url:
        url = get_connection_info(user=False)
    return url
return url of the api of a running sabnzbd instance ; user == true will first try the user's registry .
train
false
48,748
@register.simple_tag()
def crispy_addon(field, append='', prepend='', form_show_labels=True):
    if field:
        context = Context({'field': field,
                           'form_show_errors': True,
                           'form_show_labels': form_show_labels})
        template = loader.get_template('%s/layout/prepended_appended_text.html'
                                       % get_template_pack())
        context['crispy_prepended_text'] = prepend
        context['crispy_appended_text'] = append
        if not prepend and not append:
            raise TypeError('Expected a prepend and/or append argument')
        context = context.flatten()
    return template.render(context)
renders a form field using bootstrap's prepended or appended text:: {% crispy_addon form .
train
true
48,749
def s3_phone_represent(value):
    if not value:
        return current.messages['NONE']
    return '%s%s' % (unichr(8206), s3_unicode(value))
ensure that phone numbers always show as ltr - otherwise + appears at the end which looks wrong even in rtl .
train
false
48,750
def generate_rsa_key_pair(key_size=2048):
    private_key = rsa.generate_private_key(public_exponent=65537,
                                           key_size=key_size,
                                           backend=default_backend())
    private_key_str = private_key.private_bytes(Encoding.PEM,
                                                PrivateFormat.PKCS8,
                                                NoEncryption())
    public_key_str = private_key.public_key().public_bytes(
        Encoding.PEM, PublicFormat.SubjectPublicKeyInfo)
    print public_key_str
    print private_key_str
generates a public and private rsa pem encoded key pair .
train
false
48,754
def _net_write(sock, data, expiration):
    current = 0
    l = len(data)
    while current < l:
        _wait_for_writable(sock, expiration)
        current += sock.send(data[current:])
write the specified data to the socket .
train
true
48,756
def _set_asn1_time(boundary, when):
    if not isinstance(when, bytes):
        raise TypeError('when must be a byte string')
    set_result = _lib.ASN1_GENERALIZEDTIME_set_string(
        _ffi.cast('ASN1_GENERALIZEDTIME*', boundary), when)
    if set_result == 0:
        dummy = _ffi.gc(_lib.ASN1_STRING_new(), _lib.ASN1_STRING_free)
        _lib.ASN1_STRING_set(dummy, when, len(when))
        check_result = _lib.ASN1_GENERALIZEDTIME_check(
            _ffi.cast('ASN1_GENERALIZEDTIME*', dummy))
        if not check_result:
            raise ValueError('Invalid string')
        else:
            _untested_error()
set the time value of an asn1 time object .
train
false
48,757
def _match_emr_step_stderr_path(path, step_id=None):
    return _match_emr_step_log_path(path, 'stderr', step_id=step_id)
match path of a step stderr log .
train
false
48,758
def query_issues(page, after):
    return read_url(get_issues_url(page, after))
hits the github api for a single page of closed issues and returns the data .
train
false
48,759
def convert_pep0():
    check_paths()
    pep0_path = os.path.join(settings.PEP_REPO_PATH, 'pep-0000.html')
    pep0_content = open(pep0_path).read()
    soup = BeautifulSoup(pep0_content)
    body_children = list(soup.body.children)
    header = body_children[3]
    pep_content = body_children[7]
    body_links = pep_content.find_all('a')
    pep_href_re = re.compile('pep-(\\d+)\\.html')
    for b in body_links:
        m = pep_href_re.search(b.attrs['href'])
        if not m:
            continue
        b.attrs['href'] = '/dev/peps/pep-{}/'.format(m.group(1))
    header_rows = header.find_all('th')
    for t in header_rows:
        if 'Version:' in t.text and 'N/A' in t.next_sibling.text:
            t.parent.extract()
    return ''.join([header.prettify(), pep_content.prettify()])
take existing generated pep-0000 .
train
false
48,760
def get_calls(mock_observer_func):
    return [call_tuple[0][0].name for call_tuple in mock_observer_func.calls]
given a mock ipluginobserver method .
train
false
48,763
def createPythonExtensionBuilder(env):
    try:
        pyext = env['BUILDERS']['PythonExtension']
    except KeyError:
        import SCons.Action
        import SCons.Defaults
        action = SCons.Action.Action('$PYEXTLINKCOM', '$PYEXTLINKCOMSTR')
        action_list = [SCons.Defaults.SharedCheck, action]
        pyext = SCons.Builder.Builder(action=action_list,
                                      emitter='$SHLIBEMITTER',
                                      prefix='$PYEXTPREFIX',
                                      suffix='$PYEXTSUFFIX',
                                      target_scanner=ProgramScanner,
                                      src_suffix='$PYEXTOBJSUFFIX',
                                      src_builder='PythonObject')
        env['BUILDERS']['PythonExtension'] = pyext
    return pyext
this is a utility function that creates the pythonextension builder in an environment if it is not there already .
train
false
48,764
def encrypt_password(password):
    if _security.password_hash == 'plaintext':
        return password
    signed = get_hmac(password).decode('ascii')
    return _pwd_context.encrypt(signed)
encrypts the specified plaintext password using the configured encryption options .
train
false
48,765
def strip_email_quotes(text):
    lines = text.splitlines()
    matches = set()
    for line in lines:
        prefix = re.match('^(\\s*>[ >]*)', line)
        if prefix:
            matches.add(prefix.group(1))
        else:
            break
    else:
        prefix = long_substr(list(matches))
        if prefix:
            strip = len(prefix)
            text = '\n'.join([ln[strip:] for ln in lines])
    return text
strip leading email quotation characters (>) .
train
true
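A worked example, assuming long_substr (an IPython helper) returns the longest common substring of the collected prefixes; when every line is quoted, the for/else branch strips the shared prefix:

text = '> Hello,\n> how are you?'
strip_email_quotes(text)  # 'Hello,\nhow are you?' (the 2-char '> ' is stripped)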
48,766
def policy_exists(vhost, name, runas=None):
    if runas is None and not salt.utils.is_windows():
        runas = salt.utils.get_user()
    policies = list_policies(runas=runas)
    return bool(vhost in policies and name in policies[vhost])
check to see if policy exists .
train
true
48,767
def p_expression_group_error(t):
    (line_start, line_end) = t.linespan(2)
    (pos_start, pos_end) = t.lexspan(2)
    print('Syntax error at %d:%d to %d:%d'
          % (line_start, pos_start, line_end, pos_end))
    t[0] = 0
expression : lparen error rparen .
train
false
48,772
def pre_track_containers_create_policy_stat(self, conn):
    conn.executescript('''
        CREATE TABLE policy_stat (
            storage_policy_index INTEGER PRIMARY KEY,
            object_count INTEGER DEFAULT 0,
            bytes_used INTEGER DEFAULT 0
        );
        INSERT OR IGNORE INTO policy_stat (
            storage_policy_index, object_count, bytes_used
        )
        SELECT 0, object_count, bytes_used
        FROM account_stat
        WHERE container_count > 0;
    ''')
copied from accountbroker before the container_count column was added .
train
false
48,773
def parse_failed_targets(test_registry, junit_xml_dir, error_handler):
    failed_targets = defaultdict(set)
    for name in os.listdir(junit_xml_dir):
        if _JUNIT_XML_MATCHER.match(name):
            try:
                path = os.path.join(junit_xml_dir, name)
                xml = XmlParser.from_file(path)
                failures = int(xml.get_attribute(u'testsuite', u'failures'))
                errors = int(xml.get_attribute(u'testsuite', u'errors'))
                if failures or errors:
                    for testcase in xml.parsed.getElementsByTagName(u'testcase'):
                        test_failed = testcase.getElementsByTagName(u'failure')
                        test_errored = testcase.getElementsByTagName(u'error')
                        if test_failed or test_errored:
                            test = Test(
                                classname=testcase.getAttribute(u'classname'),
                                methodname=testcase.getAttribute(u'name'))
                            target = test_registry.get_owning_target(test)
                            failed_targets[target].add(test)
            except (XmlParser.XmlError, ValueError) as e:
                error_handler(ParseError(path, e))
    return dict(failed_targets)
parses junit xml reports and maps targets to the set of individual tests that failed .
train
false
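pants' XmlParser wrapper is project-internal, so here is a hedged stdlib-only sketch of the same traversal (made-up report contents, xml.dom.minidom in place of XmlParser):

from xml.dom.minidom import parseString

report = '''<testsuite failures="1" errors="0">
  <testcase classname="com.example.FooTest" name="test_ok"/>
  <testcase classname="com.example.FooTest" name="test_bad">
    <failure message="boom"/>
  </testcase>
</testsuite>'''

doc = parseString(report)
# a testcase failed if it carries a <failure> or <error> child element
failed = [(tc.getAttribute('classname'), tc.getAttribute('name'))
          for tc in doc.getElementsByTagName('testcase')
          if tc.getElementsByTagName('failure') or tc.getElementsByTagName('error')]
print(failed)  # [('com.example.FooTest', 'test_bad')]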
48,774
def extract_bigram_feats(document, bigrams): features = {} for bigr in bigrams: features['contains({0} - {1})'.format(bigr[0], bigr[1])] = (bigr in nltk.bigrams(document)) return features
[ "def", "extract_bigram_feats", "(", "document", ",", "bigrams", ")", ":", "features", "=", "{", "}", "for", "bigr", "in", "bigrams", ":", "features", "[", "'contains({0} - {1})'", ".", "format", "(", "bigr", "[", "0", "]", ",", "bigr", "[", "1", "]", ")", "]", "=", "(", "bigr", "in", "nltk", ".", "bigrams", "(", "document", ")", ")", "return", "features" ]
populate a dictionary of bigram features .
train
false
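A usage sketch with the function above in scope (nltk installed; the document and bigram list are invented):

import nltk  # extract_bigram_feats relies on nltk.bigrams

document = ['this', 'movie', 'was', 'really', 'great']
bigrams = [('really', 'great'), ('not', 'good')]
print(extract_bigram_feats(document, bigrams))
# {'contains(really - great)': True, 'contains(not - good)': False}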
48,775
def get_users_emails(send_type): sql = "\n SELECT json_build_object(\n 'user_id', osf_guid._id,\n 'info', json_agg(\n json_build_object(\n 'message', nd.message,\n 'node_lineage', nd.node_lineage,\n '_id', nd._id\n )\n )\n )\n FROM osf_notificationdigest AS nd\n LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id\n WHERE send_type = %s\n AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser')\n GROUP BY osf_guid.id\n ORDER BY osf_guid.id ASC\n " with connection.cursor() as cursor: cursor.execute(sql, [send_type]) rows = list(sum(cursor.fetchall(), ())) if (len(rows) > 0): return rows else: return []
[ "def", "get_users_emails", "(", "send_type", ")", ":", "sql", "=", "\"\\n SELECT json_build_object(\\n 'user_id', osf_guid._id,\\n 'info', json_agg(\\n json_build_object(\\n 'message', nd.message,\\n 'node_lineage', nd.node_lineage,\\n '_id', nd._id\\n )\\n )\\n )\\n FROM osf_notificationdigest AS nd\\n LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id\\n WHERE send_type = %s\\n AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser')\\n GROUP BY osf_guid.id\\n ORDER BY osf_guid.id ASC\\n \"", "with", "connection", ".", "cursor", "(", ")", "as", "cursor", ":", "cursor", ".", "execute", "(", "sql", ",", "[", "send_type", "]", ")", "rows", "=", "list", "(", "sum", "(", "cursor", ".", "fetchall", "(", ")", ",", "(", ")", ")", ")", "if", "(", "len", "(", "rows", ")", ">", "0", ")", ":", "return", "rows", "else", ":", "return", "[", "]" ]
get all emails that need to be sent .
train
false
48,776
def get_conf_path(filename=None): if sys.platform.startswith('linux'): xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '') if (not xdg_config_home): xdg_config_home = osp.join(get_home_dir(), '.config') if (not osp.isdir(xdg_config_home)): os.makedirs(xdg_config_home) conf_dir = osp.join(xdg_config_home, SUBFOLDER) else: conf_dir = osp.join(get_home_dir(), SUBFOLDER) if (not osp.isdir(conf_dir)): os.mkdir(conf_dir) if (filename is None): return conf_dir else: return osp.join(conf_dir, filename)
[ "def", "get_conf_path", "(", "filename", "=", "None", ")", ":", "if", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", ":", "xdg_config_home", "=", "os", ".", "environ", ".", "get", "(", "'XDG_CONFIG_HOME'", ",", "''", ")", "if", "(", "not", "xdg_config_home", ")", ":", "xdg_config_home", "=", "osp", ".", "join", "(", "get_home_dir", "(", ")", ",", "'.config'", ")", "if", "(", "not", "osp", ".", "isdir", "(", "xdg_config_home", ")", ")", ":", "os", ".", "makedirs", "(", "xdg_config_home", ")", "conf_dir", "=", "osp", ".", "join", "(", "xdg_config_home", ",", "SUBFOLDER", ")", "else", ":", "conf_dir", "=", "osp", ".", "join", "(", "get_home_dir", "(", ")", ",", "SUBFOLDER", ")", "if", "(", "not", "osp", ".", "isdir", "(", "conf_dir", ")", ")", ":", "os", ".", "mkdir", "(", "conf_dir", ")", "if", "(", "filename", "is", "None", ")", ":", "return", "conf_dir", "else", ":", "return", "osp", ".", "join", "(", "conf_dir", ",", "filename", ")" ]
return absolute path for configuration file with specified filename .
train
true
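A sketch of the lookup order with the snippet's function in scope; SUBFOLDER and get_home_dir() are module state in the original (Spyder), so stand-ins are assumed here:

import os
import os.path as osp
import sys

SUBFOLDER = '.spyder'            # assumed value, for illustration only

def get_home_dir():              # simplified stand-in
    return osp.expanduser('~')

# linux: $XDG_CONFIG_HOME/.spyder/spyder.ini (or ~/.config/.spyder/...)
# other platforms: ~/.spyder/spyder.ini
# note: the call creates the config directory if it is missing
print(get_conf_path('spyder.ini'))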
48,777
def Decompression_rar(specific_file): cmd = ((((('unrar x "' + specific_file) + '"') + ' "') + os.path.split(specific_file)[0]) + '"') os.system(cmd)
[ "def", "Decompression_rar", "(", "specific_file", ")", ":", "cmd", "=", "(", "(", "(", "(", "(", "'unrar x \"'", "+", "specific_file", ")", "+", "'\"'", ")", "+", "' \"'", ")", "+", "os", ".", "path", ".", "split", "(", "specific_file", ")", "[", "0", "]", ")", "+", "'\"'", ")", "os", ".", "system", "(", "cmd", ")" ]
decompress a rar archive into the directory containing it , using the unrar command-line tool .
train
false
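The os.system string above breaks on filenames containing double quotes; a hedged sketch of the same call through subprocess sidesteps shell quoting entirely (still assumes the unrar binary is on PATH):

import os
import subprocess

def decompress_rar(specific_file):
    dest_dir = os.path.split(specific_file)[0]
    # 'x' extracts with full paths into dest_dir; check=True raises on failure
    subprocess.run(['unrar', 'x', specific_file, dest_dir], check=True)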
48,779
@login_required def message_list(request, mailbox=None, template_name='messages/message_list.html'): message_list = _get_messages(request.user, mailbox) return render_to_response(template_name, {'message_list': message_list, 'mailbox': (mailbox or 'archive')}, context_instance=RequestContext(request))
[ "@", "login_required", "def", "message_list", "(", "request", ",", "mailbox", "=", "None", ",", "template_name", "=", "'messages/message_list.html'", ")", ":", "message_list", "=", "_get_messages", "(", "request", ".", "user", ",", "mailbox", ")", "return", "render_to_response", "(", "template_name", ",", "{", "'message_list'", ":", "message_list", ",", "'mailbox'", ":", "(", "mailbox", "or", "'archive'", ")", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
returns a list of user messages .
train
false
48,780
def pager(text): global pager pager = getpager() pager(text)
[ "def", "pager", "(", "text", ")", ":", "global", "pager", "pager", "=", "getpager", "(", ")", "pager", "(", "text", ")" ]
page the given text , choosing the pager implementation lazily on first call .
train
false
48,781
def get_clients(node): return [c for (c, i) in node.outputs[0].clients if (c != 'output')]
[ "def", "get_clients", "(", "node", ")", ":", "return", "[", "c", "for", "(", "c", ",", "i", ")", "in", "node", ".", "outputs", "[", "0", "]", ".", "clients", "if", "(", "c", "!=", "'output'", ")", "]" ]
used by the erf/erfc optimization to track the less frequent op .
train
false
48,782
def split_context(method, args, kwargs): pos = (len(getargspec(method).args) - 1) if (pos < len(args)): return (args[pos], args[:pos], kwargs) else: return (kwargs.pop('context', None), args, kwargs)
[ "def", "split_context", "(", "method", ",", "args", ",", "kwargs", ")", ":", "pos", "=", "(", "len", "(", "getargspec", "(", "method", ")", ".", "args", ")", "-", "1", ")", "if", "(", "pos", "<", "len", "(", "args", ")", ")", ":", "return", "(", "args", "[", "pos", "]", ",", "args", "[", ":", "pos", "]", ",", "kwargs", ")", "else", ":", "return", "(", "kwargs", ".", "pop", "(", "'context'", ",", "None", ")", ",", "args", ",", "kwargs", ")" ]
extract the context from a pair of positional and keyword arguments .
train
false
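A worked sketch with the function above in scope; the trailing-positional convention is assumed to be the old Odoo/OpenERP calling style, where the context rides after the method's declared arguments. Note the snippet uses inspect.getargspec, which was removed in Python 3.11:

from inspect import getargspec  # as the snippet assumes (Python < 3.11)

class Model(object):
    def read(self, ids):  # no explicit context parameter
        pass

# context passed as an extra trailing positional...
print(split_context(Model.read, ([1, 2], {'lang': 'en_US'}), {}))
# ...or as a keyword; both yield ({'lang': 'en_US'}, ([1, 2],), {})
print(split_context(Model.read, ([1, 2],), {'context': {'lang': 'en_US'}}))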
48,785
def iscsi_discover(portal_ip): cmd = ('iscsiadm -m discovery -t sendtargets -p %s' % portal_ip) output = utils.system_output(cmd, ignore_status=True) session = '' if ('Invalid' in output): logging.debug(output) else: session = output return session
[ "def", "iscsi_discover", "(", "portal_ip", ")", ":", "cmd", "=", "(", "'iscsiadm -m discovery -t sendtargets -p %s'", "%", "portal_ip", ")", "output", "=", "utils", ".", "system_output", "(", "cmd", ",", "ignore_status", "=", "True", ")", "session", "=", "''", "if", "(", "'Invalid'", "in", "output", ")", ":", "logging", ".", "debug", "(", "output", ")", "else", ":", "session", "=", "output", "return", "session" ]
query the iscsi server for available targets .
train
false
48,786
def test_field_assign(): from IronPythonTest.BinderTest import KeywordBase def f(): KeywordBase.SomeField = 42 AssertError(ValueError, f)
[ "def", "test_field_assign", "(", ")", ":", "from", "IronPythonTest", ".", "BinderTest", "import", "KeywordBase", "def", "f", "(", ")", ":", "KeywordBase", ".", "SomeField", "=", "42", "AssertError", "(", "ValueError", ",", "f", ")" ]
assign to an instance field through the type .
train
false
48,788
def building_marker_fn(record): if (record.asbestos == 1): marker = 'hazmat' else: marker = 'residence' priority = record.priority if (priority == 1): marker = ('%s_red' % marker) elif (priority == 2): marker = ('%s_yellow' % marker) elif (priority == 3): marker = ('%s_green' % marker) mtable = db.gis_marker try: marker = db((mtable.name == marker)).select(mtable.image, mtable.height, mtable.width, cache=s3db.cache, limitby=(0, 1)).first() except: marker = db((mtable.name == 'residence')).select(mtable.image, mtable.height, mtable.width, cache=s3db.cache, limitby=(0, 1)).first() return marker
[ "def", "building_marker_fn", "(", "record", ")", ":", "if", "(", "record", ".", "asbestos", "==", "1", ")", ":", "marker", "=", "'hazmat'", "else", ":", "marker", "=", "'residence'", "priority", "=", "record", ".", "priority", "if", "(", "priority", "==", "1", ")", ":", "marker", "=", "(", "'%s_red'", "%", "marker", ")", "elif", "(", "priority", "==", "2", ")", ":", "marker", "=", "(", "'%s_yellow'", "%", "marker", ")", "elif", "(", "priority", "==", "3", ")", ":", "marker", "=", "(", "'%s_green'", "%", "marker", ")", "mtable", "=", "db", ".", "gis_marker", "try", ":", "marker", "=", "db", "(", "(", "mtable", ".", "name", "==", "marker", ")", ")", ".", "select", "(", "mtable", ".", "image", ",", "mtable", ".", "height", ",", "mtable", ".", "width", ",", "cache", "=", "s3db", ".", "cache", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "except", ":", "marker", "=", "db", "(", "(", "mtable", ".", "name", "==", "'residence'", ")", ")", ".", "select", "(", "mtable", ".", "image", ",", "mtable", ".", "height", ",", "mtable", ".", "width", ",", "cache", "=", "s3db", ".", "cache", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "return", "marker" ]
function to decide which marker to use for the building assessments map . @todo: legend . @todo: move to templates .
train
false
48,789
def get_vcs_info(path): for info in SUPPORTED: vcs_path = osp.join(path, info['rootdir']) if osp.isdir(vcs_path): return info
[ "def", "get_vcs_info", "(", "path", ")", ":", "for", "info", "in", "SUPPORTED", ":", "vcs_path", "=", "osp", ".", "join", "(", "path", ",", "info", "[", "'rootdir'", "]", ")", "if", "osp", ".", "isdir", "(", "vcs_path", ")", ":", "return", "info" ]
return support status dict if path is under vcs root .
train
true
48,790
def start_webserver(ip, port, webroot, handler=WebHandler): web_server = _get_inst(ip, port) if ((web_server is None) or web_server.is_down()): web_server = HTTPServer((ip, port), webroot, handler) _servers[(ip, port)] = web_server server_thread = threading.Thread(target=web_server.serve_forever) server_thread.name = 'WebServer' server_thread.daemon = True server_thread.start() return web_server
[ "def", "start_webserver", "(", "ip", ",", "port", ",", "webroot", ",", "handler", "=", "WebHandler", ")", ":", "web_server", "=", "_get_inst", "(", "ip", ",", "port", ")", "if", "(", "(", "web_server", "is", "None", ")", "or", "web_server", ".", "is_down", "(", ")", ")", ":", "web_server", "=", "HTTPServer", "(", "(", "ip", ",", "port", ")", ",", "webroot", ",", "handler", ")", "_servers", "[", "(", "ip", ",", "port", ")", "]", "=", "web_server", "server_thread", "=", "threading", ".", "Thread", "(", "target", "=", "web_server", ".", "serve_forever", ")", "server_thread", ".", "name", "=", "'WebServer'", "server_thread", ".", "daemon", "=", "True", "server_thread", ".", "start", "(", ")", "return", "web_server" ]
create an http server daemon .
train
false
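The daemon-thread pattern itself can be shown with only the standard library, as a hedged stand-in for the custom HTTPServer/WebHandler pair above:

import threading
from http.server import HTTPServer, SimpleHTTPRequestHandler

server = HTTPServer(('127.0.0.1', 8080), SimpleHTTPRequestHandler)
thread = threading.Thread(target=server.serve_forever)
thread.name = 'WebServer'
thread.daemon = True  # the server dies with the main thread
thread.start()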
48,791
def test_ordered_dict_value_validator(): schema = vol.Schema(cv.ordered_dict(cv.string)) with pytest.raises(vol.Invalid): schema({'hello': None}) schema({'hello': 'world'}) schema = vol.Schema(cv.ordered_dict(int)) with pytest.raises(vol.Invalid): schema({'hello': 'world'}) schema({'hello': 5})
[ "def", "test_ordered_dict_value_validator", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "ordered_dict", "(", "cv", ".", "string", ")", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "schema", "(", "{", "'hello'", ":", "None", "}", ")", "schema", "(", "{", "'hello'", ":", "'world'", "}", ")", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "ordered_dict", "(", "int", ")", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "schema", "(", "{", "'hello'", ":", "'world'", "}", ")", "schema", "(", "{", "'hello'", ":", "5", "}", ")" ]
test ordered_dict validator .
train
false
48,792
def extract_special_queries(queries): specials = {} dc = queries.copy() for i in queries: if (i.startswith('__') and (i in FILTERS_LIST)): specials[i] = queries[i] del dc[i] return (dc, specials)
[ "def", "extract_special_queries", "(", "queries", ")", ":", "specials", "=", "{", "}", "dc", "=", "queries", ".", "copy", "(", ")", "for", "i", "in", "queries", ":", "if", "(", "i", ".", "startswith", "(", "'__'", ")", "and", "(", "i", "in", "FILTERS_LIST", ")", ")", ":", "specials", "[", "i", "]", "=", "queries", "[", "i", "]", "del", "dc", "[", "i", "]", "return", "(", "dc", ",", "specials", ")" ]
separate special queries from normal queries .
train
false
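A worked sketch with the function above in scope; FILTERS_LIST is module state in the original, so an assumed stand-in value is supplied here:

FILTERS_LIST = ('__limit', '__order_by')  # assumed stand-in

queries = {'name': 'alice', '__limit': 10}
print(extract_special_queries(queries))
# ({'name': 'alice'}, {'__limit': 10})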
48,793
def _float_feature(value): if (not isinstance(value, list)): value = [value] return tf.train.Feature(float_list=tf.train.FloatList(value=value))
[ "def", "_float_feature", "(", "value", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "list", ")", ")", ":", "value", "=", "[", "value", "]", "return", "tf", ".", "train", ".", "Feature", "(", "float_list", "=", "tf", ".", "train", ".", "FloatList", "(", "value", "=", "value", ")", ")" ]
wrapper for inserting float features into example proto .
train
true
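A usage sketch with the function above in scope, embedding both a scalar and a list into a tf.train.Example:

import tensorflow as tf

example = tf.train.Example(features=tf.train.Features(feature={
    'height': _float_feature(1.85),        # scalar gets wrapped in a list
    'scores': _float_feature([0.1, 0.9]),  # lists pass through unchanged
}))
# float32 storage, so the values print as approximations of 0.1 and 0.9
print(example.features.feature['scores'].float_list.value)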
48,796
@verbose def find_eog_events(raw, event_id=998, l_freq=1, h_freq=10, filter_length='10s', ch_name=None, tstart=0, verbose=None): eog_inds = _get_eog_channel_index(ch_name, raw) logger.info(('EOG channel index for this subject is: %s' % eog_inds)) (eog, _) = raw[eog_inds, :] eog_events = _find_eog_events(eog, event_id=event_id, l_freq=l_freq, h_freq=h_freq, sampling_rate=raw.info['sfreq'], first_samp=raw.first_samp, filter_length=filter_length, tstart=tstart) return eog_events
[ "@", "verbose", "def", "find_eog_events", "(", "raw", ",", "event_id", "=", "998", ",", "l_freq", "=", "1", ",", "h_freq", "=", "10", ",", "filter_length", "=", "'10s'", ",", "ch_name", "=", "None", ",", "tstart", "=", "0", ",", "verbose", "=", "None", ")", ":", "eog_inds", "=", "_get_eog_channel_index", "(", "ch_name", ",", "raw", ")", "logger", ".", "info", "(", "(", "'EOG channel index for this subject is: %s'", "%", "eog_inds", ")", ")", "(", "eog", ",", "_", ")", "=", "raw", "[", "eog_inds", ",", ":", "]", "eog_events", "=", "_find_eog_events", "(", "eog", ",", "event_id", "=", "event_id", ",", "l_freq", "=", "l_freq", ",", "h_freq", "=", "h_freq", ",", "sampling_rate", "=", "raw", ".", "info", "[", "'sfreq'", "]", ",", "first_samp", "=", "raw", ".", "first_samp", ",", "filter_length", "=", "filter_length", ",", "tstart", "=", "tstart", ")", "return", "eog_events" ]
locate eog artifacts .
train
false
48,797
def get_rendition_or_not_found(image, specs): try: return image.get_rendition(specs) except SourceImageIOError: Rendition = image.renditions.model rendition = Rendition(image=image, width=0, height=0) rendition.file.name = u'not-found' return rendition
[ "def", "get_rendition_or_not_found", "(", "image", ",", "specs", ")", ":", "try", ":", "return", "image", ".", "get_rendition", "(", "specs", ")", "except", "SourceImageIOError", ":", "Rendition", "=", "image", ".", "renditions", ".", "model", "rendition", "=", "Rendition", "(", "image", "=", "image", ",", "width", "=", "0", ",", "height", "=", "0", ")", "rendition", ".", "file", ".", "name", "=", "u'not-found'", "return", "rendition" ]
tries to get / create the rendition for the image or renders a not-found image if it does not exist .
train
false
48,798
def build_headers(api_token): headers = {'X-Circonus-App-Name': 'ansible', 'Host': 'api.circonus.com', 'X-Circonus-Auth-Token': api_token, 'Accept': 'application/json'} return headers
[ "def", "build_headers", "(", "api_token", ")", ":", "headers", "=", "{", "'X-Circonus-App-Name'", ":", "'ansible'", ",", "'Host'", ":", "'api.circonus.com'", ",", "'X-Circonus-Auth-Token'", ":", "api_token", ",", "'Accept'", ":", "'application/json'", "}", "return", "headers" ]
takes an api token and returns request headers that include it .
train
false
48,799
@gof.local_optimizer([sparse.Remove0]) def local_inplace_remove0(node): if (isinstance(node.op, sparse.Remove0) and (not node.op.inplace)): new_op = node.op.__class__(inplace=True) new_node = new_op(*node.inputs) return [new_node] return False
[ "@", "gof", ".", "local_optimizer", "(", "[", "sparse", ".", "Remove0", "]", ")", "def", "local_inplace_remove0", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "sparse", ".", "Remove0", ")", "and", "(", "not", "node", ".", "op", ".", "inplace", ")", ")", ":", "new_op", "=", "node", ".", "op", ".", "__class__", "(", "inplace", "=", "True", ")", "new_node", "=", "new_op", "(", "*", "node", ".", "inputs", ")", "return", "[", "new_node", "]", "return", "False" ]
optimization to insert inplace versions of remove0 .
train
false
48,801
def _check_rc(rc, errno=None): if (rc == (-1)): if (errno is None): from zmq.backend import zmq_errno errno = zmq_errno() from zmq import EAGAIN, ETERM if (errno == EINTR): raise InterruptedSystemCall(errno) elif (errno == EAGAIN): raise Again(errno) elif (errno == ETERM): raise ContextTerminated(errno) else: raise ZMQError(errno)
[ "def", "_check_rc", "(", "rc", ",", "errno", "=", "None", ")", ":", "if", "(", "rc", "==", "(", "-", "1", ")", ")", ":", "if", "(", "errno", "is", "None", ")", ":", "from", "zmq", ".", "backend", "import", "zmq_errno", "errno", "=", "zmq_errno", "(", ")", "from", "zmq", "import", "EAGAIN", ",", "ETERM", "if", "(", "errno", "==", "EINTR", ")", ":", "raise", "InterruptedSystemCall", "(", "errno", ")", "elif", "(", "errno", "==", "EAGAIN", ")", ":", "raise", "Again", "(", "errno", ")", "elif", "(", "errno", "==", "ETERM", ")", ":", "raise", "ContextTerminated", "(", "errno", ")", "else", ":", "raise", "ZMQError", "(", "errno", ")" ]
internal utility for checking zmq return condition and raising the appropriate exception class .
train
false
48,802
def remove_headers(*headers): def _decorator(func): '\n Decorates the given function.\n ' @wraps(func) def _inner(*args, **kwargs): '\n Alters the response.\n ' response = func(*args, **kwargs) remove_headers_from_response(response, *headers) return response return _inner return _decorator
[ "def", "remove_headers", "(", "*", "headers", ")", ":", "def", "_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "remove_headers_from_response", "(", "response", ",", "*", "headers", ")", "return", "response", "return", "_inner", "return", "_decorator" ]
decorator that removes specific headers from the response .
train
false
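A usage sketch on a Django-style view, with the decorator above in scope; remove_headers_from_response is assumed to be the helper from the same module that the decorator references:

from django.http import HttpResponse

@remove_headers('Vary', 'Set-Cookie')
def my_view(request):
    response = HttpResponse('ok')
    response['Vary'] = 'Cookie'  # stripped again before the response leaves
    return response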
48,803
def dmp_permute(f, P, u, K): (F, H) = (dmp_to_dict(f, u), {}) for (exp, coeff) in F.items(): new_exp = ([0] * len(exp)) for (e, p) in zip(exp, P): new_exp[p] = e H[tuple(new_exp)] = coeff return dmp_from_dict(H, u, K)
[ "def", "dmp_permute", "(", "f", ",", "P", ",", "u", ",", "K", ")", ":", "(", "F", ",", "H", ")", "=", "(", "dmp_to_dict", "(", "f", ",", "u", ")", ",", "{", "}", ")", "for", "(", "exp", ",", "coeff", ")", "in", "F", ".", "items", "(", ")", ":", "new_exp", "=", "(", "[", "0", "]", "*", "len", "(", "exp", ")", ")", "for", "(", "e", ",", "p", ")", "in", "zip", "(", "exp", ",", "P", ")", ":", "new_exp", "[", "p", "]", "=", "e", "H", "[", "tuple", "(", "new_exp", ")", "]", "=", "coeff", "return", "dmp_from_dict", "(", "H", ",", "u", ",", "K", ")" ]
return a polynomial in k[x_{p(1)} , .. , x_{p(n)}] obtained by permuting the variables of f according to the permutation p .
train
false
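A worked example through sympy's dense polynomial helpers; f below encodes 2*x*y + x*z over ZZ in three variables (so u = 2), and the permutation [1, 0, 2] swaps x and y:

from sympy.polys.domains import ZZ
from sympy.polys.densebasic import dmp_permute

f = ZZ.map([[[2], [1, 0]], []])          # 2*x*y + x*z
print(dmp_permute(f, [1, 0, 2], 2, ZZ))  # [[[2], []], [[1, 0], []]] i.e. 2*x*y + y*z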
48,804
def _get_service_command_generator(run=utils.run): global _service_command_generator try: return _service_command_generator except NameError: command_generator = _command_generators[get_name_of_init(run)] _service_command_generator = _ServiceCommandGenerator(command_generator) return _service_command_generator
[ "def", "_get_service_command_generator", "(", "run", "=", "utils", ".", "run", ")", ":", "global", "_service_command_generator", "try", ":", "return", "_service_command_generator", "except", "NameError", ":", "command_generator", "=", "_command_generators", "[", "get_name_of_init", "(", "run", ")", "]", "_service_command_generator", "=", "_ServiceCommandGenerator", "(", "command_generator", ")", "return", "_service_command_generator" ]
lazy initializer for servicecommandgenerator using the auto-detect init command .
train
false
48,805
def json_splitter(buffer): try: (obj, index) = json_decoder.raw_decode(buffer) rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():] return (obj, rest) except ValueError: return None
[ "def", "json_splitter", "(", "buffer", ")", ":", "try", ":", "(", "obj", ",", "index", ")", "=", "json_decoder", ".", "raw_decode", "(", "buffer", ")", "rest", "=", "buffer", "[", "json", ".", "decoder", ".", "WHITESPACE", ".", "match", "(", "buffer", ",", "index", ")", ".", "end", "(", ")", ":", "]", "return", "(", "obj", ",", "rest", ")", "except", "ValueError", ":", "return", "None" ]
attempt to parse a json object from a buffer .
train
true
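A usage sketch: json and json_decoder are module state in the original, so they are recreated before calling the function above:

import json

json_decoder = json.JSONDecoder()

print(json_splitter('{"a": 1} {"b": 2}'))  # ({'a': 1}, '{"b": 2}')
print(json_splitter('{"a": 1'))            # None (incomplete document)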
48,806
def filenameToModuleName(fn): if isinstance(fn, bytes): initPy = b'__init__.py' else: initPy = '__init__.py' fullName = os.path.abspath(fn) base = os.path.basename(fn) if (not base): base = os.path.basename(fn[:(-1)]) modName = nativeString(os.path.splitext(base)[0]) while 1: fullName = os.path.dirname(fullName) if os.path.exists(os.path.join(fullName, initPy)): modName = ('%s.%s' % (nativeString(os.path.basename(fullName)), nativeString(modName))) else: break return modName
[ "def", "filenameToModuleName", "(", "fn", ")", ":", "if", "isinstance", "(", "fn", ",", "bytes", ")", ":", "initPy", "=", "'__init__.py'", "else", ":", "initPy", "=", "'__init__.py'", "fullName", "=", "os", ".", "path", ".", "abspath", "(", "fn", ")", "base", "=", "os", ".", "path", ".", "basename", "(", "fn", ")", "if", "(", "not", "base", ")", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "fn", "[", ":", "(", "-", "1", ")", "]", ")", "modName", "=", "nativeString", "(", "os", ".", "path", ".", "splitext", "(", "base", ")", "[", "0", "]", ")", "while", "1", ":", "fullName", "=", "os", ".", "path", ".", "dirname", "(", "fullName", ")", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "fullName", ",", "initPy", ")", ")", ":", "modName", "=", "(", "'%s.%s'", "%", "(", "nativeString", "(", "os", ".", "path", ".", "basename", "(", "fullName", ")", ")", ",", "nativeString", "(", "modName", ")", ")", ")", "else", ":", "break", "return", "modName" ]
convert a name in the filesystem to the name of the python module it is .
train
false
48,807
def _package_conf_file_to_dir(file_name): if (file_name in SUPPORTED_CONFS): path = BASE_PATH.format(file_name) if os.path.exists(path): if os.path.isdir(path): return False else: os.rename(path, (path + '.tmpbak')) os.mkdir(path, 493) with salt.utils.fopen((path + '.tmpbak')) as fh_: for line in fh_: line = line.strip() if (line and (not line.startswith('#'))): append_to_package_conf(file_name, string=line) os.remove((path + '.tmpbak')) return True else: os.mkdir(path, 493) return True
[ "def", "_package_conf_file_to_dir", "(", "file_name", ")", ":", "if", "(", "file_name", "in", "SUPPORTED_CONFS", ")", ":", "path", "=", "BASE_PATH", ".", "format", "(", "file_name", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "False", "else", ":", "os", ".", "rename", "(", "path", ",", "(", "path", "+", "'.tmpbak'", ")", ")", "os", ".", "mkdir", "(", "path", ",", "493", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "(", "path", "+", "'.tmpbak'", ")", ")", "as", "fh_", ":", "for", "line", "in", "fh_", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "line", "and", "(", "not", "line", ".", "startswith", "(", "'#'", ")", ")", ")", ":", "append_to_package_conf", "(", "file_name", ",", "string", "=", "line", ")", "os", ".", "remove", "(", "(", "path", "+", "'.tmpbak'", ")", ")", "return", "True", "else", ":", "os", ".", "mkdir", "(", "path", ",", "493", ")", "return", "True" ]
convert a config file to a config directory .
train
false
48,809
def trigger_build(project, version=None, record=True, force=False, basic=False): from readthedocs.projects.tasks import update_docs from readthedocs.builds.models import Build if project.skip: return None if (not version): version = project.versions.get(slug=LATEST) kwargs = dict(pk=project.pk, version_pk=version.pk, record=record, force=force, basic=basic) build = None if record: build = Build.objects.create(project=project, version=version, type='html', state='triggered', success=True) kwargs['build_pk'] = build.pk options = {} if project.build_queue: options['queue'] = project.build_queue time_limit = DOCKER_LIMITS['time'] try: if project.container_time_limit: time_limit = int(project.container_time_limit) except ValueError: pass options['soft_time_limit'] = time_limit options['time_limit'] = int((time_limit * 1.2)) update_docs.apply_async(kwargs=kwargs, **options) return build
[ "def", "trigger_build", "(", "project", ",", "version", "=", "None", ",", "record", "=", "True", ",", "force", "=", "False", ",", "basic", "=", "False", ")", ":", "from", "readthedocs", ".", "projects", ".", "tasks", "import", "update_docs", "from", "readthedocs", ".", "builds", ".", "models", "import", "Build", "if", "project", ".", "skip", ":", "return", "None", "if", "(", "not", "version", ")", ":", "version", "=", "project", ".", "versions", ".", "get", "(", "slug", "=", "LATEST", ")", "kwargs", "=", "dict", "(", "pk", "=", "project", ".", "pk", ",", "version_pk", "=", "version", ".", "pk", ",", "record", "=", "record", ",", "force", "=", "force", ",", "basic", "=", "basic", ")", "build", "=", "None", "if", "record", ":", "build", "=", "Build", ".", "objects", ".", "create", "(", "project", "=", "project", ",", "version", "=", "version", ",", "type", "=", "'html'", ",", "state", "=", "'triggered'", ",", "success", "=", "True", ")", "kwargs", "[", "'build_pk'", "]", "=", "build", ".", "pk", "options", "=", "{", "}", "if", "project", ".", "build_queue", ":", "options", "[", "'queue'", "]", "=", "project", ".", "build_queue", "time_limit", "=", "DOCKER_LIMITS", "[", "'time'", "]", "try", ":", "if", "project", ".", "container_time_limit", ":", "time_limit", "=", "int", "(", "project", ".", "container_time_limit", ")", "except", "ValueError", ":", "pass", "options", "[", "'soft_time_limit'", "]", "=", "time_limit", "options", "[", "'time_limit'", "]", "=", "int", "(", "(", "time_limit", "*", "1.2", ")", ")", "update_docs", ".", "apply_async", "(", "kwargs", "=", "kwargs", ",", "**", "options", ")", "return", "build" ]
trigger a build for the given project and version ; if the project has a build_queue , the task is dispatched to that queue .
train
false
48,811
def StringToMakefileVariable(string): return re.sub('[^a-zA-Z0-9_]', '_', string)
[ "def", "StringToMakefileVariable", "(", "string", ")", ":", "return", "re", ".", "sub", "(", "'[^a-zA-Z0-9_]'", ",", "'_'", ",", "string", ")" ]
convert a string to a value that is acceptable as a make variable name .
train
false
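A quick worked example with the function above in scope: every character outside [a-zA-Z0-9_] collapses to an underscore:

import re  # the snippet uses re.sub

print(StringToMakefileVariable('my app/target-1.0'))  # my_app_target_1_0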
48,813
def map_base_attributes(cube, mapper_class, naming, locale=None): base = [attr for attr in cube.all_attributes if attr.is_base] mapper = mapper_class(cube, naming, locale) mapped = {attr.ref: mapper[attr] for attr in base} return (mapper.fact_name, mapped)
[ "def", "map_base_attributes", "(", "cube", ",", "mapper_class", ",", "naming", ",", "locale", "=", "None", ")", ":", "base", "=", "[", "attr", "for", "attr", "in", "cube", ".", "all_attributes", "if", "attr", ".", "is_base", "]", "mapper", "=", "mapper_class", "(", "cube", ",", "naming", ",", "locale", ")", "mapped", "=", "{", "attr", ".", "ref", ":", "mapper", "[", "attr", "]", "for", "attr", "in", "base", "}", "return", "(", "mapper", ".", "fact_name", ",", "mapped", ")" ]
map all base attributes of cube using mapping function mapper .
train
false
48,814
def user_has_perm_note(note, profile, request=None): if (note.author and (note.author.id == profile.id)): return True if (request and (note.note_type == comm.REVIEWER_COMMENT)): return acl.check_reviewer(request) if (request and (note.note_type == comm.DEVELOPER_COMMENT) and acl.check_reviewer(request)): return True user_is_author = profile.addons.filter(pk=note.thread._addon_id) if ((user_is_author.exists() and note.read_permission_developer) or (note.note_type == comm.REVIEWER_PUBLIC_COMMENT)): return True return check_acls_comm_obj(note, profile)
[ "def", "user_has_perm_note", "(", "note", ",", "profile", ",", "request", "=", "None", ")", ":", "if", "(", "note", ".", "author", "and", "(", "note", ".", "author", ".", "id", "==", "profile", ".", "id", ")", ")", ":", "return", "True", "if", "(", "request", "and", "(", "note", ".", "note_type", "==", "comm", ".", "REVIEWER_COMMENT", ")", ")", ":", "return", "acl", ".", "check_reviewer", "(", "request", ")", "if", "(", "request", "and", "(", "note", ".", "note_type", "==", "comm", ".", "DEVELOPER_COMMENT", ")", "and", "acl", ".", "check_reviewer", "(", "request", ")", ")", ":", "return", "True", "user_is_author", "=", "profile", ".", "addons", ".", "filter", "(", "pk", "=", "note", ".", "thread", ".", "_addon_id", ")", "if", "(", "(", "user_is_author", ".", "exists", "(", ")", "and", "note", ".", "read_permission_developer", ")", "or", "(", "note", ".", "note_type", "==", "comm", ".", "REVIEWER_PUBLIC_COMMENT", ")", ")", ":", "return", "True", "return", "check_acls_comm_obj", "(", "note", ",", "profile", ")" ]
check if the user has read/write permissions on the given note .
train
false
48,815
def _get_k_p_a(font, left, right): chars = (left + right) args = [None, 1, cf.kCFTypeDictionaryKeyCallBacks, cf.kCFTypeDictionaryValueCallBacks] attributes = cf.CFDictionaryCreateMutable(*args) cf.CFDictionaryAddValue(attributes, kCTFontAttributeName, font) string = cf.CFAttributedStringCreate(None, CFSTR(chars), attributes) typesetter = ct.CTTypesetterCreateWithAttributedString(string) cf.CFRelease(string) cf.CFRelease(attributes) range = CFRange(0, 1) line = ct.CTTypesetterCreateLine(typesetter, range) offset = ct.CTLineGetOffsetForStringIndex(line, 1, None) cf.CFRelease(line) cf.CFRelease(typesetter) return offset
[ "def", "_get_k_p_a", "(", "font", ",", "left", ",", "right", ")", ":", "chars", "=", "(", "left", "+", "right", ")", "args", "=", "[", "None", ",", "1", ",", "cf", ".", "kCFTypeDictionaryKeyCallBacks", ",", "cf", ".", "kCFTypeDictionaryValueCallBacks", "]", "attributes", "=", "cf", ".", "CFDictionaryCreateMutable", "(", "*", "args", ")", "cf", ".", "CFDictionaryAddValue", "(", "attributes", ",", "kCTFontAttributeName", ",", "font", ")", "string", "=", "cf", ".", "CFAttributedStringCreate", "(", "None", ",", "CFSTR", "(", "chars", ")", ",", "attributes", ")", "typesetter", "=", "ct", ".", "CTTypesetterCreateWithAttributedString", "(", "string", ")", "cf", ".", "CFRelease", "(", "string", ")", "cf", ".", "CFRelease", "(", "attributes", ")", "range", "=", "CFRange", "(", "0", ",", "1", ")", "line", "=", "ct", ".", "CTTypesetterCreateLine", "(", "typesetter", ",", "range", ")", "offset", "=", "ct", ".", "CTLineGetOffsetForStringIndex", "(", "line", ",", "1", ",", "None", ")", "cf", ".", "CFRelease", "(", "line", ")", "cf", ".", "CFRelease", "(", "typesetter", ")", "return", "offset" ]
this actually calculates the kerning + advance .
train
true
48,816
def assert_series_equal(left, right, check_names=True, **kwargs): try: return tm.assert_series_equal(left, right, check_names=check_names, **kwargs) except TypeError: if check_names: assert (left.name == right.name) return tm.assert_series_equal(left, right, **kwargs)
[ "def", "assert_series_equal", "(", "left", ",", "right", ",", "check_names", "=", "True", ",", "**", "kwargs", ")", ":", "try", ":", "return", "tm", ".", "assert_series_equal", "(", "left", ",", "right", ",", "check_names", "=", "check_names", ",", "**", "kwargs", ")", "except", "TypeError", ":", "if", "check_names", ":", "assert", "(", "left", ".", "name", "==", "right", ".", "name", ")", "return", "tm", ".", "assert_series_equal", "(", "left", ",", "right", ",", "**", "kwargs", ")" ]
backwards compatibility wrapper for pandas ' assert_series_equal , falling back when older versions lack the check_names argument .
train
false
48,818
def Indicator(pos, size, dtype): x = numpy.zeros(size, dtype=dtype) x[pos] = 1 return x
[ "def", "Indicator", "(", "pos", ",", "size", ",", "dtype", ")", ":", "x", "=", "numpy", ".", "zeros", "(", "size", ",", "dtype", "=", "dtype", ")", "x", "[", "pos", "]", "=", "1", "return", "x" ]
returns an array of length size and type dtype that is everywhere 0 , except for a 1 at index pos .
train
true
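A worked example with the function above in scope; the result is a one-hot (indicator) vector:

import numpy

print(Indicator(2, 5, int))  # [0 0 1 0 0]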
48,819
def upcast_scalar(dtype, scalar): return (np.array([0], dtype=dtype) * scalar).dtype
[ "def", "upcast_scalar", "(", "dtype", ",", "scalar", ")", ":", "return", "(", "np", ".", "array", "(", "[", "0", "]", ",", "dtype", "=", "dtype", ")", "*", "scalar", ")", ".", "dtype" ]
determine data type for binary operation between an array of type dtype and a scalar .
train
false
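Worked examples with the function above in scope; the trick is to build a length-1 array and let NumPy's own promotion rules decide the result type:

import numpy as np

print(upcast_scalar(np.int8, 2))       # int8 (a python int keeps the array dtype)
print(upcast_scalar(np.float32, 2.5))  # float32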
48,820
@mock_autoscaling def test_create_launch_configuration_defaults(): conn = boto.connect_autoscale() config = LaunchConfiguration(name=u'tester', image_id=u'ami-abcd1234', instance_type=u'm1.small') conn.create_launch_configuration(config) launch_config = conn.get_all_launch_configurations()[0] launch_config.name.should.equal(u'tester') launch_config.image_id.should.equal(u'ami-abcd1234') launch_config.instance_type.should.equal(u'm1.small') launch_config.key_name.should.equal(u'') list(launch_config.security_groups).should.equal([]) launch_config.user_data.should.equal('') launch_config.instance_monitoring.enabled.should.equal(u'false') launch_config.instance_profile_name.should.equal(None) launch_config.spot_price.should.equal(None)
[ "@", "mock_autoscaling", "def", "test_create_launch_configuration_defaults", "(", ")", ":", "conn", "=", "boto", ".", "connect_autoscale", "(", ")", "config", "=", "LaunchConfiguration", "(", "name", "=", "u'tester'", ",", "image_id", "=", "u'ami-abcd1234'", ",", "instance_type", "=", "u'm1.small'", ")", "conn", ".", "create_launch_configuration", "(", "config", ")", "launch_config", "=", "conn", ".", "get_all_launch_configurations", "(", ")", "[", "0", "]", "launch_config", ".", "name", ".", "should", ".", "equal", "(", "u'tester'", ")", "launch_config", ".", "image_id", ".", "should", ".", "equal", "(", "u'ami-abcd1234'", ")", "launch_config", ".", "instance_type", ".", "should", ".", "equal", "(", "u'm1.small'", ")", "launch_config", ".", "key_name", ".", "should", ".", "equal", "(", "u''", ")", "list", "(", "launch_config", ".", "security_groups", ")", ".", "should", ".", "equal", "(", "[", "]", ")", "launch_config", ".", "user_data", ".", "should", ".", "equal", "(", "''", ")", "launch_config", ".", "instance_monitoring", ".", "enabled", ".", "should", ".", "equal", "(", "u'false'", ")", "launch_config", ".", "instance_profile_name", ".", "should", ".", "equal", "(", "None", ")", "launch_config", ".", "spot_price", ".", "should", ".", "equal", "(", "None", ")" ]
test with the minimum inputs and check that all of the proper defaults are assigned for the other attributes .
train
false
48,821
def fuzzy_not(v): if (v is None): return v else: return (not v)
[ "def", "fuzzy_not", "(", "v", ")", ":", "if", "(", "v", "is", "None", ")", ":", "return", "v", "else", ":", "return", "(", "not", "v", ")" ]
not in fuzzy logic : return none if v is none , else not v .
train
false
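A quick truth table for the three-valued logic, with the function above in scope; None propagates as "unknown":

for v in (True, False, None):
    print(v, '->', fuzzy_not(v))
# True -> False, False -> True, None -> None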
48,822
def test_cookie_jar_init(config_stub, fake_save_manager): line_parser_stub = [COOKIE1, COOKIE2] jar = cookies.CookieJar(line_parser=line_parser_stub) assert fake_save_manager.add_saveable.called assert (len(jar.allCookies()) == 2) raw_cookies = [c.toRawForm().data() for c in jar.allCookies()] assert (raw_cookies == [COOKIE1, COOKIE2])
[ "def", "test_cookie_jar_init", "(", "config_stub", ",", "fake_save_manager", ")", ":", "line_parser_stub", "=", "[", "COOKIE1", ",", "COOKIE2", "]", "jar", "=", "cookies", ".", "CookieJar", "(", "line_parser", "=", "line_parser_stub", ")", "assert", "fake_save_manager", ".", "add_saveable", ".", "called", "assert", "(", "len", "(", "jar", ".", "allCookies", "(", ")", ")", "==", "2", ")", "raw_cookies", "=", "[", "c", ".", "toRawForm", "(", ")", ".", "data", "(", ")", "for", "c", "in", "jar", ".", "allCookies", "(", ")", "]", "assert", "(", "raw_cookies", "==", "[", "COOKIE1", ",", "COOKIE2", "]", ")" ]
test the cookiejar constructor .
train
false
48,823
def skip(reason): return skipif(True, reason=reason)
[ "def", "skip", "(", "reason", ")", ":", "return", "skipif", "(", "True", ",", "reason", "=", "reason", ")" ]
unconditionally skip the currently decorated test with the passed reason .
train
false