id_within_dataset: int64 (values 1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: list (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 value)
is_duplicated: bool (2 classes)
6,085
def _pipeline_objects(pipeline_objects_from_pillars, pipeline_object_overrides): from_pillars = copy.deepcopy(__salt__['pillar.get'](pipeline_objects_from_pillars)) from_pillars.update(pipeline_object_overrides) pipeline_objects = _standardize(_dict_to_list_ids(from_pillars)) for pipeline_object in pipeline_objects: pipeline_object['fields'] = _properties_from_dict(pipeline_object['fields']) return pipeline_objects
[ "def", "_pipeline_objects", "(", "pipeline_objects_from_pillars", ",", "pipeline_object_overrides", ")", ":", "from_pillars", "=", "copy", ".", "deepcopy", "(", "__salt__", "[", "'pillar.get'", "]", "(", "pipeline_objects_from_pillars", ")", ")", "from_pillars", ".", "update", "(", "pipeline_object_overrides", ")", "pipeline_objects", "=", "_standardize", "(", "_dict_to_list_ids", "(", "from_pillars", ")", ")", "for", "pipeline_object", "in", "pipeline_objects", ":", "pipeline_object", "[", "'fields'", "]", "=", "_properties_from_dict", "(", "pipeline_object", "[", "'fields'", "]", ")", "return", "pipeline_objects" ]
return a list of pipeline objects that compose the pipeline. pipeline_objects_from_pillars: the pillar key to use for lookup. pipeline_object_overrides: pipeline objects to use .
train
true
6,086
def VerifyCode(code): try: compile(textwrap.dedent(code).encode('UTF-8'), '<string>', 'exec') except SyntaxError: try: ast.parse(textwrap.dedent(code.lstrip('\n')).lstrip(), '<string>', 'exec') except SyntaxError: try: normalized_code = _NormalizeCode(code) compile(normalized_code.encode('UTF-8'), '<string>', 'exec') except SyntaxError: raise InternalError(sys.exc_info()[1])
[ "def", "VerifyCode", "(", "code", ")", ":", "try", ":", "compile", "(", "textwrap", ".", "dedent", "(", "code", ")", ".", "encode", "(", "'UTF-8'", ")", ",", "'<string>'", ",", "'exec'", ")", "except", "SyntaxError", ":", "try", ":", "ast", ".", "parse", "(", "textwrap", ".", "dedent", "(", "code", ".", "lstrip", "(", "'\\n'", ")", ")", ".", "lstrip", "(", ")", ",", "'<string>'", ",", "'exec'", ")", "except", "SyntaxError", ":", "try", ":", "normalized_code", "=", "_NormalizeCode", "(", "code", ")", "compile", "(", "normalized_code", ".", "encode", "(", "'UTF-8'", ")", ",", "'<string>'", ",", "'exec'", ")", "except", "SyntaxError", ":", "raise", "InternalError", "(", "sys", ".", "exc_info", "(", ")", "[", "1", "]", ")" ]
verify that the reformatted code is syntactically correct .
train
false
6,087
def query_member_id(api_key, user_token, login_name): r = requests.get((API_URL_PREFIX + u'members/search'), params={u'login': login_name}, headers={u'Accept': u'application/json', u'X-BetaSeries-Version': u'2.1', u'X-BetaSeries-Key': api_key, u'X-BetaSeries-Token': user_token}) assert (r.status_code == 200), (u'Bad HTTP status code: %s' % r.status_code) j = r.json() error_list = j[u'errors'] for err in error_list: log.error(str(err)) found_id = None if (not error_list): for candidate in j[u'users']: if (candidate[u'login'] == login_name): found_id = candidate[u'id'] break return found_id
[ "def", "query_member_id", "(", "api_key", ",", "user_token", ",", "login_name", ")", ":", "r", "=", "requests", ".", "get", "(", "(", "API_URL_PREFIX", "+", "u'members/search'", ")", ",", "params", "=", "{", "u'login'", ":", "login_name", "}", ",", "headers", "=", "{", "u'Accept'", ":", "u'application/json'", ",", "u'X-BetaSeries-Version'", ":", "u'2.1'", ",", "u'X-BetaSeries-Key'", ":", "api_key", ",", "u'X-BetaSeries-Token'", ":", "user_token", "}", ")", "assert", "(", "r", ".", "status_code", "==", "200", ")", ",", "(", "u'Bad HTTP status code: %s'", "%", "r", ".", "status_code", ")", "j", "=", "r", ".", "json", "(", ")", "error_list", "=", "j", "[", "u'errors'", "]", "for", "err", "in", "error_list", ":", "log", ".", "error", "(", "str", "(", "err", ")", ")", "found_id", "=", "None", "if", "(", "not", "error_list", ")", ":", "for", "candidate", "in", "j", "[", "u'users'", "]", ":", "if", "(", "candidate", "[", "u'login'", "]", "==", "login_name", ")", ":", "found_id", "=", "candidate", "[", "u'id'", "]", "break", "return", "found_id" ]
get the member id of a member identified by its login name .
train
false
6,088
def wavwrite(y, fs, filename): x = copy.deepcopy(y) x *= INT16_FAC x = np.int16(x) write(filename, fs, x)
[ "def", "wavwrite", "(", "y", ",", "fs", ",", "filename", ")", ":", "x", "=", "copy", ".", "deepcopy", "(", "y", ")", "x", "*=", "INT16_FAC", "x", "=", "np", ".", "int16", "(", "x", ")", "write", "(", "filename", ",", "fs", ",", "x", ")" ]
write a sound file from an array with the sound and the sampling rate. y: floating point array of one dimension .
train
false
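A usage sketch for wavwrite above; INT16_FAC and write are assumptions not shown in the snippet, e.g. INT16_FAC = (2**15) - 1 and scipy.io.wavfile.write:

import numpy as np
# one second of a 440 Hz sine in [-1, 1], written out as 16-bit PCM
tone = 0.5 * np.sin(2 * np.pi * 440 * np.arange(44100) / 44100.0)
wavwrite(tone, 44100, 'tone.wav')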
6,089
def ConstructTimestampAssetId(id_prefix, timestamp, device_id, uniquifier, reverse_ts=True): assert IdPrefix.IsValid(id_prefix), id_prefix assert (timestamp < (1L << 32)), timestamp if reverse_ts: timestamp = (((1L << 32) - int(timestamp)) - 1) byte_str = struct.pack('>I', timestamp) assert (len(byte_str) == 4), timestamp byte_str += util.EncodeVarLengthNumber(device_id) byte_str += _EncodeUniquifier(uniquifier) return (id_prefix + base64hex.B64HexEncode(byte_str, padding=False))
[ "def", "ConstructTimestampAssetId", "(", "id_prefix", ",", "timestamp", ",", "device_id", ",", "uniquifier", ",", "reverse_ts", "=", "True", ")", ":", "assert", "IdPrefix", ".", "IsValid", "(", "id_prefix", ")", ",", "id_prefix", "assert", "(", "timestamp", "<", "(", "1", "L", "<<", "32", ")", ")", ",", "timestamp", "if", "reverse_ts", ":", "timestamp", "=", "(", "(", "(", "1", "L", "<<", "32", ")", "-", "int", "(", "timestamp", ")", ")", "-", "1", ")", "byte_str", "=", "struct", ".", "pack", "(", "'>I'", ",", "timestamp", ")", "assert", "(", "len", "(", "byte_str", ")", "==", "4", ")", ",", "timestamp", "byte_str", "+=", "util", ".", "EncodeVarLengthNumber", "(", "device_id", ")", "byte_str", "+=", "_EncodeUniquifier", "(", "uniquifier", ")", "return", "(", "id_prefix", "+", "base64hex", ".", "B64HexEncode", "(", "byte_str", ",", "padding", "=", "False", ")", ")" ]
constructs an asset id that has a leading 4-byte encoded timestamp .
train
false
6,090
def msubs(expr, *sub_dicts, **kwargs): sub_dict = dict_merge(*sub_dicts) smart = kwargs.pop('smart', False) if smart: func = _smart_subs elif hasattr(expr, 'msubs'): return expr.msubs(sub_dict) else: func = (lambda expr, sub_dict: _crawl(expr, _sub_func, sub_dict)) if isinstance(expr, (Matrix, Vector, Dyadic)): return expr.applyfunc((lambda x: func(x, sub_dict))) else: return func(expr, sub_dict)
[ "def", "msubs", "(", "expr", ",", "*", "sub_dicts", ",", "**", "kwargs", ")", ":", "sub_dict", "=", "dict_merge", "(", "*", "sub_dicts", ")", "smart", "=", "kwargs", ".", "pop", "(", "'smart'", ",", "False", ")", "if", "smart", ":", "func", "=", "_smart_subs", "elif", "hasattr", "(", "expr", ",", "'msubs'", ")", ":", "return", "expr", ".", "msubs", "(", "sub_dict", ")", "else", ":", "func", "=", "(", "lambda", "expr", ",", "sub_dict", ":", "_crawl", "(", "expr", ",", "_sub_func", ",", "sub_dict", ")", ")", "if", "isinstance", "(", "expr", ",", "(", "Matrix", ",", "Vector", ",", "Dyadic", ")", ")", ":", "return", "expr", ".", "applyfunc", "(", "(", "lambda", "x", ":", "func", "(", "x", ",", "sub_dict", ")", ")", ")", "else", ":", "return", "func", "(", "expr", ",", "sub_dict", ")" ]
a custom subs for use on expressions derived in physics .
train
false
6,092
def getPillarOutput(loops): faces = [] vertexDictionary = {} vertexes = [] for loop in loops: for (vertexIndex, vertex) in enumerate(loop): position = (vertex.x, vertex.y, vertex.z) if (position in vertexDictionary): loop[vertexIndex] = vertexDictionary[position] else: if (vertex.__class__ != Vector3Index): loop[vertexIndex] = Vector3Index(len(vertexDictionary), vertex.x, vertex.y, vertex.z) vertexDictionary[position] = loop[vertexIndex] vertexes.append(loop[vertexIndex]) addPillarByLoops(faces, loops) return {'trianglemesh': {'vertex': vertexes, 'face': faces}}
[ "def", "getPillarOutput", "(", "loops", ")", ":", "faces", "=", "[", "]", "vertexDictionary", "=", "{", "}", "vertexes", "=", "[", "]", "for", "loop", "in", "loops", ":", "for", "(", "vertexIndex", ",", "vertex", ")", "in", "enumerate", "(", "loop", ")", ":", "position", "=", "(", "vertex", ".", "x", ",", "vertex", ".", "y", ",", "vertex", ".", "z", ")", "if", "(", "position", "in", "vertexDictionary", ")", ":", "loop", "[", "vertexIndex", "]", "=", "vertexDictionary", "[", "position", "]", "else", ":", "if", "(", "vertex", ".", "__class__", "!=", "Vector3Index", ")", ":", "loop", "[", "vertexIndex", "]", "=", "Vector3Index", "(", "len", "(", "vertexDictionary", ")", ",", "vertex", ".", "x", ",", "vertex", ".", "y", ",", "vertex", ".", "z", ")", "vertexDictionary", "[", "position", "]", "=", "loop", "[", "vertexIndex", "]", "vertexes", ".", "append", "(", "loop", "[", "vertexIndex", "]", ")", "addPillarByLoops", "(", "faces", ",", "loops", ")", "return", "{", "'trianglemesh'", ":", "{", "'vertex'", ":", "vertexes", ",", "'face'", ":", "faces", "}", "}" ]
get pillar output .
train
false
6,093
@csrf_exempt def checkout_cancel(_request): context = {'payment_support_email': configuration_helpers.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL)} return render_to_response('commerce/checkout_cancel.html', context)
[ "@", "csrf_exempt", "def", "checkout_cancel", "(", "_request", ")", ":", "context", "=", "{", "'payment_support_email'", ":", "configuration_helpers", ".", "get_value", "(", "'payment_support_email'", ",", "settings", ".", "PAYMENT_SUPPORT_EMAIL", ")", "}", "return", "render_to_response", "(", "'commerce/checkout_cancel.html'", ",", "context", ")" ]
checkout/payment cancellation view .
train
false
6,094
def upload_blob(bucket_name, source_file_name, destination_blob_name): storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(destination_blob_name) blob.upload_from_filename(source_file_name) print 'File {} uploaded to {}.'.format(source_file_name, destination_blob_name)
[ "def", "upload_blob", "(", "bucket_name", ",", "source_file_name", ",", "destination_blob_name", ")", ":", "storage_client", "=", "storage", ".", "Client", "(", ")", "bucket", "=", "storage_client", ".", "get_bucket", "(", "bucket_name", ")", "blob", "=", "bucket", ".", "blob", "(", "destination_blob_name", ")", "blob", ".", "upload_from_filename", "(", "source_file_name", ")", "print", "'File {} uploaded to {}.'", ".", "format", "(", "source_file_name", ",", "destination_blob_name", ")" ]
uploads a file to the bucket .
train
false
6,096
def redirect_output(filename): import twill fp = open(filename, 'a') twill.set_output(fp)
[ "def", "redirect_output", "(", "filename", ")", ":", "import", "twill", "fp", "=", "open", "(", "filename", ",", "'a'", ")", "twill", ".", "set_output", "(", "fp", ")" ]
redirect stdout and stderr to a file .
train
false
6,097
def generate_garch(nobs, ar, ma, mu=1.0, scale=0.1): eta = (scale * np.random.randn(nobs)) h = signal.lfilter(ma, ar, (eta ** 2)) err = (np.sqrt(h) * eta) return (err, h)
[ "def", "generate_garch", "(", "nobs", ",", "ar", ",", "ma", ",", "mu", "=", "1.0", ",", "scale", "=", "0.1", ")", ":", "eta", "=", "(", "scale", "*", "np", ".", "random", ".", "randn", "(", "nobs", ")", ")", "h", "=", "signal", ".", "lfilter", "(", "ma", ",", "ar", ",", "(", "eta", "**", "2", ")", ")", "err", "=", "(", "np", ".", "sqrt", "(", "h", ")", "*", "eta", ")", "return", "(", "err", ",", "h", ")" ]
simulate standard garch. scale: float, scale/standard deviation of the innovation process in the garch process .
train
false
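A hedged usage sketch for generate_garch; np and signal are assumed to be numpy and scipy.signal, as the snippet implies:

import numpy as np
np.random.seed(0)
# h follows an ARMA-style recursion on the squared innovations:
# here h[t] = 0.9*h[t-1] + eta[t]**2 + 0.2*eta[t-1]**2
err, h = generate_garch(500, ar=[1.0, -0.9], ma=[1.0, 0.2], scale=0.1)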
6,099
def AumSortedConcatenate(): def step(ctxt, ndx, author, sort, link): if (author is not None): ctxt[ndx] = u':::'.join((author, sort, link)) def finalize(ctxt): keys = list(ctxt.iterkeys()) l = len(keys) if (l == 0): return None if (l == 1): return ctxt[keys[0]] return u':#:'.join([ctxt[v] for v in sorted(keys)]) return ({}, step, finalize)
[ "def", "AumSortedConcatenate", "(", ")", ":", "def", "step", "(", "ctxt", ",", "ndx", ",", "author", ",", "sort", ",", "link", ")", ":", "if", "(", "author", "is", "not", "None", ")", ":", "ctxt", "[", "ndx", "]", "=", "u':::'", ".", "join", "(", "(", "author", ",", "sort", ",", "link", ")", ")", "def", "finalize", "(", "ctxt", ")", ":", "keys", "=", "list", "(", "ctxt", ".", "iterkeys", "(", ")", ")", "l", "=", "len", "(", "keys", ")", "if", "(", "l", "==", "0", ")", ":", "return", "None", "if", "(", "l", "==", "1", ")", ":", "return", "ctxt", "[", "keys", "[", "0", "]", "]", "return", "u':#:'", ".", "join", "(", "[", "ctxt", "[", "v", "]", "for", "v", "in", "sorted", "(", "keys", ")", "]", ")", "return", "(", "{", "}", ",", "step", ",", "finalize", ")" ]
string concatenation aggregator for the author sort map .
train
false
6,100
def over_bound(w_dyad, tree): nonzeros = sum((1 for e in w_dyad if (e != 0))) return ((len(tree) - lower_bound(w_dyad)) - nonzeros)
[ "def", "over_bound", "(", "w_dyad", ",", "tree", ")", ":", "nonzeros", "=", "sum", "(", "(", "1", "for", "e", "in", "w_dyad", "if", "(", "e", "!=", "0", ")", ")", ")", "return", "(", "(", "len", "(", "tree", ")", "-", "lower_bound", "(", "w_dyad", ")", ")", "-", "nonzeros", ")" ]
return the number of cones in the tree beyond the known lower bounds .
train
false
6,101
def sigterm_handler(signum, frame): logger.info(u'Got SIGTERM signal. Exiting...') exit_process()
[ "def", "sigterm_handler", "(", "signum", ",", "frame", ")", ":", "logger", ".", "info", "(", "u'Got SIGTERM signal. Exiting...'", ")", "exit_process", "(", ")" ]
gracefully exit on a sigterm .
train
false
6,102
def servicegroup_server_disable(sg_name, s_name, s_port, **connection_args): ret = True server = _servicegroup_get_server(sg_name, s_name, s_port, **connection_args) if (server is None): return False nitro = _connect(**connection_args) if (nitro is None): return False try: NSServiceGroup.disable_server(nitro, server) except NSNitroError as error: log.debug('netscaler module error - NSServiceGroup.disable_server() failed: {0}'.format(error)) ret = False _disconnect(nitro) return ret
[ "def", "servicegroup_server_disable", "(", "sg_name", ",", "s_name", ",", "s_port", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "server", "=", "_servicegroup_get_server", "(", "sg_name", ",", "s_name", ",", "s_port", ",", "**", "connection_args", ")", "if", "(", "server", "is", "None", ")", ":", "return", "False", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "try", ":", "NSServiceGroup", ".", "disable_server", "(", "nitro", ",", "server", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSServiceGroup.disable_server() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
disable a server:port member of a servicegroup .
train
true
6,104
def iter_spider_classes(module): from scrapy.spider import BaseSpider for obj in vars(module).itervalues(): if (inspect.isclass(obj) and issubclass(obj, BaseSpider) and (obj.__module__ == module.__name__) and getattr(obj, 'name', None)): (yield obj)
[ "def", "iter_spider_classes", "(", "module", ")", ":", "from", "scrapy", ".", "spider", "import", "BaseSpider", "for", "obj", "in", "vars", "(", "module", ")", ".", "itervalues", "(", ")", ":", "if", "(", "inspect", ".", "isclass", "(", "obj", ")", "and", "issubclass", "(", "obj", ",", "BaseSpider", ")", "and", "(", "obj", ".", "__module__", "==", "module", ".", "__name__", ")", "and", "getattr", "(", "obj", ",", "'name'", ",", "None", ")", ")", ":", "(", "yield", "obj", ")" ]
return an iterator over all spider classes defined in the given module that can be instantiated .
train
false
6,105
def logged_in_users(attrs=None, where=None): return _osquery_cmd(table='logged_in_users', attrs=attrs, where=where)
[ "def", "logged_in_users", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'logged_in_users'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return logged_in_users information from osquery .
train
false
6,107
def ffmpeg_resize(video, output, size): cmd = [get_setting('FFMPEG_BINARY'), '-i', video, '-vf', ('scale=%d:%d' % (size[0], size[1])), output] subprocess_call(cmd)
[ "def", "ffmpeg_resize", "(", "video", ",", "output", ",", "size", ")", ":", "cmd", "=", "[", "get_setting", "(", "'FFMPEG_BINARY'", ")", ",", "'-i'", ",", "video", ",", "'-vf'", ",", "(", "'scale=%d:%d'", "%", "(", "size", "[", "0", "]", ",", "size", "[", "1", "]", ")", ")", ",", "output", "]", "subprocess_call", "(", "cmd", ")" ]
resizes video to new size size and writes the result to file output .
train
false
6,108
def sort_key_by_numeric_other(key_value): return tuple((((int(numeric) if numeric else None), (INSTANCE_SIZES.index(alpha) if (alpha in INSTANCE_SIZES) else alpha), other) for (numeric, alpha, other) in RE_NUMERIC_OTHER.findall(key_value[0])))
[ "def", "sort_key_by_numeric_other", "(", "key_value", ")", ":", "return", "tuple", "(", "(", "(", "(", "int", "(", "numeric", ")", "if", "numeric", "else", "None", ")", ",", "(", "INSTANCE_SIZES", ".", "index", "(", "alpha", ")", "if", "(", "alpha", "in", "INSTANCE_SIZES", ")", "else", "alpha", ")", ",", "other", ")", "for", "(", "numeric", ",", "alpha", ",", "other", ")", "in", "RE_NUMERIC_OTHER", ".", "findall", "(", "key_value", "[", "0", "]", ")", ")", ")" ]
split key into numeric, alpha, and other components for sorting .
train
false
6,109
def _parse_searchdomain(): contents = _read_file(_DEB_RESOLV_FILE) pattern = 'search\\s+(?P<search_domain>\\S+)' prog = re.compile(pattern) for item in contents: match = prog.match(item) if match: return match.group('search_domain') return ''
[ "def", "_parse_searchdomain", "(", ")", ":", "contents", "=", "_read_file", "(", "_DEB_RESOLV_FILE", ")", "pattern", "=", "'search\\\\s+(?P<search_domain>\\\\S+)'", "prog", "=", "re", ".", "compile", "(", "pattern", ")", "for", "item", "in", "contents", ":", "match", "=", "prog", ".", "match", "(", "item", ")", "if", "match", ":", "return", "match", ".", "group", "(", "'search_domain'", ")", "return", "''" ]
parse /etc/resolv.conf for the search domain .
train
false
6,110
def cosine_similarity(v1, v2): return (tf.reduce_sum(tf.mul(v1, v2), reduction_indices=1) / (tf.sqrt(tf.reduce_sum(tf.mul(v1, v1), reduction_indices=1)) * tf.sqrt(tf.reduce_sum(tf.mul(v2, v2), reduction_indices=1))))
[ "def", "cosine_similarity", "(", "v1", ",", "v2", ")", ":", "return", "(", "tf", ".", "reduce_sum", "(", "tf", ".", "mul", "(", "v1", ",", "v2", ")", ",", "reduction_indices", "=", "1", ")", "/", "(", "tf", ".", "sqrt", "(", "tf", ".", "reduce_sum", "(", "tf", ".", "mul", "(", "v1", ",", "v1", ")", ",", "reduction_indices", "=", "1", ")", ")", "*", "tf", ".", "sqrt", "(", "tf", ".", "reduce_sum", "(", "tf", ".", "mul", "(", "v2", ",", "v2", ")", ",", "reduction_indices", "=", "1", ")", ")", ")", ")" ]
returns the cosine similarity of the given vectors .
train
false
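A usage sketch for cosine_similarity; it assumes an old TensorFlow (0.x) where tf.mul is the elementwise product and sessions are the execution model:

import tensorflow as tf
v1 = tf.constant([[1.0, 0.0], [1.0, 1.0]])
v2 = tf.constant([[1.0, 0.0], [0.0, 1.0]])
with tf.Session() as sess:
    # identical vectors give 1.0; vectors 45 degrees apart give ~0.7071
    print(sess.run(cosine_similarity(v1, v2)))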
6,111
def _is_python_file(filename): for ext in ('.py', '.so', '.pyd', '.pyw'): if filename.endswith(ext): return True return False
[ "def", "_is_python_file", "(", "filename", ")", ":", "for", "ext", "in", "(", "'.py'", ",", "'.so'", ",", "'.pyd'", ",", "'.pyw'", ")", ":", "if", "filename", ".", "endswith", "(", "ext", ")", ":", "return", "True", "return", "False" ]
return true if the given filename should be considered as a python file .
train
false
6,112
def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True): return ((address, factory), {'mode': int(mode, 8), 'backlog': int(backlog), 'wantPID': bool(int(lockfile))})
[ "def", "_parseUNIX", "(", "factory", ",", "address", ",", "mode", "=", "'666'", ",", "backlog", "=", "50", ",", "lockfile", "=", "True", ")", ":", "return", "(", "(", "address", ",", "factory", ")", ",", "{", "'mode'", ":", "int", "(", "mode", ",", "8", ")", ",", "'backlog'", ":", "int", "(", "backlog", ")", ",", "'wantPID'", ":", "bool", "(", "int", "(", "lockfile", ")", ")", "}", ")" ]
internal parser function for l{_parseserver} to convert the string arguments for a unix stream endpoint into the structured arguments .
train
false
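A worked example of _parseUNIX's argument conversion (factory stands in for a hypothetical protocol factory; the mode string is parsed as octal):

args, kwargs = _parseUNIX(factory, '/var/run/web.sock', mode='660', backlog='5', lockfile='0')
# args   == ('/var/run/web.sock', factory)
# kwargs == {'mode': 432, 'backlog': 5, 'wantPID': False}   # 432 == 0o660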
6,116
def _is_in_group(opt, group): for (key, value) in group._opts.items(): if (value['opt'] == opt): return True return False
[ "def", "_is_in_group", "(", "opt", ",", "group", ")", ":", "for", "(", "key", ",", "value", ")", "in", "group", ".", "_opts", ".", "items", "(", ")", ":", "if", "(", "value", "[", "'opt'", "]", "==", "opt", ")", ":", "return", "True", "return", "False" ]
check if opt is in group .
train
false
6,117
def _get_cache_key_namespace(cache_key): return force_str(cache_key).split(str(u':'), 1)[0]
[ "def", "_get_cache_key_namespace", "(", "cache_key", ")", ":", "return", "force_str", "(", "cache_key", ")", ".", "split", "(", "str", "(", "u':'", ")", ",", "1", ")", "[", "0", "]" ]
split the given cache key by the first colon to get a namespace .
train
false
6,119
def is_fuse_exec(cmd): cmd_path = _which(cmd) if (not cmd_path): return False elif (not _which('ldd')): raise CommandNotFoundError('ldd') out = __salt__['cmd.run']('ldd {0}'.format(cmd_path), python_shell=False) return ('libfuse' in out)
[ "def", "is_fuse_exec", "(", "cmd", ")", ":", "cmd_path", "=", "_which", "(", "cmd", ")", "if", "(", "not", "cmd_path", ")", ":", "return", "False", "elif", "(", "not", "_which", "(", "'ldd'", ")", ")", ":", "raise", "CommandNotFoundError", "(", "'ldd'", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'ldd {0}'", ".", "format", "(", "cmd_path", ")", ",", "python_shell", "=", "False", ")", "return", "(", "'libfuse'", "in", "out", ")" ]
returns true if the command passed is a fuse mountable application .
train
false
6,121
def get_eip_allocation_id_by_address(client, eip_address, check_mode=False): params = {'PublicIps': [eip_address]} allocation_id = None err_msg = '' try: if (not check_mode): allocations = client.describe_addresses(**params)['Addresses'] if (len(allocations) == 1): allocation = allocations[0] else: allocation = None else: dry_run_eip = DRY_RUN_ALLOCATION_UNCONVERTED['Addresses'][0]['PublicIp'] if (dry_run_eip == eip_address): allocation = DRY_RUN_ALLOCATION_UNCONVERTED['Addresses'][0] else: allocation = None if allocation: if (allocation.get('Domain') != 'vpc'): err_msg = 'EIP {0} is a non-VPC EIP, please allocate a VPC scoped EIP'.format(eip_address) else: allocation_id = allocation.get('AllocationId') else: err_msg = 'EIP {0} does not exist'.format(eip_address) except botocore.exceptions.ClientError as e: err_msg = str(e) return (allocation_id, err_msg)
[ "def", "get_eip_allocation_id_by_address", "(", "client", ",", "eip_address", ",", "check_mode", "=", "False", ")", ":", "params", "=", "{", "'PublicIps'", ":", "[", "eip_address", "]", "}", "allocation_id", "=", "None", "err_msg", "=", "''", "try", ":", "if", "(", "not", "check_mode", ")", ":", "allocations", "=", "client", ".", "describe_addresses", "(", "**", "params", ")", "[", "'Addresses'", "]", "if", "(", "len", "(", "allocations", ")", "==", "1", ")", ":", "allocation", "=", "allocations", "[", "0", "]", "else", ":", "allocation", "=", "None", "else", ":", "dry_run_eip", "=", "DRY_RUN_ALLOCATION_UNCONVERTED", "[", "'Addresses'", "]", "[", "0", "]", "[", "'PublicIp'", "]", "if", "(", "dry_run_eip", "==", "eip_address", ")", ":", "allocation", "=", "DRY_RUN_ALLOCATION_UNCONVERTED", "[", "'Addresses'", "]", "[", "0", "]", "else", ":", "allocation", "=", "None", "if", "allocation", ":", "if", "(", "allocation", ".", "get", "(", "'Domain'", ")", "!=", "'vpc'", ")", ":", "err_msg", "=", "'EIP {0} is a non-VPC EIP, please allocate a VPC scoped EIP'", ".", "format", "(", "eip_address", ")", "else", ":", "allocation_id", "=", "allocation", ".", "get", "(", "'AllocationId'", ")", "else", ":", "err_msg", "=", "'EIP {0} does not exist'", ".", "format", "(", "eip_address", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "e", ":", "err_msg", "=", "str", "(", "e", ")", "return", "(", "allocation_id", ",", "err_msg", ")" ]
get the allocation id for an eip address. args: client: boto3 client. eip_address: the elastic ip address of the eip .
train
false
6,122
def reduceDim(data, dim, func='pca'): try: pcaFunc = globals()[func] except KeyError: raise ValueError('Unknown function to calc principal components') pc = pcaFunc(data, dim) return (pc * asmatrix(makeCentered(data)).T).T
[ "def", "reduceDim", "(", "data", ",", "dim", ",", "func", "=", "'pca'", ")", ":", "try", ":", "pcaFunc", "=", "globals", "(", ")", "[", "func", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unknown function to calc principal components'", ")", "pc", "=", "pcaFunc", "(", "data", ",", "dim", ")", "return", "(", "pc", "*", "asmatrix", "(", "makeCentered", "(", "data", ")", ")", ".", "T", ")", ".", "T" ]
reduce the dimension of datapoints to dim via principal component analysis .
train
false
6,123
def _fmt_path(path): if (not path): return '' return ('path: _' + ''.join(path))
[ "def", "_fmt_path", "(", "path", ")", ":", "if", "(", "not", "path", ")", ":", "return", "''", "return", "(", "'path: _'", "+", "''", ".", "join", "(", "path", ")", ")" ]
format the path for final display .
train
false
6,124
def cat(fname, fallback=_DEFAULT, binary=True): try: with (open_binary(fname) if binary else open_text(fname)) as f: return f.read() except IOError: if (fallback != _DEFAULT): return fallback else: raise
[ "def", "cat", "(", "fname", ",", "fallback", "=", "_DEFAULT", ",", "binary", "=", "True", ")", ":", "try", ":", "with", "(", "open_binary", "(", "fname", ")", "if", "binary", "else", "open_text", "(", "fname", ")", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")", "except", "IOError", ":", "if", "(", "fallback", "!=", "_DEFAULT", ")", ":", "return", "fallback", "else", ":", "raise" ]
return the contents of a file, returning fallback instead of raising IOError when one is given .
train
false
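A usage sketch for cat; open_binary and open_text are assumed to be thin wrappers around the built-in open:

banner = cat('/proc/version', fallback='', binary=False)  # file contents, or '' if unreadable
data = cat('/nonexistent', fallback=None)                 # None instead of raising IOError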
6,126
def disconnect_all(): from fabric.state import connections, output for key in connections.keys(): if output.status: sys.stdout.write(('Disconnecting from %s... ' % denormalize(key))) connections[key].close() del connections[key] if output.status: sys.stdout.write('done.\n')
[ "def", "disconnect_all", "(", ")", ":", "from", "fabric", ".", "state", "import", "connections", ",", "output", "for", "key", "in", "connections", ".", "keys", "(", ")", ":", "if", "output", ".", "status", ":", "sys", ".", "stdout", ".", "write", "(", "(", "'Disconnecting from %s... '", "%", "denormalize", "(", "key", ")", ")", ")", "connections", "[", "key", "]", ".", "close", "(", ")", "del", "connections", "[", "key", "]", "if", "output", ".", "status", ":", "sys", ".", "stdout", ".", "write", "(", "'done.\\n'", ")" ]
disconnect from all open ssh connections .
train
false
6,127
def p_ruleitem(p): p[0] = p[1]
[ "def", "p_ruleitem", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
ruleitem : id | qliteral | code | prec .
train
false
6,128
def _write_static_group_mapping(user_group_mapping, path): f = file(path, 'w') try: for (user, groups) in user_group_mapping.iteritems(): f.write(('%s = %s\n' % (user, ','.join(groups)))) finally: f.close()
[ "def", "_write_static_group_mapping", "(", "user_group_mapping", ",", "path", ")", ":", "f", "=", "file", "(", "path", ",", "'w'", ")", "try", ":", "for", "(", "user", ",", "groups", ")", "in", "user_group_mapping", ".", "iteritems", "(", ")", ":", "f", ".", "write", "(", "(", "'%s = %s\\n'", "%", "(", "user", ",", "','", ".", "join", "(", "groups", ")", ")", ")", ")", "finally", ":", "f", ".", "close", "(", ")" ]
create a java-style .properties file with the user to group mapping .
train
false
6,129
def instantiateShootCallback(): d = defer.Deferred() d.callback(1)
[ "def", "instantiateShootCallback", "(", ")", ":", "d", "=", "defer", ".", "Deferred", "(", ")", "d", ".", "callback", "(", "1", ")" ]
create a deferred and give it a normal result .
train
false
6,131
def get_scheduler_lock(get=None, collection=None): actual_get = effective_get(get, collection) if (actual_get == multiprocessing.get): return mp.Manager().Lock() return SerializableLock()
[ "def", "get_scheduler_lock", "(", "get", "=", "None", ",", "collection", "=", "None", ")", ":", "actual_get", "=", "effective_get", "(", "get", ",", "collection", ")", "if", "(", "actual_get", "==", "multiprocessing", ".", "get", ")", ":", "return", "mp", ".", "Manager", "(", ")", ".", "Lock", "(", ")", "return", "SerializableLock", "(", ")" ]
get an instance of the appropriate lock for a certain situation based on scheduler used .
train
false
6,132
def assert_warns(warning_class, func, *args, **kw): with WarningManager(record=True) as l: warnings.simplefilter('always') func(*args, **kw) if (not (len(l) > 0)): raise AssertionError(('No warning raised when calling %s' % func.__name__)) if (not (l[0].category is warning_class)): raise AssertionError(('First warning for %s is not a %s( is %s)' % (func.__name__, warning_class, l[0])))
[ "def", "assert_warns", "(", "warning_class", ",", "func", ",", "*", "args", ",", "**", "kw", ")", ":", "with", "WarningManager", "(", "record", "=", "True", ")", "as", "l", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "func", "(", "*", "args", ",", "**", "kw", ")", "if", "(", "not", "(", "len", "(", "l", ")", ">", "0", ")", ")", ":", "raise", "AssertionError", "(", "(", "'No warning raised when calling %s'", "%", "func", ".", "__name__", ")", ")", "if", "(", "not", "(", "l", "[", "0", "]", ".", "category", "is", "warning_class", ")", ")", ":", "raise", "AssertionError", "(", "(", "'First warning for %s is not a %s( is %s)'", "%", "(", "func", ".", "__name__", ",", "warning_class", ",", "l", "[", "0", "]", ")", ")", ")" ]
test that a certain warning occurs .
train
false
6,134
def _snapshot_to_data(snapshot): data = {} data['id'] = snapshot[0] data['type'] = ['single', 'pre', 'post'][snapshot[1]] if (data['type'] == 'post'): data['pre'] = snapshot[2] if (snapshot[3] != (-1)): data['timestamp'] = snapshot[3] else: data['timestamp'] = int(time.time()) data['user'] = getpwuid(snapshot[4])[0] data['description'] = snapshot[5] data['cleanup'] = snapshot[6] data['userdata'] = {} for (key, value) in snapshot[7].items(): data['userdata'][key] = value return data
[ "def", "_snapshot_to_data", "(", "snapshot", ")", ":", "data", "=", "{", "}", "data", "[", "'id'", "]", "=", "snapshot", "[", "0", "]", "data", "[", "'type'", "]", "=", "[", "'single'", ",", "'pre'", ",", "'post'", "]", "[", "snapshot", "[", "1", "]", "]", "if", "(", "data", "[", "'type'", "]", "==", "'post'", ")", ":", "data", "[", "'pre'", "]", "=", "snapshot", "[", "2", "]", "if", "(", "snapshot", "[", "3", "]", "!=", "(", "-", "1", ")", ")", ":", "data", "[", "'timestamp'", "]", "=", "snapshot", "[", "3", "]", "else", ":", "data", "[", "'timestamp'", "]", "=", "int", "(", "time", ".", "time", "(", ")", ")", "data", "[", "'user'", "]", "=", "getpwuid", "(", "snapshot", "[", "4", "]", ")", "[", "0", "]", "data", "[", "'description'", "]", "=", "snapshot", "[", "5", "]", "data", "[", "'cleanup'", "]", "=", "snapshot", "[", "6", "]", "data", "[", "'userdata'", "]", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "snapshot", "[", "7", "]", ".", "items", "(", ")", ":", "data", "[", "'userdata'", "]", "[", "key", "]", "=", "value", "return", "data" ]
returns snapshot data from a d-bus response .
train
true
6,137
def test_get_config(): conf = config.get_config('tests/test-config/valid-config.yaml') expected_conf = {'cookiecutters_dir': '/home/example/some-path-to-templates', 'replay_dir': '/home/example/some-path-to-replay-files', 'default_context': {'full_name': 'Firstname Lastname', 'email': 'firstname.lastname@gmail.com', 'github_username': 'example'}, 'abbreviations': {'gh': 'https://github.com/{0}.git', 'bb': 'https://bitbucket.org/{0}'}} assert (conf == expected_conf)
[ "def", "test_get_config", "(", ")", ":", "conf", "=", "config", ".", "get_config", "(", "'tests/test-config/valid-config.yaml'", ")", "expected_conf", "=", "{", "'cookiecutters_dir'", ":", "'/home/example/some-path-to-templates'", ",", "'replay_dir'", ":", "'/home/example/some-path-to-replay-files'", ",", "'default_context'", ":", "{", "'full_name'", ":", "'Firstname Lastname'", ",", "'email'", ":", "'firstname.lastname@gmail.com'", ",", "'github_username'", ":", "'example'", "}", ",", "'abbreviations'", ":", "{", "'gh'", ":", "'https://github.com/{0}.git'", ",", "'bb'", ":", "'https://bitbucket.org/{0}'", "}", "}", "assert", "(", "conf", "==", "expected_conf", ")" ]
opening and reading config file .
train
false
6,138
def override_pylons_about(): return render_template(u'about.html')
[ "def", "override_pylons_about", "(", ")", ":", "return", "render_template", "(", "u'about.html'", ")" ]
a simple replacement for the pylons about page .
train
false
6,139
def skipIfNoExecutable(executable): with open(os.devnull, u'w') as fnull: try: res = subprocess.call(executable, stdout=fnull, stderr=fnull) except OSError: res = None if (res is None): return unittest.skip(u'{0} executable not found'.format(executable)) return (lambda func: func)
[ "def", "skipIfNoExecutable", "(", "executable", ")", ":", "with", "open", "(", "os", ".", "devnull", ",", "u'w'", ")", "as", "fnull", ":", "try", ":", "res", "=", "subprocess", ".", "call", "(", "executable", ",", "stdout", "=", "fnull", ",", "stderr", "=", "fnull", ")", "except", "OSError", ":", "res", "=", "None", "if", "(", "res", "is", "None", ")", ":", "return", "unittest", ".", "skip", "(", "u'{0} executable not found'", ".", "format", "(", "executable", ")", ")", "return", "(", "lambda", "func", ":", "func", ")" ]
skip test if executable is not found. tries to run executable with subprocess to make sure it's in the path .
train
false
6,140
def vulnerability_callback(id, type, server_addr, server_port, applications): logger.critical(('Vulnerability %s in connection %s to %s:%s by %s' % (type, id, server_addr, server_port, ', '.join((('%s version %s' % (app.application, app.version)) for app in applications)))))
[ "def", "vulnerability_callback", "(", "id", ",", "type", ",", "server_addr", ",", "server_port", ",", "applications", ")", ":", "logger", ".", "critical", "(", "(", "'Vulnerability %s in connection %s to %s:%s by %s'", "%", "(", "type", ",", "id", ",", "server_addr", ",", "server_port", ",", "', '", ".", "join", "(", "(", "(", "'%s version %s'", "%", "(", "app", ".", "application", ",", "app", ".", "version", ")", ")", "for", "app", "in", "applications", ")", ")", ")", ")", ")" ]
called when a vulnerability is reported .
train
false
6,141
def stripNameSpace(xml): r = re.compile('^(<?[^>]+?>\\s*)(<\\w+) xmlns=[\'"](http://[^\'"]+)[\'"](.*)', re.MULTILINE) if r.match(xml): xmlns = r.match(xml).groups()[2] xml = r.sub('\\1\\2\\4', xml) else: xmlns = None return (xml, xmlns)
[ "def", "stripNameSpace", "(", "xml", ")", ":", "r", "=", "re", ".", "compile", "(", "'^(<?[^>]+?>\\\\s*)(<\\\\w+) xmlns=[\\'\"](http://[^\\'\"]+)[\\'\"](.*)'", ",", "re", ".", "MULTILINE", ")", "if", "r", ".", "match", "(", "xml", ")", ":", "xmlns", "=", "r", ".", "match", "(", "xml", ")", ".", "groups", "(", ")", "[", "2", "]", "xml", "=", "r", ".", "sub", "(", "'\\\\1\\\\2\\\\4'", ",", "xml", ")", "else", ":", "xmlns", "=", "None", "return", "(", "xml", ",", "xmlns", ")" ]
remove the top-level aws namespace from the xml string .
train
false
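A worked example of stripNameSpace on a typical AWS response envelope:

doc = '<?xml version="1.0"?>\n<Response xmlns="http://queue.amazonaws.com/doc/2009-02-01/">ok</Response>'
xml, xmlns = stripNameSpace(doc)
# xmlns == 'http://queue.amazonaws.com/doc/2009-02-01/'
# xml is the same document with the xmlns attribute removed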
6,142
def _cluster_to_full_summary(cluster, now=None): cs = _cluster_to_basic_summary(cluster, now=now) cs['usage'] = _cluster_to_usage_data(cluster, basic_summary=cs, now=now) if cs['end']: cs['nih_billed'] = cs['nih'] else: cs['nih_billed'] = float(sum((u['nih_billed'] for u in cs['usage']))) for nih_type in ('nih_used', 'nih_bbnu'): cs[nih_type] = float(sum((u[nih_type] for u in cs['usage']))) return cs
[ "def", "_cluster_to_full_summary", "(", "cluster", ",", "now", "=", "None", ")", ":", "cs", "=", "_cluster_to_basic_summary", "(", "cluster", ",", "now", "=", "now", ")", "cs", "[", "'usage'", "]", "=", "_cluster_to_usage_data", "(", "cluster", ",", "basic_summary", "=", "cs", ",", "now", "=", "now", ")", "if", "cs", "[", "'end'", "]", ":", "cs", "[", "'nih_billed'", "]", "=", "cs", "[", "'nih'", "]", "else", ":", "cs", "[", "'nih_billed'", "]", "=", "float", "(", "sum", "(", "(", "u", "[", "'nih_billed'", "]", "for", "u", "in", "cs", "[", "'usage'", "]", ")", ")", ")", "for", "nih_type", "in", "(", "'nih_used'", ",", "'nih_bbnu'", ")", ":", "cs", "[", "nih_type", "]", "=", "float", "(", "sum", "(", "(", "u", "[", "nih_type", "]", "for", "u", "in", "cs", "[", "'usage'", "]", ")", ")", ")", "return", "cs" ]
convert a cluster to a full summary for use in creating a report .
train
false
6,143
def _edge_value(G, edge_attr): if (edge_attr is None): if G.is_multigraph(): value = (lambda u, v: len(G[u][v])) else: value = (lambda u, v: 1) elif (not hasattr(edge_attr, '__call__')): if (edge_attr == 'weight'): if G.is_multigraph(): value = (lambda u, v: sum([d.get(edge_attr, 1) for d in G[u][v].values()])) else: value = (lambda u, v: G[u][v].get(edge_attr, 1)) elif G.is_multigraph(): value = (lambda u, v: sum([d[edge_attr] for d in G[u][v].values()])) else: value = (lambda u, v: G[u][v][edge_attr]) else: value = edge_attr return value
[ "def", "_edge_value", "(", "G", ",", "edge_attr", ")", ":", "if", "(", "edge_attr", "is", "None", ")", ":", "if", "G", ".", "is_multigraph", "(", ")", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "len", "(", "G", "[", "u", "]", "[", "v", "]", ")", ")", "else", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "1", ")", "elif", "(", "not", "hasattr", "(", "edge_attr", ",", "'__call__'", ")", ")", ":", "if", "(", "edge_attr", "==", "'weight'", ")", ":", "if", "G", ".", "is_multigraph", "(", ")", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "sum", "(", "[", "d", ".", "get", "(", "edge_attr", ",", "1", ")", "for", "d", "in", "G", "[", "u", "]", "[", "v", "]", ".", "values", "(", ")", "]", ")", ")", "else", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "G", "[", "u", "]", "[", "v", "]", ".", "get", "(", "edge_attr", ",", "1", ")", ")", "elif", "G", ".", "is_multigraph", "(", ")", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "sum", "(", "[", "d", "[", "edge_attr", "]", "for", "d", "in", "G", "[", "u", "]", "[", "v", "]", ".", "values", "(", ")", "]", ")", ")", "else", ":", "value", "=", "(", "lambda", "u", ",", "v", ":", "G", "[", "u", "]", "[", "v", "]", "[", "edge_attr", "]", ")", "else", ":", "value", "=", "edge_attr", "return", "value" ]
returns a function that returns a value from g[u][v] .
train
false
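A usage sketch for _edge_value on a simple networkx graph:

import networkx as nx
G = nx.Graph()
G.add_edge('a', 'b', weight=2.5)
value = _edge_value(G, 'weight')  # returns a lookup function, not a value
value('a', 'b')  # -> 2.5; edges without the attribute default to 1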
6,145
def calculated_hp(base_stat, level, iv, effort, nature=None): if (base_stat == 1): return 1 return (((((((base_stat * 2) + iv) + (effort // 4)) * level) // 100) + 10) + level)
[ "def", "calculated_hp", "(", "base_stat", ",", "level", ",", "iv", ",", "effort", ",", "nature", "=", "None", ")", ":", "if", "(", "base_stat", "==", "1", ")", ":", "return", "1", "return", "(", "(", "(", "(", "(", "(", "(", "base_stat", "*", "2", ")", "+", "iv", ")", "+", "(", "effort", "//", "4", ")", ")", "*", "level", ")", "//", "100", ")", "+", "10", ")", "+", "level", ")" ]
similar to calculated_stat, but uses the hp formula .
train
false
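A worked example of the HP formula: a base 108 HP stat at level 50 with a perfect IV (31) and maximum effort (252):

calculated_hp(108, 50, 31, 252)
# ((108*2 + 31 + 252//4) * 50)//100 + 10 + 50  ==  155 + 60  ==  215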
6,146
def p_enumerator_list_2(t): pass
[ "def", "p_enumerator_list_2", "(", "t", ")", ":", "pass" ]
enumerator_list : enumerator_list comma enumerator .
train
false
6,148
def list_of_nodes(inputs, outputs): return stack_search(deque([o.owner for o in outputs]), (lambda o: [inp.owner for inp in o.inputs if (inp.owner and (not any(((i in inp.owner.outputs) for i in inputs))))]))
[ "def", "list_of_nodes", "(", "inputs", ",", "outputs", ")", ":", "return", "stack_search", "(", "deque", "(", "[", "o", ".", "owner", "for", "o", "in", "outputs", "]", ")", ",", "(", "lambda", "o", ":", "[", "inp", ".", "owner", "for", "inp", "in", "o", ".", "inputs", "if", "(", "inp", ".", "owner", "and", "(", "not", "any", "(", "(", "(", "i", "in", "inp", ".", "owner", ".", "outputs", ")", "for", "i", "in", "inputs", ")", ")", ")", ")", "]", ")", ")" ]
return the apply nodes of the graph between inputs and outputs .
train
false
6,149
def remove_file_extension(filename): for suff in EXTENSION_SUFFIXES: if filename.endswith(suff): return filename[0:filename.rfind(suff)] return os.path.splitext(filename)[0]
[ "def", "remove_file_extension", "(", "filename", ")", ":", "for", "suff", "in", "EXTENSION_SUFFIXES", ":", "if", "filename", ".", "endswith", "(", "suff", ")", ":", "return", "filename", "[", "0", ":", "filename", ".", "rfind", "(", "suff", ")", "]", "return", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "0", "]" ]
this function returns filename without its extension .
train
false
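A usage sketch for remove_file_extension; EXTENSION_SUFFIXES is assumed to hold compiled-extension suffixes such as importlib.machinery.EXTENSION_SUFFIXES:

remove_file_extension('fast.cpython-36m-x86_64-linux-gnu.so')  # -> 'fast' (whole multi-part suffix stripped)
remove_file_extension('notes.txt')                             # -> 'notes' (falls back to os.path.splitext)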
6,150
def p_logical_and_expression_1(t): pass
[ "def", "p_logical_and_expression_1", "(", "t", ")", ":", "pass" ]
logical_and_expression : inclusive_or_expression .
train
false
6,153
def forward_property(member): class Descriptor(object, ): def __init__(self, func_or_name): self._property_name = (func_or_name.__name__ if callable(func_or_name) else func_or_name) def __get__(self, obj, cls=None): return getattr(getattr(obj, member), self._property_name) def __set__(self, obj, value): return setattr(getattr(obj, member), self._property_name, value) return Descriptor
[ "def", "forward_property", "(", "member", ")", ":", "class", "Descriptor", "(", "object", ",", ")", ":", "def", "__init__", "(", "self", ",", "func_or_name", ")", ":", "self", ".", "_property_name", "=", "(", "func_or_name", ".", "__name__", "if", "callable", "(", "func_or_name", ")", "else", "func_or_name", ")", "def", "__get__", "(", "self", ",", "obj", ",", "cls", "=", "None", ")", ":", "return", "getattr", "(", "getattr", "(", "obj", ",", "member", ")", ",", "self", ".", "_property_name", ")", "def", "__set__", "(", "self", ",", "obj", ",", "value", ")", ":", "return", "setattr", "(", "getattr", "(", "obj", ",", "member", ")", ",", "self", ".", "_property_name", ",", "value", ")", "return", "Descriptor" ]
property that forwards access to a nested object .
train
false
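A usage sketch for forward_property, with hypothetical Engine/Car classes:

class Engine(object):
    def __init__(self):
        self.rpm = 0

class Car(object):
    rpm = forward_property('engine')('rpm')  # Car.rpm proxies self.engine.rpm
    def __init__(self):
        self.engine = Engine()

car = Car()
car.rpm = 3000                   # routed through the descriptor's __set__
assert car.engine.rpm == 3000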
6,154
@frappe.whitelist() def quick_kanban_board(doctype, board_name, field_name): doc = frappe.new_doc(u'Kanban Board') options = frappe.get_value(u'DocField', dict(parent=doctype, fieldname=field_name), u'options') columns = [] if options: columns = options.split(u'\n') for column in columns: if (not column): continue doc.append(u'columns', dict(column_name=column)) doc.kanban_board_name = board_name doc.reference_doctype = doctype doc.field_name = field_name doc.save() return doc
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "quick_kanban_board", "(", "doctype", ",", "board_name", ",", "field_name", ")", ":", "doc", "=", "frappe", ".", "new_doc", "(", "u'Kanban Board'", ")", "options", "=", "frappe", ".", "get_value", "(", "u'DocField'", ",", "dict", "(", "parent", "=", "doctype", ",", "fieldname", "=", "field_name", ")", ",", "u'options'", ")", "columns", "=", "[", "]", "if", "options", ":", "columns", "=", "options", ".", "split", "(", "u'\\n'", ")", "for", "column", "in", "columns", ":", "if", "(", "not", "column", ")", ":", "continue", "doc", ".", "append", "(", "u'columns'", ",", "dict", "(", "column_name", "=", "column", ")", ")", "doc", ".", "kanban_board_name", "=", "board_name", "doc", ".", "reference_doctype", "=", "doctype", "doc", ".", "field_name", "=", "field_name", "doc", ".", "save", "(", ")", "return", "doc" ]
create new kanbanboard quickly with default options .
train
false
6,155
def feedkeys(keys, mode='n'): if (eval('mode()') == 'n'): if (keys == 'a'): cursor_pos = get_cursor_pos() cursor_pos[2] = (int(cursor_pos[2]) + 1) set_cursor_from_pos(cursor_pos) if (keys in 'ai'): keys = 'startinsert' if (keys == 'startinsert'): command('startinsert') else: command((as_unicode('call feedkeys("%s", "%s")') % (keys, mode)))
[ "def", "feedkeys", "(", "keys", ",", "mode", "=", "'n'", ")", ":", "if", "(", "eval", "(", "'mode()'", ")", "==", "'n'", ")", ":", "if", "(", "keys", "==", "'a'", ")", ":", "cursor_pos", "=", "get_cursor_pos", "(", ")", "cursor_pos", "[", "2", "]", "=", "(", "int", "(", "cursor_pos", "[", "2", "]", ")", "+", "1", ")", "set_cursor_from_pos", "(", "cursor_pos", ")", "if", "(", "keys", "in", "'ai'", ")", ":", "keys", "=", "'startinsert'", "if", "(", "keys", "==", "'startinsert'", ")", ":", "command", "(", "'startinsert'", ")", "else", ":", "command", "(", "(", "as_unicode", "(", "'call feedkeys(\"%s\", \"%s\")'", ")", "%", "(", "keys", ",", "mode", ")", ")", ")" ]
wrapper around vim's feedkeys function .
train
false
6,157
def get_masquerading_group_info(user, course_key): course_masquerade = get_course_masquerade(user, course_key) if (not course_masquerade): return (None, None) return (course_masquerade.group_id, course_masquerade.user_partition_id)
[ "def", "get_masquerading_group_info", "(", "user", ",", "course_key", ")", ":", "course_masquerade", "=", "get_course_masquerade", "(", "user", ",", "course_key", ")", "if", "(", "not", "course_masquerade", ")", ":", "return", "(", "None", ",", "None", ")", "return", "(", "course_masquerade", ".", "group_id", ",", "course_masquerade", ".", "user_partition_id", ")" ]
if the user is masquerading as belonging to a group, return the group id and user partition id .
train
false
6,158
def write_trunc_fasta(trunc_fasta_seqs, fasta_out_fp, seq_order): fasta_out = open(fasta_out_fp, 'w') for label in seq_order: trunc_label = label.split()[0].strip() fasta_out.write(('>%s\n%s\n' % (label, trunc_fasta_seqs[trunc_label])))
[ "def", "write_trunc_fasta", "(", "trunc_fasta_seqs", ",", "fasta_out_fp", ",", "seq_order", ")", ":", "fasta_out", "=", "open", "(", "fasta_out_fp", ",", "'w'", ")", "for", "label", "in", "seq_order", ":", "trunc_label", "=", "label", ".", "split", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "fasta_out", ".", "write", "(", "(", "'>%s\\n%s\\n'", "%", "(", "label", ",", "trunc_fasta_seqs", "[", "trunc_label", "]", ")", ")", ")" ]
writes truncated fasta seqs in the order specified with seq_order. trunc_fasta_seqs: dict of fasta label to truncated sequence string. fasta_out_fp: output filepath to write to. seq_order: list of fasta labels in the order of the original input fasta .
train
false
6,159
def uses_requirement(requirement, field): if (hasattr(field.requires, 'other') or (requirement in str(field.requires))): if hasattr(field.requires, 'other'): if (requirement in str(field.requires.other)): return True elif (requirement in str(field.requires)): return True return False
[ "def", "uses_requirement", "(", "requirement", ",", "field", ")", ":", "if", "(", "hasattr", "(", "field", ".", "requires", ",", "'other'", ")", "or", "(", "requirement", "in", "str", "(", "field", ".", "requires", ")", ")", ")", ":", "if", "hasattr", "(", "field", ".", "requires", ",", "'other'", ")", ":", "if", "(", "requirement", "in", "str", "(", "field", ".", "requires", ".", "other", ")", ")", ":", "return", "True", "elif", "(", "requirement", "in", "str", "(", "field", ".", "requires", ")", ")", ":", "return", "True", "return", "False" ]
check if a given database field uses the specified requirement @todo: deprecate .
train
false
6,161
def isSharpCorner(beginComplex, centerComplex, endComplex): centerBeginComplex = (beginComplex - centerComplex) centerEndComplex = (endComplex - centerComplex) centerBeginLength = abs(centerBeginComplex) centerEndLength = abs(centerEndComplex) if ((centerBeginLength <= 0.0) or (centerEndLength <= 0.0)): return False centerBeginComplex /= centerBeginLength centerEndComplex /= centerEndLength return (euclidean.getDotProduct(centerBeginComplex, centerEndComplex) > 0.9)
[ "def", "isSharpCorner", "(", "beginComplex", ",", "centerComplex", ",", "endComplex", ")", ":", "centerBeginComplex", "=", "(", "beginComplex", "-", "centerComplex", ")", "centerEndComplex", "=", "(", "endComplex", "-", "centerComplex", ")", "centerBeginLength", "=", "abs", "(", "centerBeginComplex", ")", "centerEndLength", "=", "abs", "(", "centerEndComplex", ")", "if", "(", "(", "centerBeginLength", "<=", "0.0", ")", "or", "(", "centerEndLength", "<=", "0.0", ")", ")", ":", "return", "False", "centerBeginComplex", "/=", "centerBeginLength", "centerEndComplex", "/=", "centerEndLength", "return", "(", "euclidean", ".", "getDotProduct", "(", "centerBeginComplex", ",", "centerEndComplex", ")", ">", "0.9", ")" ]
determine if the three complex points form a sharp corner .
train
false
6,162
def run_channel(args, build_last_n_versions=1): pkgs = get_affected_packages(args) for (pkg_name, pkg_tests) in pkgs: repo_data = _fetch_repo_data(args) c = conda_versions(pkg_name, repo_data) c = version_sorted(c)[:build_last_n_versions] if (not args.force_rebuild): time.sleep(1) q = quay_versions(args.namespace, pkg_name) versions = _new_versions(q, c) else: versions = c for tag in versions: target = build_target(pkg_name, tag=tag) targets = [target] mull_targets(targets, test=pkg_tests, **args_to_mull_targets_kwds(args))
[ "def", "run_channel", "(", "args", ",", "build_last_n_versions", "=", "1", ")", ":", "pkgs", "=", "get_affected_packages", "(", "args", ")", "for", "(", "pkg_name", ",", "pkg_tests", ")", "in", "pkgs", ":", "repo_data", "=", "_fetch_repo_data", "(", "args", ")", "c", "=", "conda_versions", "(", "pkg_name", ",", "repo_data", ")", "c", "=", "version_sorted", "(", "c", ")", "[", ":", "build_last_n_versions", "]", "if", "(", "not", "args", ".", "force_rebuild", ")", ":", "time", ".", "sleep", "(", "1", ")", "q", "=", "quay_versions", "(", "args", ".", "namespace", ",", "pkg_name", ")", "versions", "=", "_new_versions", "(", "q", ",", "c", ")", "else", ":", "versions", "=", "c", "for", "tag", "in", "versions", ":", "target", "=", "build_target", "(", "pkg_name", ",", "tag", "=", "tag", ")", "targets", "=", "[", "target", "]", "mull_targets", "(", "targets", ",", "test", "=", "pkg_tests", ",", "**", "args_to_mull_targets_kwds", "(", "args", ")", ")" ]
build list of involucro commands to run .
train
false
6,163
def cleanpath(path): items = path.split('.') if (len(items) > 1): path = re.sub('[^\\w\\.]+', '_', (('_'.join(items[:(-1)]) + '.') + ''.join(items[(-1):]))) else: path = re.sub('[^\\w\\.]+', '_', ''.join(items[(-1):])) return path
[ "def", "cleanpath", "(", "path", ")", ":", "items", "=", "path", ".", "split", "(", "'.'", ")", "if", "(", "len", "(", "items", ")", ">", "1", ")", ":", "path", "=", "re", ".", "sub", "(", "'[^\\\\w\\\\.]+'", ",", "'_'", ",", "(", "(", "'_'", ".", "join", "(", "items", "[", ":", "(", "-", "1", ")", "]", ")", "+", "'.'", ")", "+", "''", ".", "join", "(", "items", "[", "(", "-", "1", ")", ":", "]", ")", ")", ")", "else", ":", "path", "=", "re", ".", "sub", "(", "'[^\\\\w\\\\.]+'", ",", "'_'", ",", "''", ".", "join", "(", "items", "[", "(", "-", "1", ")", ":", "]", ")", ")", "return", "path" ]
turns any expression/path into a valid filename .
train
false
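Worked examples of cleanpath; it keeps the last dot as an extension separator and turns every other non-word run into an underscore:

cleanpath('results/run 1.final.csv')  # -> 'results_run_1_final.csv'
cleanpath('a b c')                    # -> 'a_b_c'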
6,164
def urlparse(url, scheme='', allow_fragments=True): (url, scheme, _coerce_result) = _coerce_args(url, scheme) splitresult = urlsplit(url, scheme, allow_fragments) (scheme, netloc, url, query, fragment) = splitresult if ((scheme in uses_params) and (';' in url)): (url, params) = _splitparams(url) else: params = '' result = ParseResult(scheme, netloc, url, params, query, fragment) return _coerce_result(result)
[ "def", "urlparse", "(", "url", ",", "scheme", "=", "''", ",", "allow_fragments", "=", "True", ")", ":", "(", "url", ",", "scheme", ",", "_coerce_result", ")", "=", "_coerce_args", "(", "url", ",", "scheme", ")", "splitresult", "=", "urlsplit", "(", "url", ",", "scheme", ",", "allow_fragments", ")", "(", "scheme", ",", "netloc", ",", "url", ",", "query", ",", "fragment", ")", "=", "splitresult", "if", "(", "(", "scheme", "in", "uses_params", ")", "and", "(", "';'", "in", "url", ")", ")", ":", "(", "url", ",", "params", ")", "=", "_splitparams", "(", "url", ")", "else", ":", "params", "=", "''", "result", "=", "ParseResult", "(", "scheme", ",", "netloc", ",", "url", ",", "params", ",", "query", ",", "fragment", ")", "return", "_coerce_result", "(", "result", ")" ]
parse a url into six components .
train
true
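A worked example of this urlparse (it mirrors the standard library: params are split off the last path segment only for schemes in uses_params):

urlparse('http://example.com/pkg;v=2?q=1#top')
# -> ParseResult(scheme='http', netloc='example.com', path='/pkg', params='v=2', query='q=1', fragment='top')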
6,165
def askstring(title, prompt, **kw): d = _QueryString(title, prompt, **kw) return d.result
[ "def", "askstring", "(", "title", ",", "prompt", ",", "**", "kw", ")", ":", "d", "=", "_QueryString", "(", "title", ",", "prompt", ",", "**", "kw", ")", "return", "d", ".", "result" ]
get a string from the user. arguments: title -- the dialog title. prompt -- the label text. **kw -- see simpledialog class. return value is a string .
train
false
6,166
def test_one_of(): assert (hug.types.one_of(('bacon', 'sausage', 'pancakes'))('bacon') == 'bacon') assert (hug.types.one_of(['bacon', 'sausage', 'pancakes'])('sausage') == 'sausage') assert (hug.types.one_of({'bacon', 'sausage', 'pancakes'})('pancakes') == 'pancakes') assert ('bacon' in hug.types.one_of({'bacon', 'sausage', 'pancakes'}).__doc__) with pytest.raises(KeyError): hug.types.one_of({'bacon', 'sausage', 'pancakes'})('syrup')
[ "def", "test_one_of", "(", ")", ":", "assert", "(", "hug", ".", "types", ".", "one_of", "(", "(", "'bacon'", ",", "'sausage'", ",", "'pancakes'", ")", ")", "(", "'bacon'", ")", "==", "'bacon'", ")", "assert", "(", "hug", ".", "types", ".", "one_of", "(", "[", "'bacon'", ",", "'sausage'", ",", "'pancakes'", "]", ")", "(", "'sausage'", ")", "==", "'sausage'", ")", "assert", "(", "hug", ".", "types", ".", "one_of", "(", "{", "'bacon'", ",", "'sausage'", ",", "'pancakes'", "}", ")", "(", "'pancakes'", ")", "==", "'pancakes'", ")", "assert", "(", "'bacon'", "in", "hug", ".", "types", ".", "one_of", "(", "{", "'bacon'", ",", "'sausage'", ",", "'pancakes'", "}", ")", ".", "__doc__", ")", "with", "pytest", ".", "raises", "(", "KeyError", ")", ":", "hug", ".", "types", ".", "one_of", "(", "{", "'bacon'", ",", "'sausage'", ",", "'pancakes'", "}", ")", "(", "'syrup'", ")" ]
tests that hug allows limiting a value to one of a list of values .
train
false
6,167
@register.filter @jinja2.contextfilter def flag(context, addon): status = statusflags(context, addon) msg = {'unreviewed': _('Not Reviewed'), 'featuredaddon': _('Featured')} if status: return jinja2.Markup((u'<h5 class="flag">%s</h5>' % msg[status])) else: return ''
[ "@", "register", ".", "filter", "@", "jinja2", ".", "contextfilter", "def", "flag", "(", "context", ",", "addon", ")", ":", "status", "=", "statusflags", "(", "context", ",", "addon", ")", "msg", "=", "{", "'unreviewed'", ":", "_", "(", "'Not Reviewed'", ")", ",", "'featuredaddon'", ":", "_", "(", "'Featured'", ")", "}", "if", "status", ":", "return", "jinja2", ".", "Markup", "(", "(", "u'<h5 class=\"flag\">%s</h5>'", "%", "msg", "[", "status", "]", ")", ")", "else", ":", "return", "''" ]
render a status flag for an addon .
train
false
6,168
def get_local_version(pear_output): lines = pear_output.split('\n') for line in lines: if ('Installed ' in line): installed = line.rsplit(None, 1)[(-1)].strip() if (installed == '-'): continue return installed return None
[ "def", "get_local_version", "(", "pear_output", ")", ":", "lines", "=", "pear_output", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "(", "'Installed '", "in", "line", ")", ":", "installed", "=", "line", ".", "rsplit", "(", "None", ",", "1", ")", "[", "(", "-", "1", ")", "]", ".", "strip", "(", ")", "if", "(", "installed", "==", "'-'", ")", ":", "continue", "return", "installed", "return", "None" ]
take pear remoteinfo output and get the installed version .
train
false
6,169
def user_getmedia(userids=None, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'usermedia.get' if userids: params = {'userids': userids} else: params = {} params = _params_extend(params, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result'] else: raise KeyError except KeyError: return False
[ "def", "user_getmedia", "(", "userids", "=", "None", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'usermedia.get'", "if", "userids", ":", "params", "=", "{", "'userids'", ":", "userids", "}", "else", ":", "params", "=", "{", "}", "params", "=", "_params_extend", "(", "params", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "False" ]
retrieve media according to the given parameters. note: this function accepts all standard usermedia.get properties .
train
false
6,170
def _get_expiration_seconds(expiration): if isinstance(expiration, datetime.timedelta): now = _NOW().replace(tzinfo=UTC) expiration = (now + expiration) if isinstance(expiration, datetime.datetime): micros = _microseconds_from_datetime(expiration) expiration = (micros // (10 ** 6)) if (not isinstance(expiration, six.integer_types)): raise TypeError(('Expected an integer timestamp, datetime, or timedelta. Got %s' % type(expiration))) return expiration
[ "def", "_get_expiration_seconds", "(", "expiration", ")", ":", "if", "isinstance", "(", "expiration", ",", "datetime", ".", "timedelta", ")", ":", "now", "=", "_NOW", "(", ")", ".", "replace", "(", "tzinfo", "=", "UTC", ")", "expiration", "=", "(", "now", "+", "expiration", ")", "if", "isinstance", "(", "expiration", ",", "datetime", ".", "datetime", ")", ":", "micros", "=", "_microseconds_from_datetime", "(", "expiration", ")", "expiration", "=", "(", "micros", "//", "(", "10", "**", "6", ")", ")", "if", "(", "not", "isinstance", "(", "expiration", ",", "six", ".", "integer_types", ")", ")", ":", "raise", "TypeError", "(", "(", "'Expected an integer timestamp, datetime, or timedelta. Got %s'", "%", "type", "(", "expiration", ")", ")", ")", "return", "expiration" ]
convert expiration to a number of seconds in the future .
train
false
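A minimal sketch of the accepted input kinds, assuming the record's module-level helpers (_NOW, UTC, _microseconds_from_datetime) are importable alongside it:

import datetime

assert _get_expiration_seconds(1500000000) == 1500000000  # ints pass through
in_an_hour = _get_expiration_seconds(datetime.timedelta(hours=1))
# in_an_hour is an integer unix timestamp roughly 3600 seconds from now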
6,171
def test_strict_castform(session, media_root): with pytest.raises(ValueError): castform = session.query(tables.PokemonSpecies).filter_by(identifier=u'castform').first() rainy_castform = [f for f in castform.forms if (f.form_identifier == 'rainy')][0] rainy_castform = media.PokemonFormMedia(media_root, rainy_castform) rainy_castform.overworld('up', strict=True)
[ "def", "test_strict_castform", "(", "session", ",", "media_root", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "castform", "=", "session", ".", "query", "(", "tables", ".", "PokemonSpecies", ")", ".", "filter_by", "(", "identifier", "=", "u'castform'", ")", ".", "first", "(", ")", "rainy_castform", "=", "[", "f", "for", "f", "in", "castform", ".", "forms", "if", "(", "f", ".", "form_identifier", "==", "'rainy'", ")", "]", "[", "0", "]", "rainy_castform", "=", "media", ".", "PokemonFormMedia", "(", "media_root", ",", "rainy_castform", ")", "rainy_castform", ".", "overworld", "(", "'up'", ",", "strict", "=", "True", ")" ]
castform rainy form overworld with strict -- unavailable .
train
false
6,172
def displayhtml(public_key, use_ssl=False, error=None): error_param = '' if error: error_param = ('&error=%s' % error) if use_ssl: server = API_SSL_SERVER else: server = API_SERVER return ('<script type="text/javascript" src="%(ApiServer)s/challenge?k=%(PublicKey)s%(ErrorParam)s"></script>\n\n<noscript>\n <iframe src="%(ApiServer)s/noscript?k=%(PublicKey)s%(ErrorParam)s" height="300" width="500" frameborder="0"></iframe><br />\n <textarea name="recaptcha_challenge_field" rows="3" cols="40"></textarea>\n <input type=\'hidden\' name=\'recaptcha_response_field\' value=\'manual_challenge\' />\n</noscript>\n' % {'ApiServer': server, 'PublicKey': public_key, 'ErrorParam': error_param})
[ "def", "displayhtml", "(", "public_key", ",", "use_ssl", "=", "False", ",", "error", "=", "None", ")", ":", "error_param", "=", "''", "if", "error", ":", "error_param", "=", "(", "'&error=%s'", "%", "error", ")", "if", "use_ssl", ":", "server", "=", "API_SSL_SERVER", "else", ":", "server", "=", "API_SERVER", "return", "(", "'<script type=\"text/javascript\" src=\"%(ApiServer)s/challenge?k=%(PublicKey)s%(ErrorParam)s\"></script>\\n\\n<noscript>\\n <iframe src=\"%(ApiServer)s/noscript?k=%(PublicKey)s%(ErrorParam)s\" height=\"300\" width=\"500\" frameborder=\"0\"></iframe><br />\\n <textarea name=\"recaptcha_challenge_field\" rows=\"3\" cols=\"40\"></textarea>\\n <input type=\\'hidden\\' name=\\'recaptcha_response_field\\' value=\\'manual_challenge\\' />\\n</noscript>\\n'", "%", "{", "'ApiServer'", ":", "server", ",", "'PublicKey'", ":", "public_key", ",", "'ErrorParam'", ":", "error_param", "}", ")" ]
gets the html to display for recaptcha . public_key -- the public api key . use_ssl -- should the request be sent over ssl? . error -- an error message to display .
train
false
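A usage sketch, assuming the module-level API_SERVER and API_SSL_SERVER constants from the recaptcha client are in scope:

html = displayhtml('my-public-key', use_ssl=True, error='incorrect-captcha-sol')
# html is the <script>/<noscript> widget markup pointing at the ssl endpoint,
# with &error=incorrect-captcha-sol appended to both challenge urls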
6,173
def validate_adapter_class(validate_class, adapter_class): from .adapters import Adapter if isinstance(validate_class, dict): origional_data = validate_class.copy() validate_class = validate_class.get('import_path') if (not validate_class): raise Adapter.InvalidAdapterTypeException('The dictionary {} must contain a value for "import_path"'.format(str(origional_data))) if (not issubclass(import_module(validate_class), adapter_class)): raise Adapter.InvalidAdapterTypeException('{} must be a subclass of {}'.format(validate_class, adapter_class.__name__))
[ "def", "validate_adapter_class", "(", "validate_class", ",", "adapter_class", ")", ":", "from", ".", "adapters", "import", "Adapter", "if", "isinstance", "(", "validate_class", ",", "dict", ")", ":", "origional_data", "=", "validate_class", ".", "copy", "(", ")", "validate_class", "=", "validate_class", ".", "get", "(", "'import_path'", ")", "if", "(", "not", "validate_class", ")", ":", "raise", "Adapter", ".", "InvalidAdapterTypeException", "(", "'The dictionary {} must contain a value for \"import_path\"'", ".", "format", "(", "str", "(", "origional_data", ")", ")", ")", "if", "(", "not", "issubclass", "(", "import_module", "(", "validate_class", ")", ",", "adapter_class", ")", ")", ":", "raise", "Adapter", ".", "InvalidAdapterTypeException", "(", "'{} must be a subclass of {}'", ".", "format", "(", "validate_class", ",", "adapter_class", ".", "__name__", ")", ")" ]
raises an exception if validate_class is not a subclass of adapter_class .
train
true
6,174
def test_to_fits_1(): fits_name = get_pkg_data_filename(u'data/dist.fits') w = wcs.WCS(fits_name) wfits = w.to_fits() assert isinstance(wfits, fits.HDUList) assert isinstance(wfits[0], fits.PrimaryHDU) assert isinstance(wfits[1], fits.ImageHDU)
[ "def", "test_to_fits_1", "(", ")", ":", "fits_name", "=", "get_pkg_data_filename", "(", "u'data/dist.fits'", ")", "w", "=", "wcs", ".", "WCS", "(", "fits_name", ")", "wfits", "=", "w", ".", "to_fits", "(", ")", "assert", "isinstance", "(", "wfits", ",", "fits", ".", "HDUList", ")", "assert", "isinstance", "(", "wfits", "[", "0", "]", ",", "fits", ".", "PrimaryHDU", ")", "assert", "isinstance", "(", "wfits", "[", "1", "]", ",", "fits", ".", "ImageHDU", ")" ]
test to_fits() with lookuptable distortion .
train
false
6,177
def processElementNodeByFunctionPair(elementNode, geometryFunction, pathFunction): elementAttributesCopy = elementNode.attributes.copy() targets = evaluate.getElementNodesByKey(elementNode, 'target') for target in targets: targetAttributesCopy = target.attributes.copy() target.attributes = elementAttributesCopy processTargetByFunctionPair(geometryFunction, pathFunction, target) target.attributes = targetAttributesCopy
[ "def", "processElementNodeByFunctionPair", "(", "elementNode", ",", "geometryFunction", ",", "pathFunction", ")", ":", "elementAttributesCopy", "=", "elementNode", ".", "attributes", ".", "copy", "(", ")", "targets", "=", "evaluate", ".", "getElementNodesByKey", "(", "elementNode", ",", "'target'", ")", "for", "target", "in", "targets", ":", "targetAttributesCopy", "=", "target", ".", "attributes", ".", "copy", "(", ")", "target", ".", "attributes", "=", "elementAttributesCopy", "processTargetByFunctionPair", "(", "geometryFunction", ",", "pathFunction", ",", "target", ")", "target", ".", "attributes", "=", "targetAttributesCopy" ]
process the xml element by the appropriate manipulation function .
train
false
6,178
def tokenize_regex(input): p = re.compile(u'^(\n \\(\\?P\\<[a-zA-Z0-9_-]+\\> | # Start of named group.\n \\(\\?#[^)]*\\) | # Comment\n \\(\\?= | # Start of lookahead assertion\n \\(\\?! | # Start of negative lookahead assertion\n \\(\\?<= | # If preceded by.\n \\(\\?< | # If not preceded by.\n \\(?: | # Start of group. (non capturing.)\n \\( | # Start of group.\n \\(?[iLmsux] | # Flags.\n \\(?P=[a-zA-Z]+\\) | # Back reference to named group\n \\) | # End of group.\n \\{[^{}]*\\} | # Repetition\n \\*\\? | \\+\\? | \\?\\?\\ | # Non greedy repetition.\n \\* | \\+ | \\? | # Repetition\n \\#.*\\n | # Comment\n \\\\. |\n\n # Character group.\n \\[\n ( [^\\]\\\\] | \\\\.)*\n \\] |\n\n [^(){}] |\n .\n )', re.VERBOSE) tokens = [] while input: m = p.match(input) if m: (token, input) = (input[:m.end()], input[m.end():]) if (not token.isspace()): tokens.append(token) else: raise Exception(u'Could not tokenize input regex.') return tokens
[ "def", "tokenize_regex", "(", "input", ")", ":", "p", "=", "re", ".", "compile", "(", "u'^(\\n \\\\(\\\\?P\\\\<[a-zA-Z0-9_-]+\\\\> | # Start of named group.\\n \\\\(\\\\?#[^)]*\\\\) | # Comment\\n \\\\(\\\\?= | # Start of lookahead assertion\\n \\\\(\\\\?! | # Start of negative lookahead assertion\\n \\\\(\\\\?<= | # If preceded by.\\n \\\\(\\\\?< | # If not preceded by.\\n \\\\(?: | # Start of group. (non capturing.)\\n \\\\( | # Start of group.\\n \\\\(?[iLmsux] | # Flags.\\n \\\\(?P=[a-zA-Z]+\\\\) | # Back reference to named group\\n \\\\) | # End of group.\\n \\\\{[^{}]*\\\\} | # Repetition\\n \\\\*\\\\? | \\\\+\\\\? | \\\\?\\\\?\\\\ | # Non greedy repetition.\\n \\\\* | \\\\+ | \\\\? | # Repetition\\n \\\\#.*\\\\n | # Comment\\n \\\\\\\\. |\\n\\n # Character group.\\n \\\\[\\n ( [^\\\\]\\\\\\\\] | \\\\\\\\.)*\\n \\\\] |\\n\\n [^(){}] |\\n .\\n )'", ",", "re", ".", "VERBOSE", ")", "tokens", "=", "[", "]", "while", "input", ":", "m", "=", "p", ".", "match", "(", "input", ")", "if", "m", ":", "(", "token", ",", "input", ")", "=", "(", "input", "[", ":", "m", ".", "end", "(", ")", "]", ",", "input", "[", "m", ".", "end", "(", ")", ":", "]", ")", "if", "(", "not", "token", ".", "isspace", "(", ")", ")", ":", "tokens", ".", "append", "(", "token", ")", "else", ":", "raise", "Exception", "(", "u'Could not tokenize input regex.'", ")", "return", "tokens" ]
takes a regex pattern string and splits it into a list of tokens .
train
true
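A small illustration of the tokenizer; the lexemes follow from the alternatives in the verbose pattern (named-group opener, character class, repetition, group close, escape):

tokens = tokenize_regex(u'(?P<word>[a-z]+)\\d+')
# tokens == ['(?P<word>', '[a-z]', '+', ')', '\\d', '+']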
6,180
def _get_branch(repo, name): try: return [x for x in _all_branches(repo) if (x[0] == name)][0] except IndexError: return False
[ "def", "_get_branch", "(", "repo", ",", "name", ")", ":", "try", ":", "return", "[", "x", "for", "x", "in", "_all_branches", "(", "repo", ")", "if", "(", "x", "[", "0", "]", "==", "name", ")", "]", "[", "0", "]", "except", "IndexError", ":", "return", "False" ]
find the requested branch in the specified repo .
train
true
6,182
def path_to_uri(path): if isinstance(path, compat.text_type): path = path.encode(u'utf-8') path = urllib.parse.quote(path) return urllib.parse.urlunsplit(('file', '', path, '', ''))
[ "def", "path_to_uri", "(", "path", ")", ":", "if", "isinstance", "(", "path", ",", "compat", ".", "text_type", ")", ":", "path", "=", "path", ".", "encode", "(", "u'utf-8'", ")", "path", "=", "urllib", ".", "parse", ".", "quote", "(", "path", ")", "return", "urllib", ".", "parse", ".", "urlunsplit", "(", "(", "'file'", ",", "''", ",", "path", ",", "''", ",", "''", ")", ")" ]
convert file path to uri .
train
false
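A usage sketch, assuming the compat shim from the record's module:

uri = path_to_uri('/music/a song.mp3')
assert uri == 'file:///music/a%20song.mp3'  # spaces are percent-encoded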
6,183
def setWarningMethod(newMethod): global warn warn = newMethod
[ "def", "setWarningMethod", "(", "newMethod", ")", ":", "global", "warn", "warn", "=", "newMethod" ]
set the warning method to use to record deprecation warnings .
train
false
6,184
def served_by_django(url): r = requests.get(url, allow_redirects=False) status = (r.status_code == 200) django = (('x-served' not in r.headers) or (r.headers['x-served'] == 'nginx-via-django')) return all([status, django])
[ "def", "served_by_django", "(", "url", ")", ":", "r", "=", "requests", ".", "get", "(", "url", ",", "allow_redirects", "=", "False", ")", "status", "=", "(", "r", ".", "status_code", "==", "200", ")", "django", "=", "(", "(", "'x-served'", "not", "in", "r", ".", "headers", ")", "or", "(", "r", ".", "headers", "[", "'x-served'", "]", "==", "'nginx-via-django'", ")", ")", "return", "all", "(", "[", "status", ",", "django", "]", ")" ]
return true if url returns 200 and is served by django .
train
false
6,186
def _factor_indexer(shape, labels): mult = np.array(shape)[::(-1)].cumprod()[::(-1)] return _ensure_platform_int(np.sum((np.array(labels).T * np.append(mult, [1])), axis=1).T)
[ "def", "_factor_indexer", "(", "shape", ",", "labels", ")", ":", "mult", "=", "np", ".", "array", "(", "shape", ")", "[", ":", ":", "(", "-", "1", ")", "]", ".", "cumprod", "(", ")", "[", ":", ":", "(", "-", "1", ")", "]", "return", "_ensure_platform_int", "(", "np", ".", "sum", "(", "(", "np", ".", "array", "(", "labels", ")", ".", "T", "*", "np", ".", "append", "(", "mult", ",", "[", "1", "]", ")", ")", ",", "axis", "=", "1", ")", ".", "T", ")" ]
given a tuple of shape and a list of categorical labels , return the expanded label indexer .
train
false
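A worked example of the flattening, assuming pandas' _ensure_platform_int is importable; with a single radix of 4, the label pair (i, j) maps to i*4 + j:

labels = [[0, 1, 2], [1, 0, 3]]    # two label levels, three observations
flat = _factor_indexer((4,), labels)
assert list(flat) == [1, 4, 11]    # 0*4+1, 1*4+0, 2*4+3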
6,187
def _finger(eq): f = eq.free_symbols d = dict(list(zip(f, [([0] * 5) for fi in f]))) for a in eq.args: if a.is_Symbol: d[a][0] += 1 elif a.is_Not: d[a.args[0]][1] += 1 else: o = (len(a.args) + sum(((ai.func is Not) for ai in a.args))) for ai in a.args: if ai.is_Symbol: d[ai][2] += 1 d[ai][(-1)] += o else: d[ai.args[0]][3] += 1 d[ai.args[0]][(-1)] += o inv = defaultdict(list) for (k, v) in ordered(iter(d.items())): inv[tuple(v)].append(k) return inv
[ "def", "_finger", "(", "eq", ")", ":", "f", "=", "eq", ".", "free_symbols", "d", "=", "dict", "(", "list", "(", "zip", "(", "f", ",", "[", "(", "[", "0", "]", "*", "5", ")", "for", "fi", "in", "f", "]", ")", ")", ")", "for", "a", "in", "eq", ".", "args", ":", "if", "a", ".", "is_Symbol", ":", "d", "[", "a", "]", "[", "0", "]", "+=", "1", "elif", "a", ".", "is_Not", ":", "d", "[", "a", ".", "args", "[", "0", "]", "]", "[", "1", "]", "+=", "1", "else", ":", "o", "=", "(", "len", "(", "a", ".", "args", ")", "+", "sum", "(", "(", "(", "ai", ".", "func", "is", "Not", ")", "for", "ai", "in", "a", ".", "args", ")", ")", ")", "for", "ai", "in", "a", ".", "args", ":", "if", "ai", ".", "is_Symbol", ":", "d", "[", "ai", "]", "[", "2", "]", "+=", "1", "d", "[", "ai", "]", "[", "(", "-", "1", ")", "]", "+=", "o", "else", ":", "d", "[", "ai", ".", "args", "[", "0", "]", "]", "[", "3", "]", "+=", "1", "d", "[", "ai", ".", "args", "[", "0", "]", "]", "[", "(", "-", "1", ")", "]", "+=", "o", "inv", "=", "defaultdict", "(", "list", ")", "for", "(", "k", ",", "v", ")", "in", "ordered", "(", "iter", "(", "d", ".", "items", "(", ")", ")", ")", ":", "inv", "[", "tuple", "(", "v", ")", "]", ".", "append", "(", "k", ")", "return", "inv" ]
assign a 5-item fingerprint to each symbol in the equation : # of times it appeared as a symbol , # of times as a not(symbol) , # of times as a symbol inside an and/or , # of times as a not(symbol) inside an and/or , and the weighted size of the and/or args in which it appeared .
train
false
6,188
def prefix_filter_flowgrams(flowgrams, squeeze=False): if squeeze: seqs = imap((lambda f: (f.Name, squeeze_seq(str(f.toSeq(truncate=True))))), flowgrams) else: seqs = imap((lambda f: (f.Name, str(f.toSeq(truncate=True)))), flowgrams) mapping = build_prefix_map(seqs) l = len(mapping) orig_l = (sum([len(a) for a in mapping.values()]) + l) return (l, orig_l, mapping)
[ "def", "prefix_filter_flowgrams", "(", "flowgrams", ",", "squeeze", "=", "False", ")", ":", "if", "squeeze", ":", "seqs", "=", "imap", "(", "(", "lambda", "f", ":", "(", "f", ".", "Name", ",", "squeeze_seq", "(", "str", "(", "f", ".", "toSeq", "(", "truncate", "=", "True", ")", ")", ")", ")", ")", ",", "flowgrams", ")", "else", ":", "seqs", "=", "imap", "(", "(", "lambda", "f", ":", "(", "f", ".", "Name", ",", "str", "(", "f", ".", "toSeq", "(", "truncate", "=", "True", ")", ")", ")", ")", ",", "flowgrams", ")", "mapping", "=", "build_prefix_map", "(", "seqs", ")", "l", "=", "len", "(", "mapping", ")", "orig_l", "=", "(", "sum", "(", "[", "len", "(", "a", ")", "for", "a", "in", "mapping", ".", "values", "(", ")", "]", ")", "+", "l", ")", "return", "(", "l", ",", "orig_l", ",", "mapping", ")" ]
filters flowgrams by common prefixes .
train
false
6,189
def _is_visible(idx_row, idx_col, lengths): return ((idx_col, idx_row) in lengths)
[ "def", "_is_visible", "(", "idx_row", ",", "idx_col", ",", "lengths", ")", ":", "return", "(", "(", "idx_col", ",", "idx_row", ")", "in", "lengths", ")" ]
return whether the cell at (idx_row , idx_col) is visible , i.e . whether (idx_col , idx_row) is a key of the lengths mapping .
train
false
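The lengths mapping is keyed (col, row), so the arguments swap on lookup; a tiny illustration:

lengths = {(0, 0): 2, (1, 0): 1}
assert _is_visible(0, 0, lengths)        # looks up (idx_col=0, idx_row=0)
assert not _is_visible(0, 2, lengths)    # (2, 0) is not a key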
6,190
def assert_raises_regex(exception_class, expected_regexp, callable_obj=None, *args, **kwargs): __tracebackhide__ = True nose = import_nose() if (sys.version_info.major >= 3): funcname = nose.tools.assert_raises_regex else: funcname = nose.tools.assert_raises_regexp return funcname(exception_class, expected_regexp, callable_obj, *args, **kwargs)
[ "def", "assert_raises_regex", "(", "exception_class", ",", "expected_regexp", ",", "callable_obj", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "__tracebackhide__", "=", "True", "nose", "=", "import_nose", "(", ")", "if", "(", "sys", ".", "version_info", ".", "major", ">=", "3", ")", ":", "funcname", "=", "nose", ".", "tools", ".", "assert_raises_regex", "else", ":", "funcname", "=", "nose", ".", "tools", ".", "assert_raises_regexp", "return", "funcname", "(", "exception_class", ",", "expected_regexp", ",", "callable_obj", ",", "*", "args", ",", "**", "kwargs", ")" ]
assert that some exception is raised in a context and that the message matches some pattern .
train
false
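A usage sketch mirroring the nose signature the wrapper dispatches to; int('not-a-number') raises a ValueError whose message matches the pattern:

assert_raises_regex(ValueError, 'invalid literal', int, 'not-a-number')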
6,191
def defined_names(source, path=None, encoding='utf-8'): warnings.warn('Use call_signatures instead.', DeprecationWarning) return names(source, path, encoding)
[ "def", "defined_names", "(", "source", ",", "path", "=", "None", ",", "encoding", "=", "'utf-8'", ")", ":", "warnings", ".", "warn", "(", "'Use call_signatures instead.'", ",", "DeprecationWarning", ")", "return", "names", "(", "source", ",", "path", ",", "encoding", ")" ]
get all definitions in source sorted by their position .
train
false
6,192
def addPillarFromConvexLoopsGridTop(faces, indexedGridTop, indexedLoops): addFacesByLoopReversed(faces, indexedLoops[0]) addFacesByConvexLoops(faces, indexedLoops) addFacesByGrid(faces, indexedGridTop)
[ "def", "addPillarFromConvexLoopsGridTop", "(", "faces", ",", "indexedGridTop", ",", "indexedLoops", ")", ":", "addFacesByLoopReversed", "(", "faces", ",", "indexedLoops", "[", "0", "]", ")", "addFacesByConvexLoops", "(", "faces", ",", "indexedLoops", ")", "addFacesByGrid", "(", "faces", ",", "indexedGridTop", ")" ]
add pillar from convex loops and grid top .
train
false
6,193
def check_data_writable(): create_and_check_dir(appsettings.DATA_DIR) create_and_check_dir(data_dir('home')) create_and_check_dir(data_dir('whoosh')) create_and_check_dir(data_dir('ssh')) create_and_check_dir(data_dir('vcs'))
[ "def", "check_data_writable", "(", ")", ":", "create_and_check_dir", "(", "appsettings", ".", "DATA_DIR", ")", "create_and_check_dir", "(", "data_dir", "(", "'home'", ")", ")", "create_and_check_dir", "(", "data_dir", "(", "'whoosh'", ")", ")", "create_and_check_dir", "(", "data_dir", "(", "'ssh'", ")", ")", "create_and_check_dir", "(", "data_dir", "(", "'vcs'", ")", ")" ]
check we can write to data dir .
train
false
6,194
def sum_outer_product_balanced(x, n_groups): xrs = x.reshape((-1), n_groups, order='F') return np.dot(xrs, xrs.T)
[ "def", "sum_outer_product_balanced", "(", "x", ",", "n_groups", ")", ":", "xrs", "=", "x", ".", "reshape", "(", "(", "-", "1", ")", ",", "n_groups", ",", "order", "=", "'F'", ")", "return", "np", ".", "dot", "(", "xrs", ",", "xrs", ".", "T", ")" ]
sum the outer products dot(x_i , x_i.T) over individuals , where x_i is the column of observations for individual i in the balanced panel .
train
false
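A numeric check of the reshape-and-dot trick above; with order='F', each column of the reshaped array holds one group:

import numpy as np
x = np.arange(6.0)                  # group columns [0, 1, 2] and [3, 4, 5]
total = sum_outer_product_balanced(x, n_groups=2)
expected = np.outer([0, 1, 2], [0, 1, 2]) + np.outer([3, 4, 5], [3, 4, 5])
assert np.array_equal(total, expected)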
6,196
@require_context @require_volume_exists def volume_glance_metadata_bulk_create(context, volume_id, metadata): session = get_session() with session.begin(): for (key, value) in metadata.items(): rows = session.query(models.VolumeGlanceMetadata).filter_by(volume_id=volume_id).filter_by(key=key).filter_by(deleted=False).all() if (len(rows) > 0): raise exception.GlanceMetadataExists(key=key, volume_id=volume_id) vol_glance_metadata = models.VolumeGlanceMetadata() vol_glance_metadata.volume_id = volume_id vol_glance_metadata.key = key vol_glance_metadata.value = six.text_type(value) session.add(vol_glance_metadata)
[ "@", "require_context", "@", "require_volume_exists", "def", "volume_glance_metadata_bulk_create", "(", "context", ",", "volume_id", ",", "metadata", ")", ":", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "for", "(", "key", ",", "value", ")", "in", "metadata", ".", "items", "(", ")", ":", "rows", "=", "session", ".", "query", "(", "models", ".", "VolumeGlanceMetadata", ")", ".", "filter_by", "(", "volume_id", "=", "volume_id", ")", ".", "filter_by", "(", "key", "=", "key", ")", ".", "filter_by", "(", "deleted", "=", "False", ")", ".", "all", "(", ")", "if", "(", "len", "(", "rows", ")", ">", "0", ")", ":", "raise", "exception", ".", "GlanceMetadataExists", "(", "key", "=", "key", ",", "volume_id", "=", "volume_id", ")", "vol_glance_metadata", "=", "models", ".", "VolumeGlanceMetadata", "(", ")", "vol_glance_metadata", ".", "volume_id", "=", "volume_id", "vol_glance_metadata", ".", "key", "=", "key", "vol_glance_metadata", ".", "value", "=", "six", ".", "text_type", "(", "value", ")", "session", ".", "add", "(", "vol_glance_metadata", ")" ]
add glance metadata for specified volume .
train
false
6,197
def try_send_email_with_form(func, form, field_name, *args, **kwargs): try: func(*args, **kwargs) except SMTPException as e: log.warning((u'Failed to send email: %s' % e)) if ('email' not in form.errors): form.errors[field_name] = [] form.errors[field_name].append(unicode(ERROR_SEND_EMAIL)) return form
[ "def", "try_send_email_with_form", "(", "func", ",", "form", ",", "field_name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "func", "(", "*", "args", ",", "**", "kwargs", ")", "except", "SMTPException", "as", "e", ":", "log", ".", "warning", "(", "(", "u'Failed to send email: %s'", "%", "e", ")", ")", "if", "(", "'email'", "not", "in", "form", ".", "errors", ")", ":", "form", ".", "errors", "[", "field_name", "]", "=", "[", "]", "form", ".", "errors", "[", "field_name", "]", ".", "append", "(", "unicode", "(", "ERROR_SEND_EMAIL", ")", ")", "return", "form" ]
send an email by calling func , recording any smtp failure as an error on the given form field .
train
false
6,198
def test_delete_by_idx(test_idx): Test.objects.get(pk=test_idx).delete() return (test_get_by_idx(test_idx) is None)
[ "def", "test_delete_by_idx", "(", "test_idx", ")", ":", "Test", ".", "objects", ".", "get", "(", "pk", "=", "test_idx", ")", ".", "delete", "(", ")", "return", "(", "test_get_by_idx", "(", "test_idx", ")", "is", "None", ")" ]
delete test based on its idx .
train
false
6,200
def delayed_import(): global _ServerSession, _PlayerDB, _ServerConfig, _ScriptDB if (not _ServerSession): (modulename, classname) = settings.SERVER_SESSION_CLASS.rsplit('.', 1) _ServerSession = variable_from_module(modulename, classname) if (not _PlayerDB): from evennia.players.models import PlayerDB as _PlayerDB if (not _ServerConfig): from evennia.server.models import ServerConfig as _ServerConfig if (not _ScriptDB): from evennia.scripts.models import ScriptDB as _ScriptDB (_ServerSession, _PlayerDB, _ServerConfig, _ScriptDB)
[ "def", "delayed_import", "(", ")", ":", "global", "_ServerSession", ",", "_PlayerDB", ",", "_ServerConfig", ",", "_ScriptDB", "if", "(", "not", "_ServerSession", ")", ":", "(", "modulename", ",", "classname", ")", "=", "settings", ".", "SERVER_SESSION_CLASS", ".", "rsplit", "(", "'.'", ",", "1", ")", "_ServerSession", "=", "variable_from_module", "(", "modulename", ",", "classname", ")", "if", "(", "not", "_PlayerDB", ")", ":", "from", "evennia", ".", "players", ".", "models", "import", "PlayerDB", "as", "_PlayerDB", "if", "(", "not", "_ServerConfig", ")", ":", "from", "evennia", ".", "server", ".", "models", "import", "ServerConfig", "as", "_ServerConfig", "if", "(", "not", "_ScriptDB", ")", ":", "from", "evennia", ".", "scripts", ".", "models", "import", "ScriptDB", "as", "_ScriptDB", "(", "_ServerSession", ",", "_PlayerDB", ",", "_ServerConfig", ",", "_ScriptDB", ")" ]
helper method for delayed import of all needed entities .
train
false
6,201
def error(status, message): headers = {'Content-Type': 'text/plain'} current.log.error(message) raise HTTP(status, body=message, web2py_error=message, **headers)
[ "def", "error", "(", "status", ",", "message", ")", ":", "headers", "=", "{", "'Content-Type'", ":", "'text/plain'", "}", "current", ".", "log", ".", "error", "(", "message", ")", "raise", "HTTP", "(", "status", ",", "body", "=", "message", ",", "web2py_error", "=", "message", ",", "**", "headers", ")" ]
log the message and raise an http exception with the given status and message .
train
false
6,202
def _parse_rmw_row_response(row_response): result = {} for column_family in row_response.row.families: (column_family_id, curr_family) = _parse_family_pb(column_family) result[column_family_id] = curr_family return result
[ "def", "_parse_rmw_row_response", "(", "row_response", ")", ":", "result", "=", "{", "}", "for", "column_family", "in", "row_response", ".", "row", ".", "families", ":", "(", "column_family_id", ",", "curr_family", ")", "=", "_parse_family_pb", "(", "column_family", ")", "result", "[", "column_family_id", "]", "=", "curr_family", "return", "result" ]
parses the response to a readmodifywriterow request .
train
true
6,203
def _IP(ip): ip_class = netaddr.ip.IPAddress if (isinstance(ip, ip_class) or (ip == '')): return ip else: return ip_class(ip)
[ "def", "_IP", "(", "ip", ")", ":", "ip_class", "=", "netaddr", ".", "ip", ".", "IPAddress", "if", "(", "isinstance", "(", "ip", ",", "ip_class", ")", "or", "(", "ip", "==", "''", ")", ")", ":", "return", "ip", "else", ":", "return", "ip_class", "(", "ip", ")" ]
returns a netaddr ipaddress for the given ip , passing through existing instances and the empty string .
train
false
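A short sketch, assuming netaddr is installed:

addr = _IP('192.0.2.10')    # wrapped into a netaddr.ip.IPAddress
assert _IP(addr) is addr    # existing instances pass through unchanged
assert _IP('') == ''        # as does the empty string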
6,204
def target_option(s): return s
[ "def", "target_option", "(", "s", ")", ":", "return", "s" ]
identity converter for a target option ; returns the string unchanged .
train
false
6,205
def update_default_args(defaults, updates): arg_name = re.compile('^-?-?\\s*=?([^\\s=]+)') defined_defaults_ = map((lambda arg: (arg_name.search(arg).group(1), arg)), defaults) defined_defaults = OrderedDict() for elm in defined_defaults_: defined_defaults[elm[0]] = elm[1] defined_updates_ = map((lambda arg: (arg_name.search(arg).group(1), arg)), updates) defined_updates = OrderedDict() for elm in defined_updates_: defined_updates[elm[0]] = elm[1] defined_defaults.update(defined_updates) return defined_defaults.values()
[ "def", "update_default_args", "(", "defaults", ",", "updates", ")", ":", "arg_name", "=", "re", ".", "compile", "(", "'^-?-?\\\\s*=?([^\\\\s=]+)'", ")", "defined_defaults_", "=", "map", "(", "(", "lambda", "arg", ":", "(", "arg_name", ".", "search", "(", "arg", ")", ".", "group", "(", "1", ")", ",", "arg", ")", ")", ",", "defaults", ")", "defined_defaults", "=", "OrderedDict", "(", ")", "for", "elm", "in", "defined_defaults_", ":", "defined_defaults", "[", "elm", "[", "0", "]", "]", "=", "elm", "[", "1", "]", "defined_updates_", "=", "map", "(", "(", "lambda", "arg", ":", "(", "arg_name", ".", "search", "(", "arg", ")", ".", "group", "(", "1", ")", ",", "arg", ")", ")", ",", "updates", ")", "defined_updates", "=", "OrderedDict", "(", ")", "for", "elm", "in", "defined_updates_", ":", "defined_updates", "[", "elm", "[", "0", "]", "]", "=", "elm", "[", "1", "]", "defined_defaults", ".", "update", "(", "defined_updates", ")", "return", "defined_defaults", ".", "values", "(", ")" ]
takes a list of default arguments and overwrites the defaults with contents of updates .
train
false
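Merging two flag lists with the helper above; later values win and insertion order is preserved:

defaults = ['--batch-size=32', '--lr=0.1']
updates = ['--lr=0.01', '--seed=7']
merged = list(update_default_args(defaults, updates))
assert merged == ['--batch-size=32', '--lr=0.01', '--seed=7']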
6,206
@gof.local_optimizer([Assert]) def local_remove_all_assert(node): if (not isinstance(node.op, Assert)): return return [node.inputs[0]]
[ "@", "gof", ".", "local_optimizer", "(", "[", "Assert", "]", ")", "def", "local_remove_all_assert", "(", "node", ")", ":", "if", "(", "not", "isinstance", "(", "node", ".", "op", ",", "Assert", ")", ")", ":", "return", "return", "[", "node", ".", "inputs", "[", "0", "]", "]" ]
an optimization disabled by default that removes all asserts from the graph .
train
false
6,208
def restore_template_loaders(): loader.template_source_loaders = getattr(loader, RESTORE_LOADERS_ATTR) delattr(loader, RESTORE_LOADERS_ATTR)
[ "def", "restore_template_loaders", "(", ")", ":", "loader", ".", "template_source_loaders", "=", "getattr", "(", "loader", ",", "RESTORE_LOADERS_ATTR", ")", "delattr", "(", "loader", ",", "RESTORE_LOADERS_ATTR", ")" ]
restores the original template loaders after :meth:setup_test_template_loader has been run .
train
false
6,212
def _ip_getnode(): mac = _find_mac('ip', 'link list', ['link/ether'], (lambda i: (i + 1))) if mac: return mac
[ "def", "_ip_getnode", "(", ")", ":", "mac", "=", "_find_mac", "(", "'ip'", ",", "'link list'", ",", "[", "'link/ether'", "]", ",", "(", "lambda", "i", ":", "(", "i", "+", "1", ")", ")", ")", "if", "mac", ":", "return", "mac" ]
get the hardware address on unix by running ip .
train
false
6,213
def read_timeseries(client, project_resource, custom_metric_type): request = client.projects().timeSeries().list(name=project_resource, filter='metric.type="{0}"'.format(custom_metric_type), pageSize=3, interval_startTime=get_start_time(), interval_endTime=get_now_rfc3339()) response = request.execute() return response
[ "def", "read_timeseries", "(", "client", ",", "project_resource", ",", "custom_metric_type", ")", ":", "request", "=", "client", ".", "projects", "(", ")", ".", "timeSeries", "(", ")", ".", "list", "(", "name", "=", "project_resource", ",", "filter", "=", "'metric.type=\"{0}\"'", ".", "format", "(", "custom_metric_type", ")", ",", "pageSize", "=", "3", ",", "interval_startTime", "=", "get_start_time", "(", ")", ",", "interval_endTime", "=", "get_now_rfc3339", "(", ")", ")", "response", "=", "request", ".", "execute", "(", ")", "return", "response" ]
reads all of the custom_metrics that we have written between start_time and end_time .
train
false
6,214
def createPattern(c, numDimensions): v = numpy.zeros(numDimensions) v[c] = ((5 * numpy.random.random()) + 10) v[(c + 1)] = numpy.random.random() if (c > 0): v[(c - 1)] = numpy.random.random() return v
[ "def", "createPattern", "(", "c", ",", "numDimensions", ")", ":", "v", "=", "numpy", ".", "zeros", "(", "numDimensions", ")", "v", "[", "c", "]", "=", "(", "(", "5", "*", "numpy", ".", "random", ".", "random", "(", ")", ")", "+", "10", ")", "v", "[", "(", "c", "+", "1", ")", "]", "=", "numpy", ".", "random", ".", "random", "(", ")", "if", "(", "c", ">", "0", ")", ":", "v", "[", "(", "c", "-", "1", ")", "]", "=", "numpy", ".", "random", ".", "random", "(", ")", "return", "v" ]
create a sparse pattern from category c with the given number of dimensions .
train
false
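A quick sanity check of the generated pattern; index c gets a strong weight and only its immediate neighbours carry noise:

import numpy
v = createPattern(3, numDimensions=10)
assert v[3] >= 10                    # the category weight is in [10, 15)
assert numpy.count_nonzero(v) <= 3   # only c and its neighbours are nonzero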
6,215
def get_masquerade_role(user, course_key): course_masquerade = get_course_masquerade(user, course_key) return (course_masquerade.role if course_masquerade else None)
[ "def", "get_masquerade_role", "(", "user", ",", "course_key", ")", ":", "course_masquerade", "=", "get_course_masquerade", "(", "user", ",", "course_key", ")", "return", "(", "course_masquerade", ".", "role", "if", "course_masquerade", "else", "None", ")" ]
returns the role that the user is masquerading as .
train
false
6,216
def p_power(p): if (len(p) == 2): p[0] = p[1] elif (p[2][0] == 'CALL'): p[0] = ast.CallFunc(p[1], p[2][1], None, None) else: raise AssertionError('not implemented')
[ "def", "p_power", "(", "p", ")", ":", "if", "(", "len", "(", "p", ")", "==", "2", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "elif", "(", "p", "[", "2", "]", "[", "0", "]", "==", "'CALL'", ")", ":", "p", "[", "0", "]", "=", "ast", ".", "CallFunc", "(", "p", "[", "1", "]", ",", "p", "[", "2", "]", "[", "1", "]", ",", "None", ",", "None", ")", "else", ":", "raise", "AssertionError", "(", "'not implemented'", ")" ]
power : atom | atom trailer .
train
false