id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
21,310
def convert_taxness(request, item, priceful, with_taxes):
    """Convert the taxness of a priceful object.

    Returns *priceful* unchanged when no conversion was requested
    (``with_taxes`` is None) or when its price already has the requested
    taxness; otherwise returns the converted priceful, falling back to
    the original if conversion yields nothing.
    """
    if with_taxes is None or priceful.price.includes_tax == with_taxes:
        return priceful
    converted = _make_taxed(request, item, priceful, with_taxes)
    return converted or priceful
[ "def", "convert_taxness", "(", "request", ",", "item", ",", "priceful", ",", "with_taxes", ")", ":", "if", "(", "(", "with_taxes", "is", "None", ")", "or", "(", "priceful", ".", "price", ".", "includes_tax", "==", "with_taxes", ")", ")", ":", "return", "priceful", "taxed_priceful", "=", "_make_taxed", "(", "request", ",", "item", ",", "priceful", ",", "with_taxes", ")", "return", "(", "taxed_priceful", "if", "taxed_priceful", "else", "priceful", ")" ]
convert taxness of a priceful object .
train
false
21,311
def extend_config(config, config_items):
    """Copy values from *config_items* onto matching attributes of *config*.

    Only attributes that already exist on *config* are overwritten;
    unknown keys are silently ignored.  Returns *config* for chaining.
    """
    for name in list(config_items):
        if hasattr(config, name):
            setattr(config, name, config_items[name])
    return config
[ "def", "extend_config", "(", "config", ",", "config_items", ")", ":", "for", "(", "key", ",", "val", ")", "in", "list", "(", "config_items", ".", "items", "(", ")", ")", ":", "if", "hasattr", "(", "config", ",", "key", ")", ":", "setattr", "(", "config", ",", "key", ",", "val", ")", "return", "config" ]
we are handling config value setting like this for a cleaner api .
train
true
21,313
def c_creates_obj(client):
    """Build the command tuple that creates one normal test object.

    A new object name is generated from ``OBJ_TEMPLATE`` and the
    client's counter, recorded on ``client.objs``, and the tuple of
    @create/@desc/@set commands for that object is returned.
    """
    objname = OBJ_TEMPLATE % client.counter()
    client.objs.append(objname)
    return (
        '@create %s' % objname,
        '@desc %s = "this is a test object' % objname,
        '@set %s/testattr = this is a test attribute value.' % objname,
        '@set %s/testattr2 = this is a second test attribute.' % objname,
    )
[ "def", "c_creates_obj", "(", "client", ")", ":", "objname", "=", "(", "OBJ_TEMPLATE", "%", "client", ".", "counter", "(", ")", ")", "client", ".", "objs", ".", "append", "(", "objname", ")", "cmds", "=", "(", "(", "'@create %s'", "%", "objname", ")", ",", "(", "'@desc %s = \"this is a test object'", "%", "objname", ")", ",", "(", "'@set %s/testattr = this is a test attribute value.'", "%", "objname", ")", ",", "(", "'@set %s/testattr2 = this is a second test attribute.'", "%", "objname", ")", ")", "return", "cmds" ]
creates normal objects .
train
false
21,314
def describe_api_key(apiKey, region=None, key=None, keyid=None, profile=None):
    """Return info about the given API key.

    On success the result is ``{'apiKey': <description>}`` with datetime
    fields converted to strings; on a botocore ClientError the result is
    ``{'error': <error info>}``.
    """
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        raw = conn.get_api_key(apiKey=apiKey)
        return {'apiKey': _convert_datetime_str(raw)}
    except ClientError as err:
        return {'error': salt.utils.boto3.get_error(err)}
[ "def", "describe_api_key", "(", "apiKey", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "response", "=", "conn", ".", "get_api_key", "(", "apiKey", "=", "apiKey", ")", "return", "{", "'apiKey'", ":", "_convert_datetime_str", "(", "response", ")", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
gets info about the given api key cli example: .
train
false
21,316
def attach_enctype_error_multidict(request):
    """Patch ``request.files`` so a missing-file lookup raises a helpful
    DebugFilesKeyError when the key *was* submitted as a plain form
    field (i.e. the form likely lacks ``enctype=multipart/form-data``);
    other missing keys re-raise the original KeyError.
    """
    base = request.files.__class__

    class patched(base):
        def __getitem__(self, key):
            try:
                return base.__getitem__(self, key)
            except KeyError:
                if key not in request.form:
                    raise
                raise DebugFilesKeyError(request, key)

    # Keep the patched class indistinguishable from the original.
    patched.__name__ = base.__name__
    patched.__module__ = base.__module__
    request.files.__class__ = patched
[ "def", "attach_enctype_error_multidict", "(", "request", ")", ":", "oldcls", "=", "request", ".", "files", ".", "__class__", "class", "newcls", "(", "oldcls", ",", ")", ":", "def", "__getitem__", "(", "self", ",", "key", ")", ":", "try", ":", "return", "oldcls", ".", "__getitem__", "(", "self", ",", "key", ")", "except", "KeyError", "as", "e", ":", "if", "(", "key", "not", "in", "request", ".", "form", ")", ":", "raise", "raise", "DebugFilesKeyError", "(", "request", ",", "key", ")", "newcls", ".", "__name__", "=", "oldcls", ".", "__name__", "newcls", ".", "__module__", "=", "oldcls", ".", "__module__", "request", ".", "files", ".", "__class__", "=", "newcls" ]
since flask 0 .
train
true
21,317
def _days_before_year(year): y = (year - 1) return ((((y * 365) + (y // 4)) - (y // 100)) + (y // 400))
[ "def", "_days_before_year", "(", "year", ")", ":", "y", "=", "(", "year", "-", "1", ")", "return", "(", "(", "(", "(", "y", "*", "365", ")", "+", "(", "y", "//", "4", ")", ")", "-", "(", "y", "//", "100", ")", ")", "+", "(", "y", "//", "400", ")", ")" ]
year -> number of days before january 1st of year .
train
false
21,318
def render_crispy_form(form, helper=None, context=None):
    """Render *form* with crispy-forms and return its HTML output."""
    from crispy_forms.templatetags.crispy_forms_tags import CrispyFormNode

    # Reference the helper variable in the template node only when a
    # helper was actually supplied.
    helper_var = u'helper' if helper is not None else None
    node = CrispyFormNode(u'form', helper_var)
    node_context = Context(context)
    node_context.update({u'form': form, u'helper': helper})
    return node.render(node_context)
[ "def", "render_crispy_form", "(", "form", ",", "helper", "=", "None", ",", "context", "=", "None", ")", ":", "from", "crispy_forms", ".", "templatetags", ".", "crispy_forms_tags", "import", "CrispyFormNode", "if", "(", "helper", "is", "not", "None", ")", ":", "node", "=", "CrispyFormNode", "(", "u'form'", ",", "u'helper'", ")", "else", ":", "node", "=", "CrispyFormNode", "(", "u'form'", ",", "None", ")", "node_context", "=", "Context", "(", "context", ")", "node_context", ".", "update", "(", "{", "u'form'", ":", "form", ",", "u'helper'", ":", "helper", "}", ")", "return", "node", ".", "render", "(", "node_context", ")" ]
renders a form and returns its html output .
train
true
21,319
def make_qstr(t):
    """Return the string representation of *t*, double-quoted when it
    contains a space."""
    if not is_string_like(t):
        t = str(t)
    return '"%s"' % t if ' ' in t else t
[ "def", "make_qstr", "(", "t", ")", ":", "if", "(", "not", "is_string_like", "(", "t", ")", ")", ":", "t", "=", "str", "(", "t", ")", "if", "(", "' '", "in", "t", ")", ":", "t", "=", "(", "'\"%s\"'", "%", "t", ")", "return", "t" ]
return the string representation of t .
train
false
21,320
def _endpoint_from_view_func(view_func): assert (view_func is not None), 'expected view func if endpoint is not provided.' return view_func.__name__
[ "def", "_endpoint_from_view_func", "(", "view_func", ")", ":", "assert", "(", "view_func", "is", "not", "None", ")", ",", "'expected view func if endpoint is not provided.'", "return", "view_func", ".", "__name__" ]
internal helper that returns the default endpoint for a given function .
train
false
21,321
@core_helper
def lang_native_name(lang=None):
    """Return the language name in its localised (native) form.

    Uses *lang* when given, otherwise falls back to the current
    environment's language setting.
    """
    # NOTE(review): the parameter shadows the module-level ``lang()``
    # helper, so when ``lang`` is falsy this evaluates ``None()`` and
    # raises TypeError — presumably the module helper was intended;
    # confirm upstream before changing.
    lang = (lang or lang())
    locale = i18n.get_locales_dict().get(lang)
    if locale:
        # Prefer the native display name, fall back to the English name.
        return (locale.display_name or locale.english_name)
    return lang
[ "@", "core_helper", "def", "lang_native_name", "(", "lang", "=", "None", ")", ":", "lang", "=", "(", "lang", "or", "lang", "(", ")", ")", "locale", "=", "i18n", ".", "get_locales_dict", "(", ")", ".", "get", "(", "lang", ")", "if", "locale", ":", "return", "(", "locale", ".", "display_name", "or", "locale", ".", "english_name", ")", "return", "lang" ]
return the language name currently used in its localised form either from parameter or current environ setting .
train
false
21,322
def make_summary(otu_table, level, upper_percentage, lower_percentage, md_as_string=False, md_identifier='taxonomy'):
    """Build per-taxon count rows for an OTU table at a taxonomy level.

    Returns ``(taxonomy_summary, header)`` where ``header`` is
    ``['Taxon'] + sample ids`` and each summary row is
    ``[consensus_lineage, count, count, ...]`` (one count per sample).

    A taxon is skipped when its total count is above
    ``lower_percentage * total`` or below ``upper_percentage * total``;
    either threshold may be None to disable that bound.
    """
    header = ['Taxon']
    header.extend(otu_table.ids())
    # Unclassified observations are pooled under the label 'Other'.
    (counts_by_consensus, sample_map) = sum_counts_by_consensus(otu_table, level, 'Other', md_as_string, md_identifier)
    total_counts = float(sum([sum(i) for i in counts_by_consensus.values()]))
    taxonomy_summary = []
    for (consensus, otu_counts) in sorted(counts_by_consensus.items()):
        # Trim taxa falling outside the requested abundance window.
        if ((lower_percentage is not None) and (otu_counts.sum() > (lower_percentage * total_counts))):
            continue
        elif ((upper_percentage is not None) and (otu_counts.sum() < (upper_percentage * total_counts))):
            continue
        new_row = [consensus]
        new_row.extend(otu_counts)
        taxonomy_summary.append(new_row)
    # NOTE(review): ``collapsed`` (and ``sample_map`` above) are computed
    # but never used — looks like dead code; confirm ``collapse`` has no
    # needed side effects before removing.
    collapse_f = (lambda id_, md: md[md_identifier][(level - 1)])
    collapsed = otu_table.collapse(collapse_f, norm=False, min_group_size=0, axis='observation')
    return (taxonomy_summary, header)
[ "def", "make_summary", "(", "otu_table", ",", "level", ",", "upper_percentage", ",", "lower_percentage", ",", "md_as_string", "=", "False", ",", "md_identifier", "=", "'taxonomy'", ")", ":", "header", "=", "[", "'Taxon'", "]", "header", ".", "extend", "(", "otu_table", ".", "ids", "(", ")", ")", "(", "counts_by_consensus", ",", "sample_map", ")", "=", "sum_counts_by_consensus", "(", "otu_table", ",", "level", ",", "'Other'", ",", "md_as_string", ",", "md_identifier", ")", "total_counts", "=", "float", "(", "sum", "(", "[", "sum", "(", "i", ")", "for", "i", "in", "counts_by_consensus", ".", "values", "(", ")", "]", ")", ")", "taxonomy_summary", "=", "[", "]", "for", "(", "consensus", ",", "otu_counts", ")", "in", "sorted", "(", "counts_by_consensus", ".", "items", "(", ")", ")", ":", "if", "(", "(", "lower_percentage", "is", "not", "None", ")", "and", "(", "otu_counts", ".", "sum", "(", ")", ">", "(", "lower_percentage", "*", "total_counts", ")", ")", ")", ":", "continue", "elif", "(", "(", "upper_percentage", "is", "not", "None", ")", "and", "(", "otu_counts", ".", "sum", "(", ")", "<", "(", "upper_percentage", "*", "total_counts", ")", ")", ")", ":", "continue", "new_row", "=", "[", "consensus", "]", "new_row", ".", "extend", "(", "otu_counts", ")", "taxonomy_summary", ".", "append", "(", "new_row", ")", "collapse_f", "=", "(", "lambda", "id_", ",", "md", ":", "md", "[", "md_identifier", "]", "[", "(", "level", "-", "1", ")", "]", ")", "collapsed", "=", "otu_table", ".", "collapse", "(", "collapse_f", ",", "norm", "=", "False", ",", "min_group_size", "=", "0", ",", "axis", "=", "'observation'", ")", "return", "(", "taxonomy_summary", ",", "header", ")" ]
returns taxonomy summary data header is a list of: [ .
train
false
21,324
def sinhm(A):
    """Compute the hyperbolic matrix sine of a square matrix.

    Uses the identity sinh(A) = (exp(A) - exp(-A)) / 2 and returns a
    real result when the input warrants one.
    """
    A = _asarray_square(A)
    result = 0.5 * (expm(A) - expm(-A))
    return _maybe_real(A, result)
[ "def", "sinhm", "(", "A", ")", ":", "A", "=", "_asarray_square", "(", "A", ")", "return", "_maybe_real", "(", "A", ",", "(", "0.5", "*", "(", "expm", "(", "A", ")", "-", "expm", "(", "(", "-", "A", ")", ")", ")", ")", ")" ]
compute the hyperbolic matrix sine .
train
false
21,326
def _loadCAsFromDir(directoryPath):
    """Load certificate-authority certificates from every ``.pem`` file
    in *directoryPath* and return a trust root built from them.

    Unreadable files and files that fail to parse as PEM are skipped.
    """
    caCerts = {}
    for child in directoryPath.children():
        extension = child.asTextMode().basename().split(u'.')[-1].lower()
        if extension != u'pem':
            continue
        try:
            data = child.getContent()
        except IOError:
            continue
        try:
            cert = Certificate.loadPEM(data)
        except SSLError:
            continue
        # Key by digest so duplicate certificates collapse to one entry.
        caCerts[cert.digest()] = cert
    return trustRootFromCertificates(caCerts.values())
[ "def", "_loadCAsFromDir", "(", "directoryPath", ")", ":", "caCerts", "=", "{", "}", "for", "child", "in", "directoryPath", ".", "children", "(", ")", ":", "if", "(", "not", "(", "child", ".", "asTextMode", "(", ")", ".", "basename", "(", ")", ".", "split", "(", "u'.'", ")", "[", "(", "-", "1", ")", "]", ".", "lower", "(", ")", "==", "u'pem'", ")", ")", ":", "continue", "try", ":", "data", "=", "child", ".", "getContent", "(", ")", "except", "IOError", ":", "continue", "try", ":", "theCert", "=", "Certificate", ".", "loadPEM", "(", "data", ")", "except", "SSLError", ":", "pass", "else", ":", "caCerts", "[", "theCert", ".", "digest", "(", ")", "]", "=", "theCert", "return", "trustRootFromCertificates", "(", "caCerts", ".", "values", "(", ")", ")" ]
load certificate-authority certificate objects in a given directory .
train
false
21,327
def cap_tops(days=5, retry_count=3, pause=0.001):
    """Fetch the top-list statistics over the given look-back window.

    Parameters
    ----------
    days : int
        Look-back window in days (per the upstream validator's accepted
        values).
    retry_count : int
        Number of fetch retries.
    pause : float
        Seconds to pause between retries.

    Returns the DataFrame with zero-padded 6-digit stock codes and
    duplicate codes dropped, or None when validation fails or nothing
    was fetched.
    """
    if ct._check_lhb_input(days) is True:
        ct._write_head()
        df = _cap_tops(days, pageNo=1, retry_count=retry_count, pause=pause)
        # BUG FIX: the original indexed df['code'] BEFORE checking for
        # None, raising TypeError whenever the fetch returned nothing.
        if df is not None:
            df['code'] = df['code'].map(lambda x: str(x).zfill(6))
            df = df.drop_duplicates('code')
        return df
[ "def", "cap_tops", "(", "days", "=", "5", ",", "retry_count", "=", "3", ",", "pause", "=", "0.001", ")", ":", "if", "(", "ct", ".", "_check_lhb_input", "(", "days", ")", "is", "True", ")", ":", "ct", ".", "_write_head", "(", ")", "df", "=", "_cap_tops", "(", "days", ",", "pageNo", "=", "1", ",", "retry_count", "=", "retry_count", ",", "pause", "=", "pause", ")", "df", "[", "'code'", "]", "=", "df", "[", "'code'", "]", ".", "map", "(", "(", "lambda", "x", ":", "str", "(", "x", ")", ".", "zfill", "(", "6", ")", ")", ")", "if", "(", "df", "is", "not", "None", ")", ":", "df", "=", "df", ".", "drop_duplicates", "(", "'code'", ")", "return", "df" ]
parameters days:int 天数,统计n天以来上榜次数,默认为5天,其余是10、30、60 retry_count : int .
train
false
21,328
@commands(u'wt', u'define', u'dict')
@example(u'.wt bailiwick')
def wiktionary(bot, trigger):
    """Look up a word on Wiktionary and say its definitions.

    Widens the reply (3, then 5 definitions) while it stays under 150
    characters, and truncates anything over 300 characters.
    """
    word = trigger.group(2)
    if (word is None):
        bot.reply(u'You must tell me what to look up!')
        return
    (_etymology, definitions) = wikt(word)
    if (not definitions):
        bot.say((u"Couldn't get any definitions for %s." % word))
        return
    # NOTE: ``format`` here is this module's formatter, shadowing the
    # builtin ``format``.
    result = format(word, definitions)
    if (len(result) < 150):
        result = format(word, definitions, 3)
    if (len(result) < 150):
        result = format(word, definitions, 5)
    if (len(result) > 300):
        result = (result[:295] + u'[...]')
    bot.say(result)
[ "@", "commands", "(", "u'wt'", ",", "u'define'", ",", "u'dict'", ")", "@", "example", "(", "u'.wt bailiwick'", ")", "def", "wiktionary", "(", "bot", ",", "trigger", ")", ":", "word", "=", "trigger", ".", "group", "(", "2", ")", "if", "(", "word", "is", "None", ")", ":", "bot", ".", "reply", "(", "u'You must tell me what to look up!'", ")", "return", "(", "_etymology", ",", "definitions", ")", "=", "wikt", "(", "word", ")", "if", "(", "not", "definitions", ")", ":", "bot", ".", "say", "(", "(", "u\"Couldn't get any definitions for %s.\"", "%", "word", ")", ")", "return", "result", "=", "format", "(", "word", ",", "definitions", ")", "if", "(", "len", "(", "result", ")", "<", "150", ")", ":", "result", "=", "format", "(", "word", ",", "definitions", ",", "3", ")", "if", "(", "len", "(", "result", ")", "<", "150", ")", ":", "result", "=", "format", "(", "word", ",", "definitions", ",", "5", ")", "if", "(", "len", "(", "result", ")", ">", "300", ")", ":", "result", "=", "(", "result", "[", ":", "295", "]", "+", "u'[...]'", ")", "bot", ".", "say", "(", "result", ")" ]
look up a word on wiktionary .
train
false
21,329
def readHeader(file):
    """Read (skip) the SLC header.

    Consumes bytes one at a time until the 0x1A terminator byte is
    found, leaving the stream positioned just past it.

    Raises
    ------
    EOFError
        If the stream ends before the terminator appears (the original
        crashed with an obscure TypeError from ``ord('')`` instead).
    """
    while True:
        byte = file.read(1)
        if not byte:
            raise EOFError('end of stream while scanning for SLC header terminator')
        if ord(byte) == 26:
            return
[ "def", "readHeader", "(", "file", ")", ":", "while", "(", "ord", "(", "file", ".", "read", "(", "1", ")", ")", "!=", "26", ")", ":", "pass" ]
read the slc header .
train
false
21,332
def require_volume_exists(f):
    """Decorator requiring the specified volume to exist.

    The wrapped function first calls ``db.volume_get`` (which raises if
    the volume is missing) and then delegates to *f* unchanged.
    """
    from functools import wraps

    # wraps preserves __name__ (all the original copied) plus __doc__,
    # __module__ and __wrapped__ — a backward-compatible superset.
    @wraps(f)
    def wrapper(context, volume_id, *args, **kwargs):
        db.volume_get(context, volume_id)
        return f(context, volume_id, *args, **kwargs)

    return wrapper
[ "def", "require_volume_exists", "(", "f", ")", ":", "def", "wrapper", "(", "context", ",", "volume_id", ",", "*", "args", ",", "**", "kwargs", ")", ":", "db", ".", "volume_get", "(", "context", ",", "volume_id", ")", "return", "f", "(", "context", ",", "volume_id", ",", "*", "args", ",", "**", "kwargs", ")", "wrapper", ".", "__name__", "=", "f", ".", "__name__", "return", "wrapper" ]
decorator to require the specified volume to exist .
train
false
21,333
def DNSServiceEnumerateDomains(flags, interfaceIndex=kDNSServiceInterfaceIndexAny, callBack=None):
    """Asynchronously enumerate domains available for browsing and
    registration.

    Wraps the user *callBack* in a C-compatible callback that decodes
    the reply domain, invokes the underlying C API while holding the
    global lock, and keeps a reference to the callback on the returned
    service ref so it is not garbage-collected.
    """
    @_DNSServiceDomainEnumReply
    def _callback(sdRef, flags, interfaceIndex, errorCode, replyDomain, context):
        if (callBack is not None):
            callBack(sdRef, flags, interfaceIndex, errorCode, replyDomain.decode())
    # Serialise access to the underlying library call.
    _global_lock.acquire()
    try:
        sdRef = _DNSServiceEnumerateDomains(flags, interfaceIndex, _callback, None)
    finally:
        _global_lock.release()
    # Pin the callback for the lifetime of sdRef.
    sdRef._add_callback(_callback)
    return sdRef
[ "def", "DNSServiceEnumerateDomains", "(", "flags", ",", "interfaceIndex", "=", "kDNSServiceInterfaceIndexAny", ",", "callBack", "=", "None", ")", ":", "@", "_DNSServiceDomainEnumReply", "def", "_callback", "(", "sdRef", ",", "flags", ",", "interfaceIndex", ",", "errorCode", ",", "replyDomain", ",", "context", ")", ":", "if", "(", "callBack", "is", "not", "None", ")", ":", "callBack", "(", "sdRef", ",", "flags", ",", "interfaceIndex", ",", "errorCode", ",", "replyDomain", ".", "decode", "(", ")", ")", "_global_lock", ".", "acquire", "(", ")", "try", ":", "sdRef", "=", "_DNSServiceEnumerateDomains", "(", "flags", ",", "interfaceIndex", ",", "_callback", ",", "None", ")", "finally", ":", "_global_lock", ".", "release", "(", ")", "sdRef", ".", "_add_callback", "(", "_callback", ")", "return", "sdRef" ]
asynchronously enumerate domains available for browsing and registration .
train
false
21,334
def worker_disable(worker, lbn, profile='default'):
    """Set *worker* to the disabled ('d') state in the *lbn* load
    balancer, using the given connection profile."""
    return _worker_ctl(worker, lbn, 'd', profile)
[ "def", "worker_disable", "(", "worker", ",", "lbn", ",", "profile", "=", "'default'", ")", ":", "return", "_worker_ctl", "(", "worker", ",", "lbn", ",", "'d'", ",", "profile", ")" ]
set the worker to disable state in the lbn load balancer cli examples: .
train
false
21,336
def preprocess_image(image, height, width, is_training=False, bbox=None, fast_mode=True):
    """Preprocess the given image.

    Dispatches to the training pipeline (which takes the bbox and the
    fast-mode flag) when *is_training* is set, and to the simpler
    evaluation pipeline otherwise.
    """
    if not is_training:
        return preprocess_for_eval(image, height, width)
    return preprocess_for_train(image, height, width, bbox, fast_mode)
[ "def", "preprocess_image", "(", "image", ",", "height", ",", "width", ",", "is_training", "=", "False", ",", "bbox", "=", "None", ",", "fast_mode", "=", "True", ")", ":", "if", "is_training", ":", "return", "preprocess_for_train", "(", "image", ",", "height", ",", "width", ",", "bbox", ",", "fast_mode", ")", "else", ":", "return", "preprocess_for_eval", "(", "image", ",", "height", ",", "width", ")" ]
preprocesses the given image .
train
false
21,337
def _get_videos(course): videos = list(get_videos_for_course(course.id, VideoSortField.created, SortDirection.desc)) for video in videos: video['status'] = convert_video_status(video) return videos
[ "def", "_get_videos", "(", "course", ")", ":", "videos", "=", "list", "(", "get_videos_for_course", "(", "course", ".", "id", ",", "VideoSortField", ".", "created", ",", "SortDirection", ".", "desc", ")", ")", "for", "video", "in", "videos", ":", "video", "[", "'status'", "]", "=", "convert_video_status", "(", "video", ")", "return", "videos" ]
retrieves the list of videos from val corresponding to this course .
train
false
21,338
def send_email_after_account_create_with_password(form):
    """Send the registration email for an account created with a password."""
    action = USER_REGISTER_WITH_PASSWORD
    template = MAILS[action]
    subject = template['subject'].format(app_name=get_settings()['app_name'])
    body = template['message'].format(email=form['email'], password=form['password'])
    send_email(to=form['email'], action=action, subject=subject, html=body)
[ "def", "send_email_after_account_create_with_password", "(", "form", ")", ":", "send_email", "(", "to", "=", "form", "[", "'email'", "]", ",", "action", "=", "USER_REGISTER_WITH_PASSWORD", ",", "subject", "=", "MAILS", "[", "USER_REGISTER_WITH_PASSWORD", "]", "[", "'subject'", "]", ".", "format", "(", "app_name", "=", "get_settings", "(", ")", "[", "'app_name'", "]", ")", ",", "html", "=", "MAILS", "[", "USER_REGISTER_WITH_PASSWORD", "]", "[", "'message'", "]", ".", "format", "(", "email", "=", "form", "[", "'email'", "]", ",", "password", "=", "form", "[", "'password'", "]", ")", ")" ]
send email after account create .
train
false
21,339
def get_search_url_from_referer(request):
    """Return the referer URL if it was an on-site search URL, else None.

    The referer must parse as a URL, use https, point at the current
    site's domain, and match the search view's path for the request
    locale.
    """
    referer = request.META.get('HTTP_REFERER', None)
    try:
        url = URL(referer)
    except UnicodeDecodeError:
        # A referer that cannot be decoded is treated as absent.
        url = None
    current_site = Site.objects.get_current()
    # Reject anything that is not an on-site, https search URL.
    if ((referer is None) or (url is None) or (url.scheme != 'https') or (url.netloc != current_site.domain) or (reverse('search', locale=request.LANGUAGE_CODE) != url.path)):
        return None
    return referer
[ "def", "get_search_url_from_referer", "(", "request", ")", ":", "referer", "=", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ",", "None", ")", "try", ":", "url", "=", "URL", "(", "referer", ")", "except", "UnicodeDecodeError", ":", "url", "=", "None", "current_site", "=", "Site", ".", "objects", ".", "get_current", "(", ")", "if", "(", "(", "referer", "is", "None", ")", "or", "(", "url", "is", "None", ")", "or", "(", "url", ".", "scheme", "!=", "'https'", ")", "or", "(", "url", ".", "netloc", "!=", "current_site", ".", "domain", ")", "or", "(", "reverse", "(", "'search'", ",", "locale", "=", "request", ".", "LANGUAGE_CODE", ")", "!=", "url", ".", "path", ")", ")", ":", "return", "None", "return", "referer" ]
returns search url from referer if it was an mdn search .
train
false
21,340
def ParseLogEntry(entry):
    """Parse a single log entry emitted by app_logging.

    A well-formed entry looks like ``LOG <level> <timestamp> <message>``
    with level in 0..4; it becomes ``(timestamp, level, message)`` with
    embedded NULs turned back into newlines.  Anything else is wrapped
    as a level-3 entry stamped with the current time in microseconds.
    """
    parts = entry.split(' ', 3)
    if len(parts) == 4 and parts[0] == 'LOG':
        try:
            level = int(parts[1])
            timestamp = int(parts[2])
            message = str(parts[3])
        except ValueError:
            pass
        else:
            if 0 <= level <= 4:
                return (timestamp, level, message.replace('\x00', '\n'))
    # Malformed entry: wrap it verbatim with a synthetic timestamp.
    usec = int(time.time() * 1000000.0)
    return (usec, 3, entry.replace('\x00', '\n'))
[ "def", "ParseLogEntry", "(", "entry", ")", ":", "split", "=", "entry", ".", "split", "(", "' '", ",", "3", ")", "if", "(", "(", "len", "(", "split", ")", "==", "4", ")", "and", "(", "split", "[", "0", "]", "==", "'LOG'", ")", ")", ":", "level", "=", "split", "[", "1", "]", "timestamp", "=", "split", "[", "2", "]", "message", "=", "split", "[", "3", "]", "try", ":", "message", "=", "str", "(", "message", ")", "timestamp", "=", "int", "(", "timestamp", ")", "level", "=", "int", "(", "level", ")", "except", "ValueError", ":", "pass", "else", ":", "if", "(", "0", "<=", "level", "<=", "4", ")", ":", "return", "(", "timestamp", ",", "level", ",", "message", ".", "replace", "(", "'\\x00'", ",", "'\\n'", ")", ")", "usec", "=", "int", "(", "(", "time", ".", "time", "(", ")", "*", "1000000.0", ")", ")", "return", "(", "usec", ",", "3", ",", "entry", ".", "replace", "(", "'\\x00'", ",", "'\\n'", ")", ")" ]
parses a single log entry emitted by app_logging .
train
false
21,341
def agent_service_setup(test):
    """Common setup for AgentService test cases.

    Attaches a credential set, a fake control-service endpoint, a memory
    reactor and a loopback-backed AgentService to *test*.
    """
    test.ca_set = get_credential_sets()[0]
    test.host = '192.0.2.5'
    test.port = 54123
    test.reactor = MemoryCoreReactor()
    test.agent_service = AgentService(
        reactor=test.reactor,
        control_service_host=test.host,
        control_service_port=test.port,
        node_credential=test.ca_set.node,
        ca_certificate=test.ca_set.root.credential.certificate,
        backend_description=LOOPBACK,
        api_args={},
    )
[ "def", "agent_service_setup", "(", "test", ")", ":", "test", ".", "ca_set", "=", "get_credential_sets", "(", ")", "[", "0", "]", "test", ".", "host", "=", "'192.0.2.5'", "test", ".", "port", "=", "54123", "test", ".", "reactor", "=", "MemoryCoreReactor", "(", ")", "test", ".", "agent_service", "=", "AgentService", "(", "reactor", "=", "test", ".", "reactor", ",", "control_service_host", "=", "test", ".", "host", ",", "control_service_port", "=", "test", ".", "port", ",", "node_credential", "=", "test", ".", "ca_set", ".", "node", ",", "ca_certificate", "=", "test", ".", "ca_set", ".", "root", ".", "credential", ".", "certificate", ",", "backend_description", "=", "LOOPBACK", ",", "api_args", "=", "{", "}", ")" ]
do some setup common to all of the agentservice test cases .
train
false
21,342
def var_swapping(a, b, c, d, e):
    """Swap (a, b) and rotate (c, d, e) -> (e, c, d), then return the
    sum a + b + c + d + e of the permuted values."""
    a, b = b, a
    c, d, e = e, c, d
    return a + b + c + d + e
[ "def", "var_swapping", "(", "a", ",", "b", ",", "c", ",", "d", ",", "e", ")", ":", "(", "a", ",", "b", ")", "=", "(", "b", ",", "a", ")", "(", "c", ",", "d", ",", "e", ")", "=", "(", "e", ",", "c", ",", "d", ")", "return", "(", "(", "(", "(", "a", "+", "b", ")", "+", "c", ")", "+", "d", ")", "+", "e", ")" ]
label 0: a = arg [a] b = arg [b] c = arg [c] d = arg [d] e = arg [e] a .
train
false
21,343
def set_nodes(nodes):
    """Replace the fake driver's module-level node list with *nodes*
    (test helper)."""
    global _FAKE_NODES
    _FAKE_NODES = nodes
[ "def", "set_nodes", "(", "nodes", ")", ":", "global", "_FAKE_NODES", "_FAKE_NODES", "=", "nodes" ]
sets fakedrivers node .
train
false
21,344
def get_bind(obj):
    """Return the bind for the given SQLAlchemy engine / connection /
    session-bound declarative model object.

    Raises TypeError when nothing with an ``execute`` method can be
    derived from *obj*.
    """
    if hasattr(obj, 'bind'):
        connectable = obj.bind
    else:
        try:
            connectable = object_session(obj).bind
        except UnmappedInstanceError:
            # Not a mapped instance: assume obj itself is connectable.
            connectable = obj
    if hasattr(connectable, 'execute'):
        return connectable
    raise TypeError('This method accepts only Session, Engine, Connection and declarative model objects.')
[ "def", "get_bind", "(", "obj", ")", ":", "if", "hasattr", "(", "obj", ",", "'bind'", ")", ":", "conn", "=", "obj", ".", "bind", "else", ":", "try", ":", "conn", "=", "object_session", "(", "obj", ")", ".", "bind", "except", "UnmappedInstanceError", ":", "conn", "=", "obj", "if", "(", "not", "hasattr", "(", "conn", ",", "'execute'", ")", ")", ":", "raise", "TypeError", "(", "'This method accepts only Session, Engine, Connection and declarative model objects.'", ")", "return", "conn" ]
return the bind for given sqlalchemy engine / connection / declarative model object .
train
false
21,345
def agent_members(consul_url=None, **kwargs):
    """Return the members as seen by the local serf agent.

    When *consul_url* is not supplied it is read from configuration; a
    ``wan`` keyword is forwarded as a query parameter.
    """
    if not consul_url:
        consul_url = _get_config()
        if not consul_url:
            log.error('No Consul URL found.')
            return {'message': 'No Consul URL found.', 'res': False}
    query_params = {}
    if 'wan' in kwargs:
        query_params['wan'] = kwargs['wan']
    return _query(consul_url=consul_url, function='agent/members', method='GET', query_params=query_params)
[ "def", "agent_members", "(", "consul_url", "=", "None", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "}", "query_params", "=", "{", "}", "if", "(", "not", "consul_url", ")", ":", "consul_url", "=", "_get_config", "(", ")", "if", "(", "not", "consul_url", ")", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "(", "'wan'", "in", "kwargs", ")", ":", "query_params", "[", "'wan'", "]", "=", "kwargs", "[", "'wan'", "]", "function", "=", "'agent/members'", "ret", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "function", "=", "function", ",", "method", "=", "'GET'", ",", "query_params", "=", "query_params", ")", "return", "ret" ]
returns the members as seen by the local serf agent .
train
true
21,347
def clear_time_caches(delete_all=False):
    """Drop entries from the module's time-based caches.

    With *delete_all* every time cache and the parser cache are wiped
    completely; otherwise only entries whose expiry time has passed are
    removed.
    """
    global _time_caches
    if delete_all:
        for cache in _time_caches.values():
            cache.clear()
        parser_cache.clear()
        return
    for cache in _time_caches.values():
        # Snapshot items so deletion during iteration is safe.
        for cache_key, (expiry, _value) in list(cache.items()):
            if expiry < time.time():
                del cache[cache_key]
[ "def", "clear_time_caches", "(", "delete_all", "=", "False", ")", ":", "global", "_time_caches", "if", "delete_all", ":", "for", "cache", "in", "_time_caches", ".", "values", "(", ")", ":", "cache", ".", "clear", "(", ")", "parser_cache", ".", "clear", "(", ")", "else", ":", "for", "tc", "in", "_time_caches", ".", "values", "(", ")", ":", "for", "(", "key", ",", "(", "t", ",", "value", ")", ")", "in", "list", "(", "tc", ".", "items", "(", ")", ")", ":", "if", "(", "t", "<", "time", ".", "time", "(", ")", ")", ":", "del", "tc", "[", "key", "]" ]
jedi caches many things .
train
false
21,350
@Profiler.profile
def test_orm_bundles(n):
    """Load lightweight "bundle" objects (id/name/description) for *n*
    customers using the ORM, streaming 10000 rows at a time."""
    session = Session(engine)
    customer_bundle = Bundle('customer', Customer.id, Customer.name, Customer.description)
    for _row in session.query(customer_bundle).yield_per(10000).limit(n):
        pass
[ "@", "Profiler", ".", "profile", "def", "test_orm_bundles", "(", "n", ")", ":", "sess", "=", "Session", "(", "engine", ")", "bundle", "=", "Bundle", "(", "'customer'", ",", "Customer", ".", "id", ",", "Customer", ".", "name", ",", "Customer", ".", "description", ")", "for", "row", "in", "sess", ".", "query", "(", "bundle", ")", ".", "yield_per", "(", "10000", ")", ".", "limit", "(", "n", ")", ":", "pass" ]
load lightweight "bundle" objects using the orm .
train
false
21,351
def get_auth_password():
    """Return the auth password from the password script if it produced
    one, else the backward-compatible default setting."""
    # `or` matches the original truthiness check on the script output.
    return AUTH_PASSWORD_SCRIPT.get() or DEFAULT_AUTH_PASSWORD.get()
[ "def", "get_auth_password", "(", ")", ":", "password", "=", "AUTH_PASSWORD_SCRIPT", ".", "get", "(", ")", "if", "password", ":", "return", "password", "return", "DEFAULT_AUTH_PASSWORD", ".", "get", "(", ")" ]
get from script or backward compatibility .
train
false
21,352
def style_from_pygments(style_cls=pygments_DefaultStyle, style_dict=None, include_defaults=True):
    """Shortcut to create a Style from a Pygments style class and/or a
    style dictionary; dictionary entries override the class's styles."""
    assert style_dict is None or isinstance(style_dict, dict)
    assert style_cls is None or issubclass(style_cls, pygments_Style)
    merged = {}
    if style_cls is not None:
        merged.update(style_cls.styles)
    if style_dict is not None:
        merged.update(style_dict)
    return style_from_dict(merged, include_defaults=include_defaults)
[ "def", "style_from_pygments", "(", "style_cls", "=", "pygments_DefaultStyle", ",", "style_dict", "=", "None", ",", "include_defaults", "=", "True", ")", ":", "assert", "(", "(", "style_dict", "is", "None", ")", "or", "isinstance", "(", "style_dict", ",", "dict", ")", ")", "assert", "(", "(", "style_cls", "is", "None", ")", "or", "issubclass", "(", "style_cls", ",", "pygments_Style", ")", ")", "styles_dict", "=", "{", "}", "if", "(", "style_cls", "is", "not", "None", ")", ":", "styles_dict", ".", "update", "(", "style_cls", ".", "styles", ")", "if", "(", "style_dict", "is", "not", "None", ")", ":", "styles_dict", ".", "update", "(", "style_dict", ")", "return", "style_from_dict", "(", "styles_dict", ",", "include_defaults", "=", "include_defaults", ")" ]
shortcut to create a :class: .
train
true
21,354
def matchDLLArch(filename):
    """Return True if the DLL at *filename* matches the CPU
    type/architecture of the Python process running PyInstaller.

    Always True on non-Windows platforms.  The running interpreter's
    machine type is read from its PE header once and cached in the
    module-level ``_exe_machine_type``.
    """
    # Nothing to compare against on non-Windows.
    if (not is_win):
        return True
    from ..lib import pefile
    global _exe_machine_type
    if (_exe_machine_type is None):
        # Lazily determine and cache the interpreter's machine type.
        exe_pe = pefile.PE(sys.executable, fast_load=True)
        _exe_machine_type = exe_pe.FILE_HEADER.Machine
        exe_pe.close()
    pe = pefile.PE(filename, fast_load=True)
    match_arch = (pe.FILE_HEADER.Machine == _exe_machine_type)
    pe.close()
    return match_arch
[ "def", "matchDLLArch", "(", "filename", ")", ":", "if", "(", "not", "is_win", ")", ":", "return", "True", "from", ".", ".", "lib", "import", "pefile", "global", "_exe_machine_type", "if", "(", "_exe_machine_type", "is", "None", ")", ":", "exe_pe", "=", "pefile", ".", "PE", "(", "sys", ".", "executable", ",", "fast_load", "=", "True", ")", "_exe_machine_type", "=", "exe_pe", ".", "FILE_HEADER", ".", "Machine", "exe_pe", ".", "close", "(", ")", "pe", "=", "pefile", ".", "PE", "(", "filename", ",", "fast_load", "=", "True", ")", "match_arch", "=", "(", "pe", ".", "FILE_HEADER", ".", "Machine", "==", "_exe_machine_type", ")", "pe", ".", "close", "(", ")", "return", "match_arch" ]
return true if the dll given by filename matches the cpu type/architecture of the python process running pyinstaller .
train
false
21,357
def fileConfig(fname, defaults=None, disable_existing_loggers=1): import ConfigParser cp = ConfigParser.ConfigParser(defaults) if (hasattr(cp, 'readfp') and hasattr(fname, 'readline')): cp.readfp(fname) else: cp.read(fname) formatters = _create_formatters(cp) logging._acquireLock() try: logging._handlers.clear() del logging._handlerList[:] handlers = _install_handlers(cp, formatters) _install_loggers(cp, handlers, disable_existing_loggers) finally: logging._releaseLock()
[ "def", "fileConfig", "(", "fname", ",", "defaults", "=", "None", ",", "disable_existing_loggers", "=", "1", ")", ":", "import", "ConfigParser", "cp", "=", "ConfigParser", ".", "ConfigParser", "(", "defaults", ")", "if", "(", "hasattr", "(", "cp", ",", "'readfp'", ")", "and", "hasattr", "(", "fname", ",", "'readline'", ")", ")", ":", "cp", ".", "readfp", "(", "fname", ")", "else", ":", "cp", ".", "read", "(", "fname", ")", "formatters", "=", "_create_formatters", "(", "cp", ")", "logging", ".", "_acquireLock", "(", ")", "try", ":", "logging", ".", "_handlers", ".", "clear", "(", ")", "del", "logging", ".", "_handlerList", "[", ":", "]", "handlers", "=", "_install_handlers", "(", "cp", ",", "formatters", ")", "_install_loggers", "(", "cp", ",", "handlers", ",", "disable_existing_loggers", ")", "finally", ":", "logging", ".", "_releaseLock", "(", ")" ]
read the logging configuration from a configparser-format file .
train
false
21,359
def B_012(x): x = np.atleast_1d(x) return np.piecewise(x, [((x < 0) | (x > 2)), ((x >= 0) & (x < 1)), ((x >= 1) & (x <= 2))], [(lambda x: 0.0), (lambda x: x), (lambda x: (2.0 - x))])
[ "def", "B_012", "(", "x", ")", ":", "x", "=", "np", ".", "atleast_1d", "(", "x", ")", "return", "np", ".", "piecewise", "(", "x", ",", "[", "(", "(", "x", "<", "0", ")", "|", "(", "x", ">", "2", ")", ")", ",", "(", "(", "x", ">=", "0", ")", "&", "(", "x", "<", "1", ")", ")", ",", "(", "(", "x", ">=", "1", ")", "&", "(", "x", "<=", "2", ")", ")", "]", ",", "[", "(", "lambda", "x", ":", "0.0", ")", ",", "(", "lambda", "x", ":", "x", ")", ",", "(", "lambda", "x", ":", "(", "2.0", "-", "x", ")", ")", "]", ")" ]
a linear b-spline function b .
train
false
21,360
@hook.command def rainbow(text): text = str(text) text = strip(text) col = list(COLORS.items()) out = '' l = len(COLORS) for (i, t) in enumerate(text): if (t == ' '): out += t else: out += (col[(i % l)][1] + t) return out
[ "@", "hook", ".", "command", "def", "rainbow", "(", "text", ")", ":", "text", "=", "str", "(", "text", ")", "text", "=", "strip", "(", "text", ")", "col", "=", "list", "(", "COLORS", ".", "items", "(", ")", ")", "out", "=", "''", "l", "=", "len", "(", "COLORS", ")", "for", "(", "i", ",", "t", ")", "in", "enumerate", "(", "text", ")", ":", "if", "(", "t", "==", "' '", ")", ":", "out", "+=", "t", "else", ":", "out", "+=", "(", "col", "[", "(", "i", "%", "l", ")", "]", "[", "1", "]", "+", "t", ")", "return", "out" ]
returns a list of colors sampled at equal intervals over the spectrum .
train
false
21,362
def variable_on_cpu(name, shape, initializer, trainable=True): var = tf.get_variable(name, shape, initializer=initializer, trainable=trainable) return var
[ "def", "variable_on_cpu", "(", "name", ",", "shape", ",", "initializer", ",", "trainable", "=", "True", ")", ":", "var", "=", "tf", ".", "get_variable", "(", "name", ",", "shape", ",", "initializer", "=", "initializer", ",", "trainable", "=", "trainable", ")", "return", "var" ]
helper to create a variable stored on cpu memory .
train
false
21,364
def generate_exception(err_code): if (err_code == (-1)): return EigenMatException('Incompatible matrix dimensions.') elif (err_code == (-2)): return EigenMatException('CUBLAS error.') elif (err_code == (-3)): return EigenMatException(('CUDA error: ' + get_last_cuda_error())) elif (err_code == (-4)): return EigenMatException('Operation not supported on views.') elif (err_code == (-5)): return EigenMatException('Operation not supported on transposed matrices.') elif (err_code == (-6)): return EigenMatException('') elif (err_code == (-7)): return EigenMatException('Incompatible transposedness.') elif (err_code == (-8)): return EigenMatException('Matrix is not in device memory.') elif (err_code == (-9)): return EigenMatException('Operation not supported.')
[ "def", "generate_exception", "(", "err_code", ")", ":", "if", "(", "err_code", "==", "(", "-", "1", ")", ")", ":", "return", "EigenMatException", "(", "'Incompatible matrix dimensions.'", ")", "elif", "(", "err_code", "==", "(", "-", "2", ")", ")", ":", "return", "EigenMatException", "(", "'CUBLAS error.'", ")", "elif", "(", "err_code", "==", "(", "-", "3", ")", ")", ":", "return", "EigenMatException", "(", "(", "'CUDA error: '", "+", "get_last_cuda_error", "(", ")", ")", ")", "elif", "(", "err_code", "==", "(", "-", "4", ")", ")", ":", "return", "EigenMatException", "(", "'Operation not supported on views.'", ")", "elif", "(", "err_code", "==", "(", "-", "5", ")", ")", ":", "return", "EigenMatException", "(", "'Operation not supported on transposed matrices.'", ")", "elif", "(", "err_code", "==", "(", "-", "6", ")", ")", ":", "return", "EigenMatException", "(", "''", ")", "elif", "(", "err_code", "==", "(", "-", "7", ")", ")", ":", "return", "EigenMatException", "(", "'Incompatible transposedness.'", ")", "elif", "(", "err_code", "==", "(", "-", "8", ")", ")", ":", "return", "EigenMatException", "(", "'Matrix is not in device memory.'", ")", "elif", "(", "err_code", "==", "(", "-", "9", ")", ")", ":", "return", "EigenMatException", "(", "'Operation not supported.'", ")" ]
return a cudamatexception object based on the error code err_code .
train
false
21,365
def getsignal(signalnum): if (signalnum != _signal.SIGCHLD): return _signal_getsignal(signalnum) global _child_handler if (_child_handler is _INITIAL): _child_handler = _signal_getsignal(_signal.SIGCHLD) return _child_handler
[ "def", "getsignal", "(", "signalnum", ")", ":", "if", "(", "signalnum", "!=", "_signal", ".", "SIGCHLD", ")", ":", "return", "_signal_getsignal", "(", "signalnum", ")", "global", "_child_handler", "if", "(", "_child_handler", "is", "_INITIAL", ")", ":", "_child_handler", "=", "_signal_getsignal", "(", "_signal", ".", "SIGCHLD", ")", "return", "_child_handler" ]
exactly the same as :func:signal .
train
false
21,366
def _is_separating_set(G, cut): if (len(cut) == (len(G) - 1)): return True H = G.copy(with_data=False) H.remove_nodes_from(cut) if nx.is_connected(H): return False return True
[ "def", "_is_separating_set", "(", "G", ",", "cut", ")", ":", "if", "(", "len", "(", "cut", ")", "==", "(", "len", "(", "G", ")", "-", "1", ")", ")", ":", "return", "True", "H", "=", "G", ".", "copy", "(", "with_data", "=", "False", ")", "H", ".", "remove_nodes_from", "(", "cut", ")", "if", "nx", ".", "is_connected", "(", "H", ")", ":", "return", "False", "return", "True" ]
assumes that the input graph is connected .
train
false
21,367
def save_orig(): _main_module()._orig_get_script_args = easy_install.get_script_args
[ "def", "save_orig", "(", ")", ":", "_main_module", "(", ")", ".", "_orig_get_script_args", "=", "easy_install", ".", "get_script_args" ]
save original easy_install .
train
false
21,369
def makeLogRecord(dict): rv = _logRecordFactory(None, None, '', 0, '', (), None, None) rv.__dict__.update(dict) return rv
[ "def", "makeLogRecord", "(", "dict", ")", ":", "rv", "=", "_logRecordFactory", "(", "None", ",", "None", ",", "''", ",", "0", ",", "''", ",", "(", ")", ",", "None", ",", "None", ")", "rv", ".", "__dict__", ".", "update", "(", "dict", ")", "return", "rv" ]
make a logrecord whose attributes are defined by the specified dictionary .
train
false
21,371
def test_empty_monitoring_datasets(): learning_rate = 0.001 batch_size = 5 dim = 3 rng = np.random.RandomState([25, 9, 2012]) train_dataset = DenseDesignMatrix(X=rng.randn(10, dim)) model = SoftmaxModel(dim) cost = DummyCost() algorithm = SGD(learning_rate, cost, batch_size=batch_size, monitoring_dataset={}, termination_criterion=EpochCounter(2)) train = Train(train_dataset, model, algorithm, save_path=None, save_freq=0, extensions=None) train.main_loop()
[ "def", "test_empty_monitoring_datasets", "(", ")", ":", "learning_rate", "=", "0.001", "batch_size", "=", "5", "dim", "=", "3", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "[", "25", ",", "9", ",", "2012", "]", ")", "train_dataset", "=", "DenseDesignMatrix", "(", "X", "=", "rng", ".", "randn", "(", "10", ",", "dim", ")", ")", "model", "=", "SoftmaxModel", "(", "dim", ")", "cost", "=", "DummyCost", "(", ")", "algorithm", "=", "SGD", "(", "learning_rate", ",", "cost", ",", "batch_size", "=", "batch_size", ",", "monitoring_dataset", "=", "{", "}", ",", "termination_criterion", "=", "EpochCounter", "(", "2", ")", ")", "train", "=", "Train", "(", "train_dataset", ",", "model", ",", "algorithm", ",", "save_path", "=", "None", ",", "save_freq", "=", "0", ",", "extensions", "=", "None", ")", "train", ".", "main_loop", "(", ")" ]
test that handling of monitoring datasets dictionnary does not fail when it is empty .
train
false
21,372
def supplier(): get_vars['organisation_type.name'] = 'Supplier' table = s3db.org_organisation s3.crud_strings.org_organisation = Storage(label_create=T('Create Supplier'), title_display=T('Supplier Details'), title_list=T('Suppliers'), title_update=T('Edit Supplier'), title_upload=T('Import Suppliers'), label_list_button=T('List Suppliers'), label_delete_button=T('Delete Supplier'), msg_record_created=T('Supplier added'), msg_record_modified=T('Supplier updated'), msg_record_deleted=T('Supplier deleted'), msg_list_empty=T('No Suppliers currently registered')) s3db.configure('org_organisation', create_next=URL(c='inv', f='supplier', args=['[id]', 'read'])) return s3db.org_organisation_controller()
[ "def", "supplier", "(", ")", ":", "get_vars", "[", "'organisation_type.name'", "]", "=", "'Supplier'", "table", "=", "s3db", ".", "org_organisation", "s3", ".", "crud_strings", ".", "org_organisation", "=", "Storage", "(", "label_create", "=", "T", "(", "'Create Supplier'", ")", ",", "title_display", "=", "T", "(", "'Supplier Details'", ")", ",", "title_list", "=", "T", "(", "'Suppliers'", ")", ",", "title_update", "=", "T", "(", "'Edit Supplier'", ")", ",", "title_upload", "=", "T", "(", "'Import Suppliers'", ")", ",", "label_list_button", "=", "T", "(", "'List Suppliers'", ")", ",", "label_delete_button", "=", "T", "(", "'Delete Supplier'", ")", ",", "msg_record_created", "=", "T", "(", "'Supplier added'", ")", ",", "msg_record_modified", "=", "T", "(", "'Supplier updated'", ")", ",", "msg_record_deleted", "=", "T", "(", "'Supplier deleted'", ")", ",", "msg_list_empty", "=", "T", "(", "'No Suppliers currently registered'", ")", ")", "s3db", ".", "configure", "(", "'org_organisation'", ",", "create_next", "=", "URL", "(", "c", "=", "'inv'", ",", "f", "=", "'supplier'", ",", "args", "=", "[", "'[id]'", ",", "'read'", "]", ")", ")", "return", "s3db", ".", "org_organisation_controller", "(", ")" ]
restful crud controller .
train
false
21,374
def parse_and_validate_reply_to_address(address): (recipient, sep, domain) = address.partition('@') if ((not sep) or (not recipient) or (domain != g.modmail_email_domain)): return (main, sep, remainder) = recipient.partition('+') if ((not sep) or (not main) or (main != 'zendeskreply')): return try: (email_id, email_mac) = remainder.split('-') except ValueError: return expected_mac = hmac.new(g.secrets['modmail_email_secret'], email_id, hashlib.sha256).hexdigest() if (not constant_time_compare(expected_mac, email_mac)): return message_id36 = email_id return message_id36
[ "def", "parse_and_validate_reply_to_address", "(", "address", ")", ":", "(", "recipient", ",", "sep", ",", "domain", ")", "=", "address", ".", "partition", "(", "'@'", ")", "if", "(", "(", "not", "sep", ")", "or", "(", "not", "recipient", ")", "or", "(", "domain", "!=", "g", ".", "modmail_email_domain", ")", ")", ":", "return", "(", "main", ",", "sep", ",", "remainder", ")", "=", "recipient", ".", "partition", "(", "'+'", ")", "if", "(", "(", "not", "sep", ")", "or", "(", "not", "main", ")", "or", "(", "main", "!=", "'zendeskreply'", ")", ")", ":", "return", "try", ":", "(", "email_id", ",", "email_mac", ")", "=", "remainder", ".", "split", "(", "'-'", ")", "except", "ValueError", ":", "return", "expected_mac", "=", "hmac", ".", "new", "(", "g", ".", "secrets", "[", "'modmail_email_secret'", "]", ",", "email_id", ",", "hashlib", ".", "sha256", ")", ".", "hexdigest", "(", ")", "if", "(", "not", "constant_time_compare", "(", "expected_mac", ",", "email_mac", ")", ")", ":", "return", "message_id36", "=", "email_id", "return", "message_id36" ]
validate the address and parse out and return the message id .
train
false
21,375
def addVector3ToElementNode(elementNode, key, vector3): elementNode.attributes[key] = ('[%s,%s,%s]' % (vector3.x, vector3.y, vector3.z))
[ "def", "addVector3ToElementNode", "(", "elementNode", ",", "key", ",", "vector3", ")", ":", "elementNode", ".", "attributes", "[", "key", "]", "=", "(", "'[%s,%s,%s]'", "%", "(", "vector3", ".", "x", ",", "vector3", ".", "y", ",", "vector3", ".", "z", ")", ")" ]
add vector3 to xml element .
train
false
21,376
def starting_offset(source_code, offset): word_finder = worder.Worder(source_code, True) (expression, starting, starting_offset) = word_finder.get_splitted_primary_before(offset) return starting_offset
[ "def", "starting_offset", "(", "source_code", ",", "offset", ")", ":", "word_finder", "=", "worder", ".", "Worder", "(", "source_code", ",", "True", ")", "(", "expression", ",", "starting", ",", "starting_offset", ")", "=", "word_finder", ".", "get_splitted_primary_before", "(", "offset", ")", "return", "starting_offset" ]
return the offset in which the completion should be inserted usually code assist proposals should be inserted like:: completion = proposal .
train
true
21,377
def load_apps(app_blacklist): global DESKTOP_MODULES global DESKTOP_APPS if (DESKTOP_APPS is not None): raise Exception(_('load_apps has already been called.')) DESKTOP_APPS = [] for sdk_app in pkg_resources.iter_entry_points('desktop.sdk.application'): if (sdk_app.name not in app_blacklist): if (('oozie' in app_blacklist) and (sdk_app.name in ('pig', 'jobsub'))): LOG.warn(('%s depends on oozie which is blacklisted, will skip loading %s app.' % (sdk_app.name, sdk_app.name))) else: m = sdk_app.load() dmi = DesktopModuleInfo(m) DESKTOP_APPS.append(dmi) LOG.debug(('Loaded Desktop Applications: ' + ', '.join((a.name for a in DESKTOP_APPS)))) DESKTOP_MODULES += DESKTOP_APPS
[ "def", "load_apps", "(", "app_blacklist", ")", ":", "global", "DESKTOP_MODULES", "global", "DESKTOP_APPS", "if", "(", "DESKTOP_APPS", "is", "not", "None", ")", ":", "raise", "Exception", "(", "_", "(", "'load_apps has already been called.'", ")", ")", "DESKTOP_APPS", "=", "[", "]", "for", "sdk_app", "in", "pkg_resources", ".", "iter_entry_points", "(", "'desktop.sdk.application'", ")", ":", "if", "(", "sdk_app", ".", "name", "not", "in", "app_blacklist", ")", ":", "if", "(", "(", "'oozie'", "in", "app_blacklist", ")", "and", "(", "sdk_app", ".", "name", "in", "(", "'pig'", ",", "'jobsub'", ")", ")", ")", ":", "LOG", ".", "warn", "(", "(", "'%s depends on oozie which is blacklisted, will skip loading %s app.'", "%", "(", "sdk_app", ".", "name", ",", "sdk_app", ".", "name", ")", ")", ")", "else", ":", "m", "=", "sdk_app", ".", "load", "(", ")", "dmi", "=", "DesktopModuleInfo", "(", "m", ")", "DESKTOP_APPS", ".", "append", "(", "dmi", ")", "LOG", ".", "debug", "(", "(", "'Loaded Desktop Applications: '", "+", "', '", ".", "join", "(", "(", "a", ".", "name", "for", "a", "in", "DESKTOP_APPS", ")", ")", ")", ")", "DESKTOP_MODULES", "+=", "DESKTOP_APPS" ]
loads the applications from the directories in app_dirs .
train
false
21,381
def mask_passwords(registry, xml_parent, data): XML.SubElement(xml_parent, 'com.michelin.cio.hudson.plugins.maskpasswords.MaskPasswordsBuildWrapper')
[ "def", "mask_passwords", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.michelin.cio.hudson.plugins.maskpasswords.MaskPasswordsBuildWrapper'", ")" ]
yaml: mask-passwords hide passwords in the console log .
train
false
21,382
def decode_language(code): for (tag, (language, region, iso639, iso3166)) in LANGUAGE_REGION.items(): if (iso639 == code.lower()): return language
[ "def", "decode_language", "(", "code", ")", ":", "for", "(", "tag", ",", "(", "language", ",", "region", ",", "iso639", ",", "iso3166", ")", ")", "in", "LANGUAGE_REGION", ".", "items", "(", ")", ":", "if", "(", "iso639", "==", "code", ".", "lower", "(", ")", ")", ":", "return", "language" ]
returns the language name for the given language code .
train
false
21,383
def _get_enabled_disabled(enabled_prop='true'): ret = set() cmd = '/usr/bin/svcprop -c -p general/enabled "*"' lines = __salt__['cmd.run_stdout'](cmd, python_shell=False).splitlines() for line in lines: comps = line.split() if (not comps): continue if (comps[2] == enabled_prop): ret.add(comps[0].split('/:properties')[0]) return sorted(ret)
[ "def", "_get_enabled_disabled", "(", "enabled_prop", "=", "'true'", ")", ":", "ret", "=", "set", "(", ")", "cmd", "=", "'/usr/bin/svcprop -c -p general/enabled \"*\"'", "lines", "=", "__salt__", "[", "'cmd.run_stdout'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "for", "line", "in", "lines", ":", "comps", "=", "line", ".", "split", "(", ")", "if", "(", "not", "comps", ")", ":", "continue", "if", "(", "comps", "[", "2", "]", "==", "enabled_prop", ")", ":", "ret", ".", "add", "(", "comps", "[", "0", "]", ".", "split", "(", "'/:properties'", ")", "[", "0", "]", ")", "return", "sorted", "(", "ret", ")" ]
dry: get all service fmris and their enabled property .
train
true
21,384
def create_settings_from_fixture(fixture): created_settings = {} for settingsgroup in fixture: group = SettingsGroup(key=settingsgroup[0], name=settingsgroup[1][u'name'], description=settingsgroup[1][u'description']) group.save() created_settings[group] = [] for settings in settingsgroup[1][u'settings']: setting = Setting(key=settings[0], value=settings[1][u'value'], value_type=settings[1][u'value_type'], name=settings[1][u'name'], description=settings[1][u'description'], extra=settings[1].get(u'extra', u''), settingsgroup=group.key) if setting: setting.save() created_settings[group].append(setting) return created_settings
[ "def", "create_settings_from_fixture", "(", "fixture", ")", ":", "created_settings", "=", "{", "}", "for", "settingsgroup", "in", "fixture", ":", "group", "=", "SettingsGroup", "(", "key", "=", "settingsgroup", "[", "0", "]", ",", "name", "=", "settingsgroup", "[", "1", "]", "[", "u'name'", "]", ",", "description", "=", "settingsgroup", "[", "1", "]", "[", "u'description'", "]", ")", "group", ".", "save", "(", ")", "created_settings", "[", "group", "]", "=", "[", "]", "for", "settings", "in", "settingsgroup", "[", "1", "]", "[", "u'settings'", "]", ":", "setting", "=", "Setting", "(", "key", "=", "settings", "[", "0", "]", ",", "value", "=", "settings", "[", "1", "]", "[", "u'value'", "]", ",", "value_type", "=", "settings", "[", "1", "]", "[", "u'value_type'", "]", ",", "name", "=", "settings", "[", "1", "]", "[", "u'name'", "]", ",", "description", "=", "settings", "[", "1", "]", "[", "u'description'", "]", ",", "extra", "=", "settings", "[", "1", "]", ".", "get", "(", "u'extra'", ",", "u''", ")", ",", "settingsgroup", "=", "group", ".", "key", ")", "if", "setting", ":", "setting", ".", "save", "(", ")", "created_settings", "[", "group", "]", ".", "append", "(", "setting", ")", "return", "created_settings" ]
inserts the settings from a fixture into the database .
train
false
21,386
def get_auto_conf_images(agentConfig): from config import PathNotFound, get_auto_confd_path auto_conf_images = {} try: auto_confd_path = get_auto_confd_path() except PathNotFound: log.error("Couldn't find the check auto-configuration folder, no auto configuration will be used.") return None for yaml_file in os.listdir(auto_confd_path): check_name = yaml_file.split('.')[0] try: auto_conf = check_yaml(urljoin(auto_confd_path, yaml_file)) except Exception as e: log.error(('Enable to load the auto-config, yaml file.\n%s' % str(e))) auto_conf = {} images = auto_conf.get('docker_images', []) for image in images: auto_conf_images[image] = check_name return auto_conf_images
[ "def", "get_auto_conf_images", "(", "agentConfig", ")", ":", "from", "config", "import", "PathNotFound", ",", "get_auto_confd_path", "auto_conf_images", "=", "{", "}", "try", ":", "auto_confd_path", "=", "get_auto_confd_path", "(", ")", "except", "PathNotFound", ":", "log", ".", "error", "(", "\"Couldn't find the check auto-configuration folder, no auto configuration will be used.\"", ")", "return", "None", "for", "yaml_file", "in", "os", ".", "listdir", "(", "auto_confd_path", ")", ":", "check_name", "=", "yaml_file", ".", "split", "(", "'.'", ")", "[", "0", "]", "try", ":", "auto_conf", "=", "check_yaml", "(", "urljoin", "(", "auto_confd_path", ",", "yaml_file", ")", ")", "except", "Exception", "as", "e", ":", "log", ".", "error", "(", "(", "'Enable to load the auto-config, yaml file.\\n%s'", "%", "str", "(", "e", ")", ")", ")", "auto_conf", "=", "{", "}", "images", "=", "auto_conf", ".", "get", "(", "'docker_images'", ",", "[", "]", ")", "for", "image", "in", "images", ":", "auto_conf_images", "[", "image", "]", "=", "check_name", "return", "auto_conf_images" ]
walk through the auto_config folder and build a dict of auto-configurable images .
train
false
21,387
def _api_config_test_server(output, kwargs): (result, msg) = test_nntp_server_dict(kwargs) response = {'result': result, 'message': msg} if output: return report(output, data=response) else: return msg
[ "def", "_api_config_test_server", "(", "output", ",", "kwargs", ")", ":", "(", "result", ",", "msg", ")", "=", "test_nntp_server_dict", "(", "kwargs", ")", "response", "=", "{", "'result'", ":", "result", ",", "'message'", ":", "msg", "}", "if", "output", ":", "return", "report", "(", "output", ",", "data", "=", "response", ")", "else", ":", "return", "msg" ]
api: accepts output .
train
false
21,388
def generate_fs_subjects(filenames): for f in filenames: (yield File(f)) (yield Link(f)) (yield Dir(f)) if (dirname(f) in (u'.', u'')): (yield Dir(u''))
[ "def", "generate_fs_subjects", "(", "filenames", ")", ":", "for", "f", "in", "filenames", ":", "(", "yield", "File", "(", "f", ")", ")", "(", "yield", "Link", "(", "f", ")", ")", "(", "yield", "Dir", "(", "f", ")", ")", "if", "(", "dirname", "(", "f", ")", "in", "(", "u'.'", ",", "u''", ")", ")", ":", "(", "yield", "Dir", "(", "u''", ")", ")" ]
given filenames .
train
false
21,390
def longestCommonPrefix(*sequences): if (len(sequences) == 1): return sequences[0] sequences = [pair[1] for pair in sorted(((len(fi), fi) for fi in sequences))] if (not sequences): return None for (i, comparison_ch) in enumerate(sequences[0]): for fi in sequences[1:]: ch = fi[i] if (ch != comparison_ch): return fi[:i] return sequences[0]
[ "def", "longestCommonPrefix", "(", "*", "sequences", ")", ":", "if", "(", "len", "(", "sequences", ")", "==", "1", ")", ":", "return", "sequences", "[", "0", "]", "sequences", "=", "[", "pair", "[", "1", "]", "for", "pair", "in", "sorted", "(", "(", "(", "len", "(", "fi", ")", ",", "fi", ")", "for", "fi", "in", "sequences", ")", ")", "]", "if", "(", "not", "sequences", ")", ":", "return", "None", "for", "(", "i", ",", "comparison_ch", ")", "in", "enumerate", "(", "sequences", "[", "0", "]", ")", ":", "for", "fi", "in", "sequences", "[", "1", ":", "]", ":", "ch", "=", "fi", "[", "i", "]", "if", "(", "ch", "!=", "comparison_ch", ")", ":", "return", "fi", "[", ":", "i", "]", "return", "sequences", "[", "0", "]" ]
returns longest common prefix occuring in given sequences reference: URL .
train
false
21,391
def _gluster_output_cleanup(result): ret = '' for line in result.splitlines(): if line.startswith('gluster>'): ret += line[9:].strip() else: ret += line.strip() return ret
[ "def", "_gluster_output_cleanup", "(", "result", ")", ":", "ret", "=", "''", "for", "line", "in", "result", ".", "splitlines", "(", ")", ":", "if", "line", ".", "startswith", "(", "'gluster>'", ")", ":", "ret", "+=", "line", "[", "9", ":", "]", ".", "strip", "(", ")", "else", ":", "ret", "+=", "line", ".", "strip", "(", ")", "return", "ret" ]
gluster versions prior to 6 have a bug that requires tricking isatty .
train
true
21,396
def _CheckNumberOfFields(returned_expressions, snippeted_fields, returned_fields): number_expressions = ((len(returned_expressions) + len(snippeted_fields)) + len(returned_fields)) if (number_expressions > MAXIMUM_FIELDS_RETURNED_PER_SEARCH): raise ValueError(('too many fields, snippets or expressions to return %d > maximum %d' % (number_expressions, MAXIMUM_FIELDS_RETURNED_PER_SEARCH)))
[ "def", "_CheckNumberOfFields", "(", "returned_expressions", ",", "snippeted_fields", ",", "returned_fields", ")", ":", "number_expressions", "=", "(", "(", "len", "(", "returned_expressions", ")", "+", "len", "(", "snippeted_fields", ")", ")", "+", "len", "(", "returned_fields", ")", ")", "if", "(", "number_expressions", ">", "MAXIMUM_FIELDS_RETURNED_PER_SEARCH", ")", ":", "raise", "ValueError", "(", "(", "'too many fields, snippets or expressions to return %d > maximum %d'", "%", "(", "number_expressions", ",", "MAXIMUM_FIELDS_RETURNED_PER_SEARCH", ")", ")", ")" ]
checks the count of all field kinds is less than limit .
train
false
21,397
def version_sorted(elements): return sorted(elements, key=LooseVersion, reverse=True)
[ "def", "version_sorted", "(", "elements", ")", ":", "return", "sorted", "(", "elements", ",", "key", "=", "LooseVersion", ",", "reverse", "=", "True", ")" ]
sort iterable based on loose description of "version" from newest to oldest .
train
false
21,399
def FormatMessage(eventLogRecord, logType='Application'): keyName = ('SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s' % (logType, eventLogRecord.SourceName)) handle = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, keyName) try: dllNames = win32api.RegQueryValueEx(handle, 'EventMessageFile')[0].split(';') data = None for dllName in dllNames: try: dllName = win32api.ExpandEnvironmentStrings(dllName) dllHandle = win32api.LoadLibraryEx(dllName, 0, win32con.DONT_RESOLVE_DLL_REFERENCES) try: data = win32api.FormatMessageW(win32con.FORMAT_MESSAGE_FROM_HMODULE, dllHandle, eventLogRecord.EventID, langid, eventLogRecord.StringInserts) finally: win32api.FreeLibrary(dllHandle) except win32api.error: pass if (data is not None): break finally: win32api.RegCloseKey(handle) return (data or u'')
[ "def", "FormatMessage", "(", "eventLogRecord", ",", "logType", "=", "'Application'", ")", ":", "keyName", "=", "(", "'SYSTEM\\\\CurrentControlSet\\\\Services\\\\EventLog\\\\%s\\\\%s'", "%", "(", "logType", ",", "eventLogRecord", ".", "SourceName", ")", ")", "handle", "=", "win32api", ".", "RegOpenKey", "(", "win32con", ".", "HKEY_LOCAL_MACHINE", ",", "keyName", ")", "try", ":", "dllNames", "=", "win32api", ".", "RegQueryValueEx", "(", "handle", ",", "'EventMessageFile'", ")", "[", "0", "]", ".", "split", "(", "';'", ")", "data", "=", "None", "for", "dllName", "in", "dllNames", ":", "try", ":", "dllName", "=", "win32api", ".", "ExpandEnvironmentStrings", "(", "dllName", ")", "dllHandle", "=", "win32api", ".", "LoadLibraryEx", "(", "dllName", ",", "0", ",", "win32con", ".", "DONT_RESOLVE_DLL_REFERENCES", ")", "try", ":", "data", "=", "win32api", ".", "FormatMessageW", "(", "win32con", ".", "FORMAT_MESSAGE_FROM_HMODULE", ",", "dllHandle", ",", "eventLogRecord", ".", "EventID", ",", "langid", ",", "eventLogRecord", ".", "StringInserts", ")", "finally", ":", "win32api", ".", "FreeLibrary", "(", "dllHandle", ")", "except", "win32api", ".", "error", ":", "pass", "if", "(", "data", "is", "not", "None", ")", ":", "break", "finally", ":", "win32api", ".", "RegCloseKey", "(", "handle", ")", "return", "(", "data", "or", "u''", ")" ]
given a tuple from readeventlog .
train
false
21,400
def libvlc_video_set_adjust_int(p_mi, option, value): f = (_Cfunctions.get('libvlc_video_set_adjust_int', None) or _Cfunction('libvlc_video_set_adjust_int', ((1,), (1,), (1,)), None, None, MediaPlayer, ctypes.c_uint, ctypes.c_int)) return f(p_mi, option, value)
[ "def", "libvlc_video_set_adjust_int", "(", "p_mi", ",", "option", ",", "value", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_adjust_int'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_adjust_int'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_uint", ",", "ctypes", ".", "c_int", ")", ")", "return", "f", "(", "p_mi", ",", "option", ",", "value", ")" ]
set adjust option as integer .
train
true
21,401
def _load(ns): ctx = builtins.__xonsh_ctx__ for name in ns.names: if ns.verbose: print 'loading xontrib {0!r}'.format(name) update_context(name, ctx=ctx) if update_context.bad_imports: prompt_xontrib_install(update_context.bad_imports) del update_context.bad_imports
[ "def", "_load", "(", "ns", ")", ":", "ctx", "=", "builtins", ".", "__xonsh_ctx__", "for", "name", "in", "ns", ".", "names", ":", "if", "ns", ".", "verbose", ":", "print", "'loading xontrib {0!r}'", ".", "format", "(", "name", ")", "update_context", "(", "name", ",", "ctx", "=", "ctx", ")", "if", "update_context", ".", "bad_imports", ":", "prompt_xontrib_install", "(", "update_context", ".", "bad_imports", ")", "del", "update_context", ".", "bad_imports" ]
load the given plugin .
train
false
21,404
def weblate_login(request): if request.user.is_authenticated(): return redirect_profile() auth_backends = list(load_backends(BACKENDS).keys()) if ((len(auth_backends) == 1) and (auth_backends[0] != u'email')): return redirect(u'social:begin', auth_backends[0]) return auth_views.login(request, template_name=u'accounts/login.html', authentication_form=LoginForm, extra_context={u'login_backends': [x for x in auth_backends if (x != u'email')], u'can_reset': (u'email' in auth_backends), u'title': _(u'Login')})
[ "def", "weblate_login", "(", "request", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "redirect_profile", "(", ")", "auth_backends", "=", "list", "(", "load_backends", "(", "BACKENDS", ")", ".", "keys", "(", ")", ")", "if", "(", "(", "len", "(", "auth_backends", ")", "==", "1", ")", "and", "(", "auth_backends", "[", "0", "]", "!=", "u'email'", ")", ")", ":", "return", "redirect", "(", "u'social:begin'", ",", "auth_backends", "[", "0", "]", ")", "return", "auth_views", ".", "login", "(", "request", ",", "template_name", "=", "u'accounts/login.html'", ",", "authentication_form", "=", "LoginForm", ",", "extra_context", "=", "{", "u'login_backends'", ":", "[", "x", "for", "x", "in", "auth_backends", "if", "(", "x", "!=", "u'email'", ")", "]", ",", "u'can_reset'", ":", "(", "u'email'", "in", "auth_backends", ")", ",", "u'title'", ":", "_", "(", "u'Login'", ")", "}", ")" ]
login handler .
train
false
21,405
def verify_private_key(private_key, public_key, passphrase=None): return bool((get_public_key(private_key, passphrase) == get_public_key(public_key)))
[ "def", "verify_private_key", "(", "private_key", ",", "public_key", ",", "passphrase", "=", "None", ")", ":", "return", "bool", "(", "(", "get_public_key", "(", "private_key", ",", "passphrase", ")", "==", "get_public_key", "(", "public_key", ")", ")", ")" ]
verify that private_key matches public_key private_key: the private key to verify .
train
false
21,406
def exponential_weights(length, decay_rate): return (full(length, decay_rate, float64_dtype) ** arange((length + 1), 1, (-1)))
[ "def", "exponential_weights", "(", "length", ",", "decay_rate", ")", ":", "return", "(", "full", "(", "length", ",", "decay_rate", ",", "float64_dtype", ")", "**", "arange", "(", "(", "length", "+", "1", ")", ",", "1", ",", "(", "-", "1", ")", ")", ")" ]
build a weight vector for an exponentially-weighted statistic .
train
false
21,407
def image_resize_image_small(base64_source, size=(64, 64), encoding='base64', filetype=None, avoid_if_small=False): return image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)
[ "def", "image_resize_image_small", "(", "base64_source", ",", "size", "=", "(", "64", ",", "64", ")", ",", "encoding", "=", "'base64'", ",", "filetype", "=", "None", ",", "avoid_if_small", "=", "False", ")", ":", "return", "image_resize_image", "(", "base64_source", ",", "size", ",", "encoding", ",", "filetype", ",", "avoid_if_small", ")" ]
wrapper on image_resize_image .
train
false
21,410
def libvlc_media_list_player_get_media_player(p_mlp): f = (_Cfunctions.get('libvlc_media_list_player_get_media_player', None) or _Cfunction('libvlc_media_list_player_get_media_player', ((1,),), class_result(MediaPlayer), ctypes.c_void_p, MediaListPlayer)) return f(p_mlp)
[ "def", "libvlc_media_list_player_get_media_player", "(", "p_mlp", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_get_media_player'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_get_media_player'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "class_result", "(", "MediaPlayer", ")", ",", "ctypes", ".", "c_void_p", ",", "MediaListPlayer", ")", ")", "return", "f", "(", "p_mlp", ")" ]
get media player of the media_list_player instance .
train
false
21,411
def repeat_op(repetitions, inputs, op, *args, **kwargs): scope = kwargs.pop('scope', None) with tf.variable_scope(scope, 'RepeatOp', [inputs]): tower = inputs for _ in range(repetitions): tower = op(tower, *args, **kwargs) return tower
[ "def", "repeat_op", "(", "repetitions", ",", "inputs", ",", "op", ",", "*", "args", ",", "**", "kwargs", ")", ":", "scope", "=", "kwargs", ".", "pop", "(", "'scope'", ",", "None", ")", "with", "tf", ".", "variable_scope", "(", "scope", ",", "'RepeatOp'", ",", "[", "inputs", "]", ")", ":", "tower", "=", "inputs", "for", "_", "in", "range", "(", "repetitions", ")", ":", "tower", "=", "op", "(", "tower", ",", "*", "args", ",", "**", "kwargs", ")", "return", "tower" ]
build a sequential tower starting from inputs by using an op repeatedly .
train
true
21,413
def test_preprocessor_exceptions(): try: macroexpand(tokenize('(defn)')[0], HyASTCompiler(__name__)) assert False except HyMacroExpansionError as e: assert ('_hy_anon_fn_' not in str(e)) assert ('TypeError' not in str(e))
[ "def", "test_preprocessor_exceptions", "(", ")", ":", "try", ":", "macroexpand", "(", "tokenize", "(", "'(defn)'", ")", "[", "0", "]", ",", "HyASTCompiler", "(", "__name__", ")", ")", "assert", "False", "except", "HyMacroExpansionError", "as", "e", ":", "assert", "(", "'_hy_anon_fn_'", "not", "in", "str", "(", "e", ")", ")", "assert", "(", "'TypeError'", "not", "in", "str", "(", "e", ")", ")" ]
test that macro expansion raises appropriate exceptions .
train
false
21,414
def get_conv_gradinputs_shape(kernel_shape, top_shape, border_mode, subsample, filter_dilation=None): (bsize, topshp) = (top_shape[0], top_shape[2:]) (nkern, kshp) = (kernel_shape[1], kernel_shape[2:]) if (filter_dilation is None): filter_dilation = numpy.ones(len(subsample), dtype='int') if isinstance(border_mode, tuple): out_shp = tuple((get_conv_gradinputs_shape_1axis(kshp[i], topshp[i], border_mode[i], subsample[i], filter_dilation[i]) for i in range(len(subsample)))) else: out_shp = tuple((get_conv_gradinputs_shape_1axis(kshp[i], topshp[i], border_mode, subsample[i], filter_dilation[i]) for i in range(len(subsample)))) return ((bsize, nkern) + out_shp)
[ "def", "get_conv_gradinputs_shape", "(", "kernel_shape", ",", "top_shape", ",", "border_mode", ",", "subsample", ",", "filter_dilation", "=", "None", ")", ":", "(", "bsize", ",", "topshp", ")", "=", "(", "top_shape", "[", "0", "]", ",", "top_shape", "[", "2", ":", "]", ")", "(", "nkern", ",", "kshp", ")", "=", "(", "kernel_shape", "[", "1", "]", ",", "kernel_shape", "[", "2", ":", "]", ")", "if", "(", "filter_dilation", "is", "None", ")", ":", "filter_dilation", "=", "numpy", ".", "ones", "(", "len", "(", "subsample", ")", ",", "dtype", "=", "'int'", ")", "if", "isinstance", "(", "border_mode", ",", "tuple", ")", ":", "out_shp", "=", "tuple", "(", "(", "get_conv_gradinputs_shape_1axis", "(", "kshp", "[", "i", "]", ",", "topshp", "[", "i", "]", ",", "border_mode", "[", "i", "]", ",", "subsample", "[", "i", "]", ",", "filter_dilation", "[", "i", "]", ")", "for", "i", "in", "range", "(", "len", "(", "subsample", ")", ")", ")", ")", "else", ":", "out_shp", "=", "tuple", "(", "(", "get_conv_gradinputs_shape_1axis", "(", "kshp", "[", "i", "]", ",", "topshp", "[", "i", "]", ",", "border_mode", ",", "subsample", "[", "i", "]", ",", "filter_dilation", "[", "i", "]", ")", "for", "i", "in", "range", "(", "len", "(", "subsample", ")", ")", ")", ")", "return", "(", "(", "bsize", ",", "nkern", ")", "+", "out_shp", ")" ]
this function tries to compute the image shape of convolution gradinputs .
train
false
21,415
def GetClass(clsid): return mapCLSIDToClass[clsid]
[ "def", "GetClass", "(", "clsid", ")", ":", "return", "mapCLSIDToClass", "[", "clsid", "]" ]
given a clsid .
train
false
21,417
def get_ufunc_info(ufunc_key): _lazy_init_db() return _ufunc_db[ufunc_key]
[ "def", "get_ufunc_info", "(", "ufunc_key", ")", ":", "_lazy_init_db", "(", ")", "return", "_ufunc_db", "[", "ufunc_key", "]" ]
get the lowering information for the ufunc with key ufunc_key .
train
false
21,418
def merge_options(options, optgroup=None): alloptions = {} options = list(options) for i in range((len(options) - 1), (-1), (-1)): (optname, optdict) = options[i] if (optname in alloptions): options.pop(i) alloptions[optname].update(optdict) else: optdict = optdict.copy() options[i] = (optname, optdict) alloptions[optname] = optdict if (optgroup is not None): alloptions[optname]['group'] = optgroup return tuple(options)
[ "def", "merge_options", "(", "options", ",", "optgroup", "=", "None", ")", ":", "alloptions", "=", "{", "}", "options", "=", "list", "(", "options", ")", "for", "i", "in", "range", "(", "(", "len", "(", "options", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "(", "optname", ",", "optdict", ")", "=", "options", "[", "i", "]", "if", "(", "optname", "in", "alloptions", ")", ":", "options", ".", "pop", "(", "i", ")", "alloptions", "[", "optname", "]", ".", "update", "(", "optdict", ")", "else", ":", "optdict", "=", "optdict", ".", "copy", "(", ")", "options", "[", "i", "]", "=", "(", "optname", ",", "optdict", ")", "alloptions", "[", "optname", "]", "=", "optdict", "if", "(", "optgroup", "is", "not", "None", ")", ":", "alloptions", "[", "optname", "]", "[", "'group'", "]", "=", "optgroup", "return", "tuple", "(", "options", ")" ]
preprocess a list of options and remove duplicates .
train
false
21,419
def datetime_aligned(ds1, ds2, maxLen=None): aligned1 = dataseries.SequenceDataSeries(maxLen) aligned2 = dataseries.SequenceDataSeries(maxLen) Syncer(ds1, ds2, aligned1, aligned2) return (aligned1, aligned2)
[ "def", "datetime_aligned", "(", "ds1", ",", "ds2", ",", "maxLen", "=", "None", ")", ":", "aligned1", "=", "dataseries", ".", "SequenceDataSeries", "(", "maxLen", ")", "aligned2", "=", "dataseries", ".", "SequenceDataSeries", "(", "maxLen", ")", "Syncer", "(", "ds1", ",", "ds2", ",", "aligned1", ",", "aligned2", ")", "return", "(", "aligned1", ",", "aligned2", ")" ]
returns two dataseries that exhibit only those values whose datetimes are in both dataseries .
train
false
21,420
def _get_variables_to_train(): if (FLAGS.trainable_scopes is None): return tf.trainable_variables() else: scopes = [scope.strip() for scope in FLAGS.trainable_scopes.split(',')] variables_to_train = [] for scope in scopes: variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope) variables_to_train.extend(variables) return variables_to_train
[ "def", "_get_variables_to_train", "(", ")", ":", "if", "(", "FLAGS", ".", "trainable_scopes", "is", "None", ")", ":", "return", "tf", ".", "trainable_variables", "(", ")", "else", ":", "scopes", "=", "[", "scope", ".", "strip", "(", ")", "for", "scope", "in", "FLAGS", ".", "trainable_scopes", ".", "split", "(", "','", ")", "]", "variables_to_train", "=", "[", "]", "for", "scope", "in", "scopes", ":", "variables", "=", "tf", ".", "get_collection", "(", "tf", ".", "GraphKeys", ".", "TRAINABLE_VARIABLES", ",", "scope", ")", "variables_to_train", ".", "extend", "(", "variables", ")", "return", "variables_to_train" ]
returns a list of variables to train .
train
false
21,421
def joinedGroup(group, avatar): s = group.send({'text': 'Hello, monkeys'}) s.addCallback(sentMessage, group, avatar) return s
[ "def", "joinedGroup", "(", "group", ",", "avatar", ")", ":", "s", "=", "group", ".", "send", "(", "{", "'text'", ":", "'Hello, monkeys'", "}", ")", "s", ".", "addCallback", "(", "sentMessage", ",", "group", ",", "avatar", ")", "return", "s" ]
joined the group successfully .
train
false
21,422
def _wrap_results(result, dtype): if is_datetime64_dtype(dtype): if (not isinstance(result, np.ndarray)): result = lib.Timestamp(result) else: result = result.view(dtype) elif is_timedelta64_dtype(dtype): if (not isinstance(result, np.ndarray)): if (np.fabs(result) > _int64_max): raise ValueError('overflow in timedelta operation') result = lib.Timedelta(result, unit='ns') else: result = result.astype('i8').view(dtype) return result
[ "def", "_wrap_results", "(", "result", ",", "dtype", ")", ":", "if", "is_datetime64_dtype", "(", "dtype", ")", ":", "if", "(", "not", "isinstance", "(", "result", ",", "np", ".", "ndarray", ")", ")", ":", "result", "=", "lib", ".", "Timestamp", "(", "result", ")", "else", ":", "result", "=", "result", ".", "view", "(", "dtype", ")", "elif", "is_timedelta64_dtype", "(", "dtype", ")", ":", "if", "(", "not", "isinstance", "(", "result", ",", "np", ".", "ndarray", ")", ")", ":", "if", "(", "np", ".", "fabs", "(", "result", ")", ">", "_int64_max", ")", ":", "raise", "ValueError", "(", "'overflow in timedelta operation'", ")", "result", "=", "lib", ".", "Timedelta", "(", "result", ",", "unit", "=", "'ns'", ")", "else", ":", "result", "=", "result", ".", "astype", "(", "'i8'", ")", ".", "view", "(", "dtype", ")", "return", "result" ]
wrap our results if needed .
train
false
21,423
@csrf_exempt def update_project(request, project): if (not appsettings.ENABLE_HOOKS): return HttpResponseNotAllowed([]) obj = get_project(request, project, True) if (not obj.enable_hooks): return HttpResponseNotAllowed([]) perform_update(obj) return hook_response()
[ "@", "csrf_exempt", "def", "update_project", "(", "request", ",", "project", ")", ":", "if", "(", "not", "appsettings", ".", "ENABLE_HOOKS", ")", ":", "return", "HttpResponseNotAllowed", "(", "[", "]", ")", "obj", "=", "get_project", "(", "request", ",", "project", ",", "True", ")", "if", "(", "not", "obj", ".", "enable_hooks", ")", ":", "return", "HttpResponseNotAllowed", "(", "[", "]", ")", "perform_update", "(", "obj", ")", "return", "hook_response", "(", ")" ]
api hook for updating git repos .
train
false
21,425
def iddr_aidi(m, n, k): return _id.iddr_aidi(m, n, k)
[ "def", "iddr_aidi", "(", "m", ",", "n", ",", "k", ")", ":", "return", "_id", ".", "iddr_aidi", "(", "m", ",", "n", ",", "k", ")" ]
initialize array for :func:iddr_aid .
train
false
21,426
def nP(n, k=None, replacement=False): try: n = as_int(n) except ValueError: return Integer(_nP(_multiset_histogram(n), k, replacement)) return Integer(_nP(n, k, replacement))
[ "def", "nP", "(", "n", ",", "k", "=", "None", ",", "replacement", "=", "False", ")", ":", "try", ":", "n", "=", "as_int", "(", "n", ")", "except", "ValueError", ":", "return", "Integer", "(", "_nP", "(", "_multiset_histogram", "(", "n", ")", ",", "k", ",", "replacement", ")", ")", "return", "Integer", "(", "_nP", "(", "n", ",", "k", ",", "replacement", ")", ")" ]
return the number of permutations of n items taken k at a time .
train
false
21,427
def identity(x): return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
return argument untouched .
train
false
21,428
def main_loop(options, modules, sender, tags): next_heartbeat = int((time.time() + 600)) while ALIVE: populate_collectors(options.cdir) reload_changed_config_modules(modules, options, sender, tags) reap_children() check_children(options) spawn_children() time.sleep(15) now = int(time.time()) if (now >= next_heartbeat): LOG.info(('Heartbeat (%d collectors running)' % sum((1 for col in all_living_collectors())))) next_heartbeat = (now + 600)
[ "def", "main_loop", "(", "options", ",", "modules", ",", "sender", ",", "tags", ")", ":", "next_heartbeat", "=", "int", "(", "(", "time", ".", "time", "(", ")", "+", "600", ")", ")", "while", "ALIVE", ":", "populate_collectors", "(", "options", ".", "cdir", ")", "reload_changed_config_modules", "(", "modules", ",", "options", ",", "sender", ",", "tags", ")", "reap_children", "(", ")", "check_children", "(", "options", ")", "spawn_children", "(", ")", "time", ".", "sleep", "(", "15", ")", "now", "=", "int", "(", "time", ".", "time", "(", ")", ")", "if", "(", "now", ">=", "next_heartbeat", ")", ":", "LOG", ".", "info", "(", "(", "'Heartbeat (%d collectors running)'", "%", "sum", "(", "(", "1", "for", "col", "in", "all_living_collectors", "(", ")", ")", ")", ")", ")", "next_heartbeat", "=", "(", "now", "+", "600", ")" ]
main processing loop .
train
false
21,430
def _GetPercentile(sortedlist, percent): if (not sortedlist): return None k = (int(math.ceil((len(sortedlist) * percent))) - 1) if (k < 0): k = 0 return sortedlist[k]
[ "def", "_GetPercentile", "(", "sortedlist", ",", "percent", ")", ":", "if", "(", "not", "sortedlist", ")", ":", "return", "None", "k", "=", "(", "int", "(", "math", ".", "ceil", "(", "(", "len", "(", "sortedlist", ")", "*", "percent", ")", ")", ")", "-", "1", ")", "if", "(", "k", "<", "0", ")", ":", "k", "=", "0", "return", "sortedlist", "[", "k", "]" ]
returns a desired percentile value of a sorted list of numbers .
train
false
21,431
def list_known_nonphylogenetic_metrics(): result = [] for name in dir(distance_transform): if name.startswith('dist_'): result.append(name[5:]) elif name.startswith('binary_dist_'): result.append(('binary_' + name[12:])) result.sort() return result
[ "def", "list_known_nonphylogenetic_metrics", "(", ")", ":", "result", "=", "[", "]", "for", "name", "in", "dir", "(", "distance_transform", ")", ":", "if", "name", ".", "startswith", "(", "'dist_'", ")", ":", "result", ".", "append", "(", "name", "[", "5", ":", "]", ")", "elif", "name", ".", "startswith", "(", "'binary_dist_'", ")", ":", "result", ".", "append", "(", "(", "'binary_'", "+", "name", "[", "12", ":", "]", ")", ")", "result", ".", "sort", "(", ")", "return", "result" ]
lists known metrics by name from distance_transform .
train
false
21,432
def device_pointer(obj): return device_ctypes_pointer(obj).value
[ "def", "device_pointer", "(", "obj", ")", ":", "return", "device_ctypes_pointer", "(", "obj", ")", ".", "value" ]
get the device pointer as an integer .
train
false
21,433
def code_almost_equal(a, b): split_a = split_and_strip_non_empty_lines(a) split_b = split_and_strip_non_empty_lines(b) if (len(split_a) != len(split_b)): return False for index in range(len(split_a)): if (u''.join(split_a[index].split()) != u''.join(split_b[index].split())): return False return True
[ "def", "code_almost_equal", "(", "a", ",", "b", ")", ":", "split_a", "=", "split_and_strip_non_empty_lines", "(", "a", ")", "split_b", "=", "split_and_strip_non_empty_lines", "(", "b", ")", "if", "(", "len", "(", "split_a", ")", "!=", "len", "(", "split_b", ")", ")", ":", "return", "False", "for", "index", "in", "range", "(", "len", "(", "split_a", ")", ")", ":", "if", "(", "u''", ".", "join", "(", "split_a", "[", "index", "]", ".", "split", "(", ")", ")", "!=", "u''", ".", "join", "(", "split_b", "[", "index", "]", ".", "split", "(", ")", ")", ")", ":", "return", "False", "return", "True" ]
return true if code is similar .
train
false
21,434
def all_builtins(): return (p for p in plugin.plugins.values() if p.builtin)
[ "def", "all_builtins", "(", ")", ":", "return", "(", "p", "for", "p", "in", "plugin", ".", "plugins", ".", "values", "(", ")", "if", "p", ".", "builtin", ")" ]
helper function to return an iterator over all builtin plugins .
train
false
21,435
def delete_floating_ip(kwargs=None, call=None): if (call != 'function'): log.error('The delete_floating_ip function must be called with -f or --function.') return False if (not kwargs): kwargs = {} if ('floating_ip' not in kwargs): log.error('A floating IP is required.') return False floating_ip = kwargs['floating_ip'] log.debug('Floating ip is {0}'.format('floating_ip')) result = query(method='floating_ips', command=floating_ip, http_method='delete') return result
[ "def", "delete_floating_ip", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "log", ".", "error", "(", "'The delete_floating_ip function must be called with -f or --function.'", ")", "return", "False", "if", "(", "not", "kwargs", ")", ":", "kwargs", "=", "{", "}", "if", "(", "'floating_ip'", "not", "in", "kwargs", ")", ":", "log", ".", "error", "(", "'A floating IP is required.'", ")", "return", "False", "floating_ip", "=", "kwargs", "[", "'floating_ip'", "]", "log", ".", "debug", "(", "'Floating ip is {0}'", ".", "format", "(", "'floating_ip'", ")", ")", "result", "=", "query", "(", "method", "=", "'floating_ips'", ",", "command", "=", "floating_ip", ",", "http_method", "=", "'delete'", ")", "return", "result" ]
delete a floating ip .
train
true
21,439
def _hex(list=None): list = (list or []) if list: list.reverse() return int(''.join(list), 16) return 0
[ "def", "_hex", "(", "list", "=", "None", ")", ":", "list", "=", "(", "list", "or", "[", "]", ")", "if", "list", ":", "list", ".", "reverse", "(", ")", "return", "int", "(", "''", ".", "join", "(", "list", ")", ",", "16", ")", "return", "0" ]
format the return value in list into hex .
train
false
21,442
def detect_ncpus(): global _ncpus if (_ncpus is None): if iswindows: import win32api ans = win32api.GetSystemInfo()[5] else: import multiprocessing ans = (-1) try: ans = multiprocessing.cpu_count() except Exception: from PyQt5.Qt import QThread ans = QThread.idealThreadCount() _ncpus = max(1, ans) return _ncpus
[ "def", "detect_ncpus", "(", ")", ":", "global", "_ncpus", "if", "(", "_ncpus", "is", "None", ")", ":", "if", "iswindows", ":", "import", "win32api", "ans", "=", "win32api", ".", "GetSystemInfo", "(", ")", "[", "5", "]", "else", ":", "import", "multiprocessing", "ans", "=", "(", "-", "1", ")", "try", ":", "ans", "=", "multiprocessing", ".", "cpu_count", "(", ")", "except", "Exception", ":", "from", "PyQt5", ".", "Qt", "import", "QThread", "ans", "=", "QThread", ".", "idealThreadCount", "(", ")", "_ncpus", "=", "max", "(", "1", ",", "ans", ")", "return", "_ncpus" ]
detects the number of effective cpus in the system .
train
false
21,445
@removals.remove(message='keystoneclient auth plugins are deprecated. Use keystoneauth.', version='2.1.0', removal_version='3.0.0') def load_from_argparse_arguments(namespace, **kwargs): if (not namespace.os_auth_plugin): return None if isinstance(namespace.os_auth_plugin, type): plugin = namespace.os_auth_plugin else: plugin = base.get_plugin_class(namespace.os_auth_plugin) return plugin.load_from_argparse_arguments(namespace, **kwargs)
[ "@", "removals", ".", "remove", "(", "message", "=", "'keystoneclient auth plugins are deprecated. Use keystoneauth.'", ",", "version", "=", "'2.1.0'", ",", "removal_version", "=", "'3.0.0'", ")", "def", "load_from_argparse_arguments", "(", "namespace", ",", "**", "kwargs", ")", ":", "if", "(", "not", "namespace", ".", "os_auth_plugin", ")", ":", "return", "None", "if", "isinstance", "(", "namespace", ".", "os_auth_plugin", ",", "type", ")", ":", "plugin", "=", "namespace", ".", "os_auth_plugin", "else", ":", "plugin", "=", "base", ".", "get_plugin_class", "(", "namespace", ".", "os_auth_plugin", ")", "return", "plugin", ".", "load_from_argparse_arguments", "(", "namespace", ",", "**", "kwargs", ")" ]
retrieve the created plugin from the completed argparse results .
train
false
21,446
def CDLDOJISTAR(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDLDOJISTAR)
[ "def", "CDLDOJISTAR", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLDOJISTAR", ")" ]
doji star .
train
false
21,447
def run_datastore_upgrade(db_access, zookeeper, log_postfix, total_entities): ensure_app_is_not_running() validate_and_update_entities(db_access, zookeeper, log_postfix, total_entities) logging.info('Updated invalid entities and deleted tombstoned entities.') db_access.set_metadata(cassandra_interface.VERSION_INFO_KEY, str(cassandra_interface.EXPECTED_DATA_VERSION)) logging.info('Stored the data version successfully.') db_access.delete_table(dbconstants.JOURNAL_TABLE) logging.info('Deleted Journal Table sucessfully.')
[ "def", "run_datastore_upgrade", "(", "db_access", ",", "zookeeper", ",", "log_postfix", ",", "total_entities", ")", ":", "ensure_app_is_not_running", "(", ")", "validate_and_update_entities", "(", "db_access", ",", "zookeeper", ",", "log_postfix", ",", "total_entities", ")", "logging", ".", "info", "(", "'Updated invalid entities and deleted tombstoned entities.'", ")", "db_access", ".", "set_metadata", "(", "cassandra_interface", ".", "VERSION_INFO_KEY", ",", "str", "(", "cassandra_interface", ".", "EXPECTED_DATA_VERSION", ")", ")", "logging", ".", "info", "(", "'Stored the data version successfully.'", ")", "db_access", ".", "delete_table", "(", "dbconstants", ".", "JOURNAL_TABLE", ")", "logging", ".", "info", "(", "'Deleted Journal Table sucessfully.'", ")" ]
runs the data upgrade process of fetching .
train
false
21,448
def libvlc_vlm_set_enabled(p_instance, psz_name, b_enabled): f = (_Cfunctions.get('libvlc_vlm_set_enabled', None) or _Cfunction('libvlc_vlm_set_enabled', ((1,), (1,), (1,)), None, ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)) return f(p_instance, psz_name, b_enabled)
[ "def", "libvlc_vlm_set_enabled", "(", "p_instance", ",", "psz_name", ",", "b_enabled", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_vlm_set_enabled'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_vlm_set_enabled'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "Instance", ",", "ctypes", ".", "c_char_p", ",", "ctypes", ".", "c_int", ")", ")", "return", "f", "(", "p_instance", ",", "psz_name", ",", "b_enabled", ")" ]
enable or disable a media .
train
true
21,449
def load_ipython_extension(ip): warnings.warn('The rmagic extension in IPython has moved to `rpy2.ipython`, please see `rpy2` documentation.')
[ "def", "load_ipython_extension", "(", "ip", ")", ":", "warnings", ".", "warn", "(", "'The rmagic extension in IPython has moved to `rpy2.ipython`, please see `rpy2` documentation.'", ")" ]
entry point of the ipython extension parameters ipython : ipython interpreter an instance of the ipython interpreter that is handed over to the extension .
train
false
21,452
def test_bad_precision(): f = PlainTextFormatter() def set_fp(p): f.float_precision = p nt.assert_raises(ValueError, set_fp, '%') nt.assert_raises(ValueError, set_fp, '%.3f%i') nt.assert_raises(ValueError, set_fp, 'foo') nt.assert_raises(ValueError, set_fp, (-1))
[ "def", "test_bad_precision", "(", ")", ":", "f", "=", "PlainTextFormatter", "(", ")", "def", "set_fp", "(", "p", ")", ":", "f", ".", "float_precision", "=", "p", "nt", ".", "assert_raises", "(", "ValueError", ",", "set_fp", ",", "'%'", ")", "nt", ".", "assert_raises", "(", "ValueError", ",", "set_fp", ",", "'%.3f%i'", ")", "nt", ".", "assert_raises", "(", "ValueError", ",", "set_fp", ",", "'foo'", ")", "nt", ".", "assert_raises", "(", "ValueError", ",", "set_fp", ",", "(", "-", "1", ")", ")" ]
test various invalid values for float_precision .
train
false
21,453
def page(): found = True get_vars = request.get_vars if ('name' in get_vars): table = s3db.cms_post query = ((table.name == get_vars.name) & (table.deleted != True)) row = db(query).select(table.id, limitby=(0, 1)).first() if row: request.args.append(str(row.id)) else: found = False def prep(r): if (not found): r.error(404, T('Page not found'), next=auth.permission.homepage) s3db.configure(r.tablename, listadd=False) return True s3.prep = prep def postp(r, output): if (r.record and (not r.transformable())): output = {'item': s3base.S3XMLContents(r.record.body).xml()} current.menu.options = None response.view = s3base.S3CRUD._view(r, 'cms/page.html') if r.record.replies: ckeditor = URL(c='static', f='ckeditor', args='ckeditor.js') s3.scripts.append(ckeditor) adapter = URL(c='static', f='ckeditor', args=['adapters', 'jquery.js']) s3.scripts.append(adapter) js = ''.join(('i18n.reply="', str(T('Reply')), '"\nvar img_path=S3.Ap.concat(\'/static/img/jCollapsible/\')\nvar ck_config={toolbar:[[\'Bold\',\'Italic\',\'-\',\'NumberedList\',\'BulletedList\',\'-\',\'Link\',\'Unlink\',\'-\',\'Smiley\',\'-\',\'Source\',\'Maximize\']],toolbarCanCollapse:false,removePlugins:\'elementspath\'}\nfunction comment_reply(id){\n $(\'#cms_comment_post_id__row\').hide()\n $(\'#cms_comment_post_id__row1\').hide()\n $(\'#comment-title\').html(i18n.reply)\n $(\'#cms_comment_body\').ckeditorGet().destroy()\n $(\'#cms_comment_body\').ckeditor(ck_config)\n $(\'#comment-form\').insertAfter($(\'#comment-\'+id))\n $(\'#cms_comment_parent\').val(id)\n var post_id = $(\'#comment-\'+id).attr(\'post_id\')\n $(\'#cms_comment_post_id\').val(post_id)\n}')) s3.js_global.append(js) return output s3.postp = postp output = s3_rest_controller('cms', 'post') return output
[ "def", "page", "(", ")", ":", "found", "=", "True", "get_vars", "=", "request", ".", "get_vars", "if", "(", "'name'", "in", "get_vars", ")", ":", "table", "=", "s3db", ".", "cms_post", "query", "=", "(", "(", "table", ".", "name", "==", "get_vars", ".", "name", ")", "&", "(", "table", ".", "deleted", "!=", "True", ")", ")", "row", "=", "db", "(", "query", ")", ".", "select", "(", "table", ".", "id", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "if", "row", ":", "request", ".", "args", ".", "append", "(", "str", "(", "row", ".", "id", ")", ")", "else", ":", "found", "=", "False", "def", "prep", "(", "r", ")", ":", "if", "(", "not", "found", ")", ":", "r", ".", "error", "(", "404", ",", "T", "(", "'Page not found'", ")", ",", "next", "=", "auth", ".", "permission", ".", "homepage", ")", "s3db", ".", "configure", "(", "r", ".", "tablename", ",", "listadd", "=", "False", ")", "return", "True", "s3", ".", "prep", "=", "prep", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "(", "r", ".", "record", "and", "(", "not", "r", ".", "transformable", "(", ")", ")", ")", ":", "output", "=", "{", "'item'", ":", "s3base", ".", "S3XMLContents", "(", "r", ".", "record", ".", "body", ")", ".", "xml", "(", ")", "}", "current", ".", "menu", ".", "options", "=", "None", "response", ".", "view", "=", "s3base", ".", "S3CRUD", ".", "_view", "(", "r", ",", "'cms/page.html'", ")", "if", "r", ".", "record", ".", "replies", ":", "ckeditor", "=", "URL", "(", "c", "=", "'static'", ",", "f", "=", "'ckeditor'", ",", "args", "=", "'ckeditor.js'", ")", "s3", ".", "scripts", ".", "append", "(", "ckeditor", ")", "adapter", "=", "URL", "(", "c", "=", "'static'", ",", "f", "=", "'ckeditor'", ",", "args", "=", "[", "'adapters'", ",", "'jquery.js'", "]", ")", "s3", ".", "scripts", ".", "append", "(", "adapter", ")", "js", "=", "''", ".", "join", "(", "(", "'i18n.reply=\"'", ",", "str", "(", "T", "(", "'Reply'", ")", ")", ",", "'\"\\nvar 
img_path=S3.Ap.concat(\\'/static/img/jCollapsible/\\')\\nvar ck_config={toolbar:[[\\'Bold\\',\\'Italic\\',\\'-\\',\\'NumberedList\\',\\'BulletedList\\',\\'-\\',\\'Link\\',\\'Unlink\\',\\'-\\',\\'Smiley\\',\\'-\\',\\'Source\\',\\'Maximize\\']],toolbarCanCollapse:false,removePlugins:\\'elementspath\\'}\\nfunction comment_reply(id){\\n $(\\'#cms_comment_post_id__row\\').hide()\\n $(\\'#cms_comment_post_id__row1\\').hide()\\n $(\\'#comment-title\\').html(i18n.reply)\\n $(\\'#cms_comment_body\\').ckeditorGet().destroy()\\n $(\\'#cms_comment_body\\').ckeditor(ck_config)\\n $(\\'#comment-form\\').insertAfter($(\\'#comment-\\'+id))\\n $(\\'#cms_comment_parent\\').val(id)\\n var post_id = $(\\'#comment-\\'+id).attr(\\'post_id\\')\\n $(\\'#cms_comment_post_id\\').val(post_id)\\n}'", ")", ")", "s3", ".", "js_global", ".", "append", "(", "js", ")", "return", "output", "s3", ".", "postp", "=", "postp", "output", "=", "s3_rest_controller", "(", "'cms'", ",", "'post'", ")", "return", "output" ]
get a wikipediapage object for the page with title title or the pageid pageid .
train
false
21,455
def extract_per_individual_states_from_sample_metadata(sample_metadata, state_category, state_values, individual_identifier_category, filter_missing_data=True): len_state_values = len(state_values) def inner_dict_constructor(): return ([None] * len_state_values) results = defaultdict(inner_dict_constructor) for (sample_id, metadata) in sample_metadata.items(): try: individual_id = metadata[individual_identifier_category] except KeyError: raise KeyError(('%s is not a sample metadata category.' % individual_identifier_category)) try: state_value = metadata[state_category] except KeyError: raise KeyError(('%s is not a sample metadata category.' % state_category)) try: state_index = state_values.index(state_value) except ValueError: continue results[individual_id][state_index] = sample_id if filter_missing_data: for (individual_id, sample_ids) in results.items(): if (None in sample_ids): del results[individual_id] return results
[ "def", "extract_per_individual_states_from_sample_metadata", "(", "sample_metadata", ",", "state_category", ",", "state_values", ",", "individual_identifier_category", ",", "filter_missing_data", "=", "True", ")", ":", "len_state_values", "=", "len", "(", "state_values", ")", "def", "inner_dict_constructor", "(", ")", ":", "return", "(", "[", "None", "]", "*", "len_state_values", ")", "results", "=", "defaultdict", "(", "inner_dict_constructor", ")", "for", "(", "sample_id", ",", "metadata", ")", "in", "sample_metadata", ".", "items", "(", ")", ":", "try", ":", "individual_id", "=", "metadata", "[", "individual_identifier_category", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "'%s is not a sample metadata category.'", "%", "individual_identifier_category", ")", ")", "try", ":", "state_value", "=", "metadata", "[", "state_category", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "'%s is not a sample metadata category.'", "%", "state_category", ")", ")", "try", ":", "state_index", "=", "state_values", ".", "index", "(", "state_value", ")", "except", "ValueError", ":", "continue", "results", "[", "individual_id", "]", "[", "state_index", "]", "=", "sample_id", "if", "filter_missing_data", ":", "for", "(", "individual_id", ",", "sample_ids", ")", "in", "results", ".", "items", "(", ")", ":", "if", "(", "None", "in", "sample_ids", ")", ":", "del", "results", "[", "individual_id", "]", "return", "results" ]
sample_metadata : 2d dictionary mapping sample ids to metadata state_category: metadata category name describing state of interest as a string state_values: ordered list of values of interest in the state_category metadata entry individual_identifier_category: metadata category name describing the individual as a string filter_missing_data: if true .
train
false
21,457
def _mk_client(): if ('cp.fileclient_{0}'.format(id(__opts__)) not in __context__): __context__['cp.fileclient_{0}'.format(id(__opts__))] = salt.fileclient.get_file_client(__opts__)
[ "def", "_mk_client", "(", ")", ":", "if", "(", "'cp.fileclient_{0}'", ".", "format", "(", "id", "(", "__opts__", ")", ")", "not", "in", "__context__", ")", ":", "__context__", "[", "'cp.fileclient_{0}'", ".", "format", "(", "id", "(", "__opts__", ")", ")", "]", "=", "salt", ".", "fileclient", ".", "get_file_client", "(", "__opts__", ")" ]
create a file client and add it to the context .
train
true