id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
44,880
def report_raw_stats(sect, stats, old_stats): total_lines = stats['total_lines'] if (not total_lines): raise EmptyReport() sect.description = ('%s lines have been analyzed' % total_lines) lines = ('type', 'number', '%', 'previous', 'difference') for node_type in ('code', 'docstring', 'comment', 'empty'): key = (node_type + '_lines') total = stats[key] percent = (float((total * 100)) / total_lines) old = old_stats.get(key, None) if (old is not None): diff_str = diff_string(old, total) else: (old, diff_str) = ('NC', 'NC') lines += (node_type, str(total), ('%.2f' % percent), str(old), diff_str) sect.append(Table(children=lines, cols=5, rheaders=1))
[ "def", "report_raw_stats", "(", "sect", ",", "stats", ",", "old_stats", ")", ":", "total_lines", "=", "stats", "[", "'total_lines'", "]", "if", "(", "not", "total_lines", ")", ":", "raise", "EmptyReport", "(", ")", "sect", ".", "description", "=", "(", "'%s lines have been analyzed'", "%", "total_lines", ")", "lines", "=", "(", "'type'", ",", "'number'", ",", "'%'", ",", "'previous'", ",", "'difference'", ")", "for", "node_type", "in", "(", "'code'", ",", "'docstring'", ",", "'comment'", ",", "'empty'", ")", ":", "key", "=", "(", "node_type", "+", "'_lines'", ")", "total", "=", "stats", "[", "key", "]", "percent", "=", "(", "float", "(", "(", "total", "*", "100", ")", ")", "/", "total_lines", ")", "old", "=", "old_stats", ".", "get", "(", "key", ",", "None", ")", "if", "(", "old", "is", "not", "None", ")", ":", "diff_str", "=", "diff_string", "(", "old", ",", "total", ")", "else", ":", "(", "old", ",", "diff_str", ")", "=", "(", "'NC'", ",", "'NC'", ")", "lines", "+=", "(", "node_type", ",", "str", "(", "total", ")", ",", "(", "'%.2f'", "%", "percent", ")", ",", "str", "(", "old", ")", ",", "diff_str", ")", "sect", ".", "append", "(", "Table", "(", "children", "=", "lines", ",", "cols", "=", "5", ",", "rheaders", "=", "1", ")", ")" ]
calculate percentage of code / doc / comment / empty .
train
false
44,881
def validate_required_if_set(other_field, **kwargs): def _validator(form, field): other_field_value = getattr(form, other_field).data if other_field_value: if ((field.data is None) or (isinstance(field.data, (str, unicode)) and (not field.data.strip())) or (isinstance(field.data, FileStorage) and (not field.data.filename.strip()))): raise validators.ValidationError(('This field is required if %s is set.' % other_field)) else: field.errors[:] = [] raise validators.StopValidation() return _validator
[ "def", "validate_required_if_set", "(", "other_field", ",", "**", "kwargs", ")", ":", "def", "_validator", "(", "form", ",", "field", ")", ":", "other_field_value", "=", "getattr", "(", "form", ",", "other_field", ")", ".", "data", "if", "other_field_value", ":", "if", "(", "(", "field", ".", "data", "is", "None", ")", "or", "(", "isinstance", "(", "field", ".", "data", ",", "(", "str", ",", "unicode", ")", ")", "and", "(", "not", "field", ".", "data", ".", "strip", "(", ")", ")", ")", "or", "(", "isinstance", "(", "field", ".", "data", ",", "FileStorage", ")", "and", "(", "not", "field", ".", "data", ".", "filename", ".", "strip", "(", ")", ")", ")", ")", ":", "raise", "validators", ".", "ValidationError", "(", "(", "'This field is required if %s is set.'", "%", "other_field", ")", ")", "else", ":", "field", ".", "errors", "[", ":", "]", "=", "[", "]", "raise", "validators", ".", "StopValidation", "(", ")", "return", "_validator" ]
used as a validator within a wtforms .
train
false
44,884
def get_price_infos(context, products, quantity=1): (mod, ctx) = _get_module_and_context(context) prices = mod.get_price_infos(ctx, products, quantity) for module in get_discount_modules(): prices = module.discount_prices(ctx, products, prices) return prices
[ "def", "get_price_infos", "(", "context", ",", "products", ",", "quantity", "=", "1", ")", ":", "(", "mod", ",", "ctx", ")", "=", "_get_module_and_context", "(", "context", ")", "prices", "=", "mod", ".", "get_price_infos", "(", "ctx", ",", "products", ",", "quantity", ")", "for", "module", "in", "get_discount_modules", "(", ")", ":", "prices", "=", "module", ".", "discount_prices", "(", "ctx", ",", "products", ",", "prices", ")", "return", "prices" ]
get priceinfo objects for a bunch of products .
train
false
44,885
def pbdn_seq(n, z): if (not (isscalar(n) and isscalar(z))): raise ValueError('arguments must be scalars.') if (floor(n) != n): raise ValueError('n must be an integer.') if (abs(n) <= 1): n1 = 1 else: n1 = n (cpb, cpd) = specfun.cpbdn(n1, z) return (cpb[:(n1 + 1)], cpd[:(n1 + 1)])
[ "def", "pbdn_seq", "(", "n", ",", "z", ")", ":", "if", "(", "not", "(", "isscalar", "(", "n", ")", "and", "isscalar", "(", "z", ")", ")", ")", ":", "raise", "ValueError", "(", "'arguments must be scalars.'", ")", "if", "(", "floor", "(", "n", ")", "!=", "n", ")", ":", "raise", "ValueError", "(", "'n must be an integer.'", ")", "if", "(", "abs", "(", "n", ")", "<=", "1", ")", ":", "n1", "=", "1", "else", ":", "n1", "=", "n", "(", "cpb", ",", "cpd", ")", "=", "specfun", ".", "cpbdn", "(", "n1", ",", "z", ")", "return", "(", "cpb", "[", ":", "(", "n1", "+", "1", ")", "]", ",", "cpd", "[", ":", "(", "n1", "+", "1", ")", "]", ")" ]
parabolic cylinder functions dn(z) and derivatives .
train
false
44,886
def reraise_as(new_exception_or_type): __traceback_hide__ = True (e_type, e_value, e_traceback) = sys.exc_info() if inspect.isclass(new_exception_or_type): new_type = new_exception_or_type new_exception = new_exception_or_type() else: new_type = type(new_exception_or_type) new_exception = new_exception_or_type new_exception.__cause__ = e_value try: six.reraise(new_type, new_exception, e_traceback) finally: del e_traceback
[ "def", "reraise_as", "(", "new_exception_or_type", ")", ":", "__traceback_hide__", "=", "True", "(", "e_type", ",", "e_value", ",", "e_traceback", ")", "=", "sys", ".", "exc_info", "(", ")", "if", "inspect", ".", "isclass", "(", "new_exception_or_type", ")", ":", "new_type", "=", "new_exception_or_type", "new_exception", "=", "new_exception_or_type", "(", ")", "else", ":", "new_type", "=", "type", "(", "new_exception_or_type", ")", "new_exception", "=", "new_exception_or_type", "new_exception", ".", "__cause__", "=", "e_value", "try", ":", "six", ".", "reraise", "(", "new_type", ",", "new_exception", ",", "e_traceback", ")", "finally", ":", "del", "e_traceback" ]
obtained from URL .
train
true
44,887
def clean_dict(data_dict): for (key, value) in data_dict.items(): if (not isinstance(value, list)): continue for inner_dict in value[:]: if isinstance(inner_dict, basestring): break if (not any(inner_dict.values())): value.remove(inner_dict) else: clean_dict(inner_dict) return data_dict
[ "def", "clean_dict", "(", "data_dict", ")", ":", "for", "(", "key", ",", "value", ")", "in", "data_dict", ".", "items", "(", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "list", ")", ")", ":", "continue", "for", "inner_dict", "in", "value", "[", ":", "]", ":", "if", "isinstance", "(", "inner_dict", ",", "basestring", ")", ":", "break", "if", "(", "not", "any", "(", "inner_dict", ".", "values", "(", ")", ")", ")", ":", "value", ".", "remove", "(", "inner_dict", ")", "else", ":", "clean_dict", "(", "inner_dict", ")", "return", "data_dict" ]
takes a dict and if any of the values are lists of dicts .
train
false
44,889
def getwriter(encoding): return lookup(encoding).streamwriter
[ "def", "getwriter", "(", "encoding", ")", ":", "return", "lookup", "(", "encoding", ")", ".", "streamwriter" ]
lookup up the codec for the given encoding and return its streamwriter class or factory function .
train
false
44,890
def update_user_attributes(user, profile, attributes_dict, save=False): profile_fields = [] if profile: profile_fields = [f.name for f in profile._meta.fields] user_fields = [f.name for f in user._meta.fields] is_profile_field = (lambda f: ((f in profile_fields) and hasattr(profile, f))) is_user_field = (lambda f: ((f in user_fields) and hasattr(user, f))) for (f, value) in attributes_dict.items(): if is_profile_field(f): setattr(profile, f, value) profile._fb_is_dirty = True elif is_user_field(f): setattr(user, f, value) user._fb_is_dirty = True else: logger.info('skipping update of field %s', f) if save: if getattr(user, '_fb_is_dirty', False): user.save() if (profile and getattr(profile, '_fb_is_dirty', False)): profile.save()
[ "def", "update_user_attributes", "(", "user", ",", "profile", ",", "attributes_dict", ",", "save", "=", "False", ")", ":", "profile_fields", "=", "[", "]", "if", "profile", ":", "profile_fields", "=", "[", "f", ".", "name", "for", "f", "in", "profile", ".", "_meta", ".", "fields", "]", "user_fields", "=", "[", "f", ".", "name", "for", "f", "in", "user", ".", "_meta", ".", "fields", "]", "is_profile_field", "=", "(", "lambda", "f", ":", "(", "(", "f", "in", "profile_fields", ")", "and", "hasattr", "(", "profile", ",", "f", ")", ")", ")", "is_user_field", "=", "(", "lambda", "f", ":", "(", "(", "f", "in", "user_fields", ")", "and", "hasattr", "(", "user", ",", "f", ")", ")", ")", "for", "(", "f", ",", "value", ")", "in", "attributes_dict", ".", "items", "(", ")", ":", "if", "is_profile_field", "(", "f", ")", ":", "setattr", "(", "profile", ",", "f", ",", "value", ")", "profile", ".", "_fb_is_dirty", "=", "True", "elif", "is_user_field", "(", "f", ")", ":", "setattr", "(", "user", ",", "f", ",", "value", ")", "user", ".", "_fb_is_dirty", "=", "True", "else", ":", "logger", ".", "info", "(", "'skipping update of field %s'", ",", "f", ")", "if", "save", ":", "if", "getattr", "(", "user", ",", "'_fb_is_dirty'", ",", "False", ")", ":", "user", ".", "save", "(", ")", "if", "(", "profile", "and", "getattr", "(", "profile", ",", "'_fb_is_dirty'", ",", "False", ")", ")", ":", "profile", ".", "save", "(", ")" ]
write the attributes either to the user or profile instance .
train
false
44,891
def clear_entries(options): with Session() as session: query = session.query(PendingEntry).filter((PendingEntry.approved == False)) if options.task_name: query = query.filter((PendingEntry.task_name == options.task_name)) deleted = query.delete() console((u'Successfully deleted %i pending entries' % deleted))
[ "def", "clear_entries", "(", "options", ")", ":", "with", "Session", "(", ")", "as", "session", ":", "query", "=", "session", ".", "query", "(", "PendingEntry", ")", ".", "filter", "(", "(", "PendingEntry", ".", "approved", "==", "False", ")", ")", "if", "options", ".", "task_name", ":", "query", "=", "query", ".", "filter", "(", "(", "PendingEntry", ".", "task_name", "==", "options", ".", "task_name", ")", ")", "deleted", "=", "query", ".", "delete", "(", ")", "console", "(", "(", "u'Successfully deleted %i pending entries'", "%", "deleted", ")", ")" ]
clear pending entries .
train
false
44,893
def _choose_existing_thread_for_gmail(message, db_session): prior_threads = db_session.query(ImapThread).filter_by(g_thrid=message.g_thrid, namespace_id=message.namespace_id).order_by(desc(ImapThread.recentdate)).all() if (not prior_threads): return None if (len(prior_threads) == 1): return prior_threads[0] if (not message.in_reply_to): return prior_threads[0] for prior_thread in prior_threads: prior_message_ids = [m.message_id_header for m in prior_thread.messages] if (message.in_reply_to in prior_message_ids): return prior_thread return prior_threads[0]
[ "def", "_choose_existing_thread_for_gmail", "(", "message", ",", "db_session", ")", ":", "prior_threads", "=", "db_session", ".", "query", "(", "ImapThread", ")", ".", "filter_by", "(", "g_thrid", "=", "message", ".", "g_thrid", ",", "namespace_id", "=", "message", ".", "namespace_id", ")", ".", "order_by", "(", "desc", "(", "ImapThread", ".", "recentdate", ")", ")", ".", "all", "(", ")", "if", "(", "not", "prior_threads", ")", ":", "return", "None", "if", "(", "len", "(", "prior_threads", ")", "==", "1", ")", ":", "return", "prior_threads", "[", "0", "]", "if", "(", "not", "message", ".", "in_reply_to", ")", ":", "return", "prior_threads", "[", "0", "]", "for", "prior_thread", "in", "prior_threads", ":", "prior_message_ids", "=", "[", "m", ".", "message_id_header", "for", "m", "in", "prior_thread", ".", "messages", "]", "if", "(", "message", ".", "in_reply_to", "in", "prior_message_ids", ")", ":", "return", "prior_thread", "return", "prior_threads", "[", "0", "]" ]
for gmail .
train
false
44,894
@_get_client def image_tag_create(client, image_id, value, session=None): return client.image_tag_create(image_id=image_id, value=value)
[ "@", "_get_client", "def", "image_tag_create", "(", "client", ",", "image_id", ",", "value", ",", "session", "=", "None", ")", ":", "return", "client", ".", "image_tag_create", "(", "image_id", "=", "image_id", ",", "value", "=", "value", ")" ]
create an image tag .
train
false
44,897
def UserEnum(): resume = 0 nuser = 0 while 1: (data, total, resume) = win32net.NetUserEnum(server, 3, win32netcon.FILTER_NORMAL_ACCOUNT, resume) verbose(('Call to NetUserEnum obtained %d entries of %d total' % (len(data), total))) for user in data: verbose(('Found user %s' % user['name'])) nuser = (nuser + 1) if (not resume): break assert nuser, 'Could not find any users!' print 'Enumerated all the local users'
[ "def", "UserEnum", "(", ")", ":", "resume", "=", "0", "nuser", "=", "0", "while", "1", ":", "(", "data", ",", "total", ",", "resume", ")", "=", "win32net", ".", "NetUserEnum", "(", "server", ",", "3", ",", "win32netcon", ".", "FILTER_NORMAL_ACCOUNT", ",", "resume", ")", "verbose", "(", "(", "'Call to NetUserEnum obtained %d entries of %d total'", "%", "(", "len", "(", "data", ")", ",", "total", ")", ")", ")", "for", "user", "in", "data", ":", "verbose", "(", "(", "'Found user %s'", "%", "user", "[", "'name'", "]", ")", ")", "nuser", "=", "(", "nuser", "+", "1", ")", "if", "(", "not", "resume", ")", ":", "break", "assert", "nuser", ",", "'Could not find any users!'", "print", "'Enumerated all the local users'" ]
enumerates all the local users .
train
false
44,899
def linux_hibernate(): if (not HAVE_DBUS): return (proxy, interface) = _get_sessionproxy() if proxy: if proxy.CanHibernate(): proxy.Hibernate(dbus_interface=interface) else: (proxy, interface, pinterface) = _get_systemproxy('UPower') if (not proxy): (proxy, interface, pinterface) = _get_systemproxy('DeviceKit') if proxy: if proxy.Get(interface, 'can-hibernate', dbus_interface=pinterface): try: proxy.Hibernate(dbus_interface=interface) except dbus.exceptions.DBusException as msg: logging.info('Received a DBus exception %s', msg) else: logging.info('DBus does not support Hibernate') time.sleep(10)
[ "def", "linux_hibernate", "(", ")", ":", "if", "(", "not", "HAVE_DBUS", ")", ":", "return", "(", "proxy", ",", "interface", ")", "=", "_get_sessionproxy", "(", ")", "if", "proxy", ":", "if", "proxy", ".", "CanHibernate", "(", ")", ":", "proxy", ".", "Hibernate", "(", "dbus_interface", "=", "interface", ")", "else", ":", "(", "proxy", ",", "interface", ",", "pinterface", ")", "=", "_get_systemproxy", "(", "'UPower'", ")", "if", "(", "not", "proxy", ")", ":", "(", "proxy", ",", "interface", ",", "pinterface", ")", "=", "_get_systemproxy", "(", "'DeviceKit'", ")", "if", "proxy", ":", "if", "proxy", ".", "Get", "(", "interface", ",", "'can-hibernate'", ",", "dbus_interface", "=", "pinterface", ")", ":", "try", ":", "proxy", ".", "Hibernate", "(", "dbus_interface", "=", "interface", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", "as", "msg", ":", "logging", ".", "info", "(", "'Received a DBus exception %s'", ",", "msg", ")", "else", ":", "logging", ".", "info", "(", "'DBus does not support Hibernate'", ")", "time", ".", "sleep", "(", "10", ")" ]
make linux system go into hibernate .
train
false
44,901
def catalog_item(): return s3_rest_controller('supply', 'catalog_item', csv_template=('supply', 'catalog_item'), csv_stylesheet=('supply', 'catalog_item.xsl'))
[ "def", "catalog_item", "(", ")", ":", "return", "s3_rest_controller", "(", "'supply'", ",", "'catalog_item'", ",", "csv_template", "=", "(", "'supply'", ",", "'catalog_item'", ")", ",", "csv_stylesheet", "=", "(", "'supply'", ",", "'catalog_item.xsl'", ")", ")" ]
restful crud controller - used for imports .
train
false
44,902
def _unquote_or_none(s): if (s is None): return s return escape.url_unescape(s, encoding=None, plus=False)
[ "def", "_unquote_or_none", "(", "s", ")", ":", "if", "(", "s", "is", "None", ")", ":", "return", "s", "return", "escape", ".", "url_unescape", "(", "s", ",", "encoding", "=", "None", ",", "plus", "=", "False", ")" ]
none-safe wrapper around url_unescape to handle unamteched optional groups correctly .
train
false
44,903
def schaffer(individual): return (sum((((((x ** 2) + (x1 ** 2)) ** 0.25) * ((sin((50 * (((x ** 2) + (x1 ** 2)) ** 0.1))) ** 2) + 1.0)) for (x, x1) in zip(individual[:(-1)], individual[1:]))),)
[ "def", "schaffer", "(", "individual", ")", ":", "return", "(", "sum", "(", "(", "(", "(", "(", "(", "x", "**", "2", ")", "+", "(", "x1", "**", "2", ")", ")", "**", "0.25", ")", "*", "(", "(", "sin", "(", "(", "50", "*", "(", "(", "(", "x", "**", "2", ")", "+", "(", "x1", "**", "2", ")", ")", "**", "0.1", ")", ")", ")", "**", "2", ")", "+", "1.0", ")", ")", "for", "(", "x", ",", "x1", ")", "in", "zip", "(", "individual", "[", ":", "(", "-", "1", ")", "]", ",", "individual", "[", "1", ":", "]", ")", ")", ")", ",", ")" ]
schaffer test objective function .
train
false
44,905
def descr(col): col_dtype = (u'O' if (col.info.dtype is None) else col.info.dtype) col_shape = (col.shape[1:] if hasattr(col, u'shape') else ()) return (col.info.name, col_dtype, col_shape)
[ "def", "descr", "(", "col", ")", ":", "col_dtype", "=", "(", "u'O'", "if", "(", "col", ".", "info", ".", "dtype", "is", "None", ")", "else", "col", ".", "info", ".", "dtype", ")", "col_shape", "=", "(", "col", ".", "shape", "[", "1", ":", "]", "if", "hasattr", "(", "col", ",", "u'shape'", ")", "else", "(", ")", ")", "return", "(", "col", ".", "info", ".", "name", ",", "col_dtype", ",", "col_shape", ")" ]
array-interface compliant full description of a column .
train
false
44,907
def process_all_json_files(version, build_dir=True): if build_dir: full_path = version.project.full_json_path(version.slug) else: full_path = version.project.get_production_media_path(type_='json', version_slug=version.slug, include_file=False) html_files = [] for (root, dirs, files) in os.walk(full_path): for filename in fnmatch.filter(files, '*.fjson'): if (filename in ['search.fjson', 'genindex.fjson', 'py-modindex.fjson']): continue html_files.append(os.path.join(root, filename)) page_list = [] for filename in html_files: try: result = process_file(filename) if result: page_list.append(result) except: pass return page_list
[ "def", "process_all_json_files", "(", "version", ",", "build_dir", "=", "True", ")", ":", "if", "build_dir", ":", "full_path", "=", "version", ".", "project", ".", "full_json_path", "(", "version", ".", "slug", ")", "else", ":", "full_path", "=", "version", ".", "project", ".", "get_production_media_path", "(", "type_", "=", "'json'", ",", "version_slug", "=", "version", ".", "slug", ",", "include_file", "=", "False", ")", "html_files", "=", "[", "]", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "full_path", ")", ":", "for", "filename", "in", "fnmatch", ".", "filter", "(", "files", ",", "'*.fjson'", ")", ":", "if", "(", "filename", "in", "[", "'search.fjson'", ",", "'genindex.fjson'", ",", "'py-modindex.fjson'", "]", ")", ":", "continue", "html_files", ".", "append", "(", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", ")", "page_list", "=", "[", "]", "for", "filename", "in", "html_files", ":", "try", ":", "result", "=", "process_file", "(", "filename", ")", "if", "result", ":", "page_list", ".", "append", "(", "result", ")", "except", ":", "pass", "return", "page_list" ]
return a list of pages to index .
train
false
44,908
def count_out_of_date(): es = es_utils.get_es() index_name = es_utils.write_index('default') settings = es.indices.get_settings(index_name).get(index_name, {}).get('settings', {}) synonym_key_re = re.compile('index\\.analysis\\.filter\\.synonyms-.*\\.synonyms\\.\\d+') synonyms_in_es = set() for (key, val) in settings.items(): if synonym_key_re.match(key): synonyms_in_es.add(val) synonyms_in_db = set((unicode(s) for s in Synonym.objects.all())) synonyms_to_add = (synonyms_in_db - synonyms_in_es) synonyms_to_remove = (synonyms_in_es - synonyms_in_db) if (synonyms_to_remove == set(['firefox => firefox'])): synonyms_to_remove = set() return (len(synonyms_to_add), len(synonyms_to_remove))
[ "def", "count_out_of_date", "(", ")", ":", "es", "=", "es_utils", ".", "get_es", "(", ")", "index_name", "=", "es_utils", ".", "write_index", "(", "'default'", ")", "settings", "=", "es", ".", "indices", ".", "get_settings", "(", "index_name", ")", ".", "get", "(", "index_name", ",", "{", "}", ")", ".", "get", "(", "'settings'", ",", "{", "}", ")", "synonym_key_re", "=", "re", ".", "compile", "(", "'index\\\\.analysis\\\\.filter\\\\.synonyms-.*\\\\.synonyms\\\\.\\\\d+'", ")", "synonyms_in_es", "=", "set", "(", ")", "for", "(", "key", ",", "val", ")", "in", "settings", ".", "items", "(", ")", ":", "if", "synonym_key_re", ".", "match", "(", "key", ")", ":", "synonyms_in_es", ".", "add", "(", "val", ")", "synonyms_in_db", "=", "set", "(", "(", "unicode", "(", "s", ")", "for", "s", "in", "Synonym", ".", "objects", ".", "all", "(", ")", ")", ")", "synonyms_to_add", "=", "(", "synonyms_in_db", "-", "synonyms_in_es", ")", "synonyms_to_remove", "=", "(", "synonyms_in_es", "-", "synonyms_in_db", ")", "if", "(", "synonyms_to_remove", "==", "set", "(", "[", "'firefox => firefox'", "]", ")", ")", ":", "synonyms_to_remove", "=", "set", "(", ")", "return", "(", "len", "(", "synonyms_to_add", ")", ",", "len", "(", "synonyms_to_remove", ")", ")" ]
count number of synonyms that differ between the database and es .
train
false
44,909
def NOASLR(v): context.defaults['aslr'] = (not asbool(v))
[ "def", "NOASLR", "(", "v", ")", ":", "context", ".", "defaults", "[", "'aslr'", "]", "=", "(", "not", "asbool", "(", "v", ")", ")" ]
disables aslr via context .
train
false
44,911
def check_log_file_level(this_level, data, context, echoerr): havemarks(this_level) hadproblem = False top_level = context[0][1].get(u'common', {}).get(u'log_level', u'WARNING') top_level_str = top_level top_level_mark = getattr(top_level, u'mark', None) if ((not isinstance(top_level, unicode)) or (not hasattr(logging, top_level)) or (not isinstance(this_level, unicode)) or (not hasattr(logging, this_level))): return (True, False, hadproblem) top_level = getattr(logging, top_level) this_level_str = this_level this_level_mark = this_level.mark this_level = getattr(logging, this_level) if (this_level < top_level): echoerr(context=u'Error while checking log level index (key {key})'.format(key=context.key), context_mark=this_level_mark, problem=u'found level that is less critical then top level ({0} < {0})'.format(this_level_str, top_level_str), problem_mark=top_level_mark) hadproblem = True return (True, False, hadproblem)
[ "def", "check_log_file_level", "(", "this_level", ",", "data", ",", "context", ",", "echoerr", ")", ":", "havemarks", "(", "this_level", ")", "hadproblem", "=", "False", "top_level", "=", "context", "[", "0", "]", "[", "1", "]", ".", "get", "(", "u'common'", ",", "{", "}", ")", ".", "get", "(", "u'log_level'", ",", "u'WARNING'", ")", "top_level_str", "=", "top_level", "top_level_mark", "=", "getattr", "(", "top_level", ",", "u'mark'", ",", "None", ")", "if", "(", "(", "not", "isinstance", "(", "top_level", ",", "unicode", ")", ")", "or", "(", "not", "hasattr", "(", "logging", ",", "top_level", ")", ")", "or", "(", "not", "isinstance", "(", "this_level", ",", "unicode", ")", ")", "or", "(", "not", "hasattr", "(", "logging", ",", "this_level", ")", ")", ")", ":", "return", "(", "True", ",", "False", ",", "hadproblem", ")", "top_level", "=", "getattr", "(", "logging", ",", "top_level", ")", "this_level_str", "=", "this_level", "this_level_mark", "=", "this_level", ".", "mark", "this_level", "=", "getattr", "(", "logging", ",", "this_level", ")", "if", "(", "this_level", "<", "top_level", ")", ":", "echoerr", "(", "context", "=", "u'Error while checking log level index (key {key})'", ".", "format", "(", "key", "=", "context", ".", "key", ")", ",", "context_mark", "=", "this_level_mark", ",", "problem", "=", "u'found level that is less critical then top level ({0} < {0})'", ".", "format", "(", "this_level_str", ",", "top_level_str", ")", ",", "problem_mark", "=", "top_level_mark", ")", "hadproblem", "=", "True", "return", "(", "True", ",", "False", ",", "hadproblem", ")" ]
check handler level specified in :ref:log_file key <config-common-log> this level must be greater or equal to the level in :ref:log_level key <config-common-log_level> .
train
false
44,912
def _add_to_path_envvar(directory): orig_path = os.environ.get('PATH', '') if directory: if (not os.path.isdir(directory)): log.error('The given parameter is not a directory') os.environ['PATH'] = '{0}{1}{2}'.format(orig_path, os.pathsep, directory) return orig_path
[ "def", "_add_to_path_envvar", "(", "directory", ")", ":", "orig_path", "=", "os", ".", "environ", ".", "get", "(", "'PATH'", ",", "''", ")", "if", "directory", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "directory", ")", ")", ":", "log", ".", "error", "(", "'The given parameter is not a directory'", ")", "os", ".", "environ", "[", "'PATH'", "]", "=", "'{0}{1}{2}'", ".", "format", "(", "orig_path", ",", "os", ".", "pathsep", ",", "directory", ")", "return", "orig_path" ]
adds directory to the path environment variable and returns the original one .
train
false
44,915
def _is_generator(obj): return isinstance(obj, types.GeneratorType)
[ "def", "_is_generator", "(", "obj", ")", ":", "return", "isinstance", "(", "obj", ",", "types", ".", "GeneratorType", ")" ]
helper to test for a generator object .
train
false
44,916
@not_implemented_for('undirected') @not_implemented_for('multigraph') def hamiltonian_path(G): if (len(G) == 0): return [] if (len(G) == 1): return [arbitrary_element(G)] v = arbitrary_element(G) hampath = hamiltonian_path(G.subgraph((set(G) - {v}))) index = index_satisfying(hampath, (lambda u: (v not in G[u]))) hampath.insert(index, v) return hampath
[ "@", "not_implemented_for", "(", "'undirected'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "hamiltonian_path", "(", "G", ")", ":", "if", "(", "len", "(", "G", ")", "==", "0", ")", ":", "return", "[", "]", "if", "(", "len", "(", "G", ")", "==", "1", ")", ":", "return", "[", "arbitrary_element", "(", "G", ")", "]", "v", "=", "arbitrary_element", "(", "G", ")", "hampath", "=", "hamiltonian_path", "(", "G", ".", "subgraph", "(", "(", "set", "(", "G", ")", "-", "{", "v", "}", ")", ")", ")", "index", "=", "index_satisfying", "(", "hampath", ",", "(", "lambda", "u", ":", "(", "v", "not", "in", "G", "[", "u", "]", ")", ")", ")", "hampath", ".", "insert", "(", "index", ",", "v", ")", "return", "hampath" ]
returns a hamiltonian path in the given tournament graph .
train
false
44,917
def to_flags(value): if ((value < 0) or (value > 4095)): raise ValueError('rcode must be >= 0 and <= 4095') v = (value & 15) ev = (long((value & 4080)) << 20) return (v, ev)
[ "def", "to_flags", "(", "value", ")", ":", "if", "(", "(", "value", "<", "0", ")", "or", "(", "value", ">", "4095", ")", ")", ":", "raise", "ValueError", "(", "'rcode must be >= 0 and <= 4095'", ")", "v", "=", "(", "value", "&", "15", ")", "ev", "=", "(", "long", "(", "(", "value", "&", "4080", ")", ")", "<<", "20", ")", "return", "(", "v", ",", "ev", ")" ]
return a tuple which encodes the rcode .
train
true
44,918
def chain_future(a, b): def copy(future): assert (future is a) if b.done(): return if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture) and (a.exc_info() is not None)): b.set_exc_info(a.exc_info()) elif (a.exception() is not None): b.set_exception(a.exception()) else: b.set_result(a.result()) a.add_done_callback(copy)
[ "def", "chain_future", "(", "a", ",", "b", ")", ":", "def", "copy", "(", "future", ")", ":", "assert", "(", "future", "is", "a", ")", "if", "b", ".", "done", "(", ")", ":", "return", "if", "(", "isinstance", "(", "a", ",", "TracebackFuture", ")", "and", "isinstance", "(", "b", ",", "TracebackFuture", ")", "and", "(", "a", ".", "exc_info", "(", ")", "is", "not", "None", ")", ")", ":", "b", ".", "set_exc_info", "(", "a", ".", "exc_info", "(", ")", ")", "elif", "(", "a", ".", "exception", "(", ")", "is", "not", "None", ")", ":", "b", ".", "set_exception", "(", "a", ".", "exception", "(", ")", ")", "else", ":", "b", ".", "set_result", "(", "a", ".", "result", "(", ")", ")", "a", ".", "add_done_callback", "(", "copy", ")" ]
chain two futures together so that when one completes .
train
true
44,920
def setRandomSeed(seed): random.seed(seed) numpy.random.seed(seed)
[ "def", "setRandomSeed", "(", "seed", ")", ":", "random", ".", "seed", "(", "seed", ")", "numpy", ".", "random", ".", "seed", "(", "seed", ")" ]
set the random seeds .
train
false
44,922
def _get_dashboard_link(course_key): analytics_dashboard_url = '{0}/courses/{1}'.format(settings.ANALYTICS_DASHBOARD_URL, unicode(course_key)) link = HTML(u'<a href="{0}" target="_blank">{1}</a>').format(analytics_dashboard_url, settings.ANALYTICS_DASHBOARD_NAME) return link
[ "def", "_get_dashboard_link", "(", "course_key", ")", ":", "analytics_dashboard_url", "=", "'{0}/courses/{1}'", ".", "format", "(", "settings", ".", "ANALYTICS_DASHBOARD_URL", ",", "unicode", "(", "course_key", ")", ")", "link", "=", "HTML", "(", "u'<a href=\"{0}\" target=\"_blank\">{1}</a>'", ")", ".", "format", "(", "analytics_dashboard_url", ",", "settings", ".", "ANALYTICS_DASHBOARD_NAME", ")", "return", "link" ]
construct a url to the external analytics dashboard .
train
false
44,923
def test_patched_errwindow(capfd, mocker, monkeypatch): monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.hexversion', 50331648) monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.exit', (lambda status: None)) try: import tkinter except ImportError: tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk', spec=['withdraw'], new_callable=mocker.Mock) msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox', spec=['showerror']) else: tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk', autospec=True) msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox', autospec=True) checkpyver.check_python_version() (stdout, stderr) = capfd.readouterr() assert (not stdout) assert (not stderr) tk_mock.assert_called_with() tk_mock().withdraw.assert_called_with() msgbox_mock.showerror.assert_called_with('qutebrowser: Fatal error!', unittest.mock.ANY)
[ "def", "test_patched_errwindow", "(", "capfd", ",", "mocker", ",", "monkeypatch", ")", ":", "monkeypatch", ".", "setattr", "(", "'qutebrowser.misc.checkpyver.sys.hexversion'", ",", "50331648", ")", "monkeypatch", ".", "setattr", "(", "'qutebrowser.misc.checkpyver.sys.exit'", ",", "(", "lambda", "status", ":", "None", ")", ")", "try", ":", "import", "tkinter", "except", "ImportError", ":", "tk_mock", "=", "mocker", ".", "patch", "(", "'qutebrowser.misc.checkpyver.Tk'", ",", "spec", "=", "[", "'withdraw'", "]", ",", "new_callable", "=", "mocker", ".", "Mock", ")", "msgbox_mock", "=", "mocker", ".", "patch", "(", "'qutebrowser.misc.checkpyver.messagebox'", ",", "spec", "=", "[", "'showerror'", "]", ")", "else", ":", "tk_mock", "=", "mocker", ".", "patch", "(", "'qutebrowser.misc.checkpyver.Tk'", ",", "autospec", "=", "True", ")", "msgbox_mock", "=", "mocker", ".", "patch", "(", "'qutebrowser.misc.checkpyver.messagebox'", ",", "autospec", "=", "True", ")", "checkpyver", ".", "check_python_version", "(", ")", "(", "stdout", ",", "stderr", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "not", "stdout", ")", "assert", "(", "not", "stderr", ")", "tk_mock", ".", "assert_called_with", "(", ")", "tk_mock", "(", ")", ".", "withdraw", ".", "assert_called_with", "(", ")", "msgbox_mock", ".", "showerror", ".", "assert_called_with", "(", "'qutebrowser: Fatal error!'", ",", "unittest", ".", "mock", ".", "ANY", ")" ]
test with a patched sys .
train
false
44,924
def ir2tf(imp_resp, shape, dim=None, is_real=True): if (not dim): dim = imp_resp.ndim irpadded = np.zeros(shape) irpadded[tuple([slice(0, s) for s in imp_resp.shape])] = imp_resp for (axis, axis_size) in enumerate(imp_resp.shape): if (axis >= (imp_resp.ndim - dim)): irpadded = np.roll(irpadded, shift=(- int(np.floor((axis_size / 2)))), axis=axis) if is_real: return np.fft.rfftn(irpadded, axes=range((- dim), 0)) else: return np.fft.fftn(irpadded, axes=range((- dim), 0))
[ "def", "ir2tf", "(", "imp_resp", ",", "shape", ",", "dim", "=", "None", ",", "is_real", "=", "True", ")", ":", "if", "(", "not", "dim", ")", ":", "dim", "=", "imp_resp", ".", "ndim", "irpadded", "=", "np", ".", "zeros", "(", "shape", ")", "irpadded", "[", "tuple", "(", "[", "slice", "(", "0", ",", "s", ")", "for", "s", "in", "imp_resp", ".", "shape", "]", ")", "]", "=", "imp_resp", "for", "(", "axis", ",", "axis_size", ")", "in", "enumerate", "(", "imp_resp", ".", "shape", ")", ":", "if", "(", "axis", ">=", "(", "imp_resp", ".", "ndim", "-", "dim", ")", ")", ":", "irpadded", "=", "np", ".", "roll", "(", "irpadded", ",", "shift", "=", "(", "-", "int", "(", "np", ".", "floor", "(", "(", "axis_size", "/", "2", ")", ")", ")", ")", ",", "axis", "=", "axis", ")", "if", "is_real", ":", "return", "np", ".", "fft", ".", "rfftn", "(", "irpadded", ",", "axes", "=", "range", "(", "(", "-", "dim", ")", ",", "0", ")", ")", "else", ":", "return", "np", ".", "fft", ".", "fftn", "(", "irpadded", ",", "axes", "=", "range", "(", "(", "-", "dim", ")", ",", "0", ")", ")" ]
compute the transfer function of an impulse response .
train
false
44,928
def _validate_tag_sets(tag_sets): if (tag_sets is None): return tag_sets if (not isinstance(tag_sets, list)): raise TypeError(('Tag sets %r invalid, must be a list' % (tag_sets,))) if (len(tag_sets) == 0): raise ValueError(('Tag sets %r invalid, must be None or contain at least one set of tags' % (tag_sets,))) for tags in tag_sets: if (not isinstance(tags, Mapping)): raise TypeError(('Tag set %r invalid, must be an instance of dict, bson.son.SON or other type that inherits from collection.Mapping' % (tags,))) return tag_sets
[ "def", "_validate_tag_sets", "(", "tag_sets", ")", ":", "if", "(", "tag_sets", "is", "None", ")", ":", "return", "tag_sets", "if", "(", "not", "isinstance", "(", "tag_sets", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "(", "'Tag sets %r invalid, must be a list'", "%", "(", "tag_sets", ",", ")", ")", ")", "if", "(", "len", "(", "tag_sets", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "(", "'Tag sets %r invalid, must be None or contain at least one set of tags'", "%", "(", "tag_sets", ",", ")", ")", ")", "for", "tags", "in", "tag_sets", ":", "if", "(", "not", "isinstance", "(", "tags", ",", "Mapping", ")", ")", ":", "raise", "TypeError", "(", "(", "'Tag set %r invalid, must be an instance of dict, bson.son.SON or other type that inherits from collection.Mapping'", "%", "(", "tags", ",", ")", ")", ")", "return", "tag_sets" ]
validate tag sets for a mongoreplicasetclient .
train
true
44,929
def _try_decode(byte_string): try: return str_cls(byte_string, _encoding) except UnicodeDecodeError: for encoding in _fallback_encodings: try: return str_cls(byte_string, encoding, errors=u'strict') except UnicodeDecodeError: pass return str_cls(byte_string, errors=u'replace')
[ "def", "_try_decode", "(", "byte_string", ")", ":", "try", ":", "return", "str_cls", "(", "byte_string", ",", "_encoding", ")", "except", "UnicodeDecodeError", ":", "for", "encoding", "in", "_fallback_encodings", ":", "try", ":", "return", "str_cls", "(", "byte_string", ",", "encoding", ",", "errors", "=", "u'strict'", ")", "except", "UnicodeDecodeError", ":", "pass", "return", "str_cls", "(", "byte_string", ",", "errors", "=", "u'replace'", ")" ]
tries decoding a byte string from the os into a unicode string .
train
true
44,930
def only_xmodules(identifier, entry_points): from_xmodule = [entry_point for entry_point in entry_points if (entry_point.dist.key == 'xmodule')] return default_select(identifier, from_xmodule)
[ "def", "only_xmodules", "(", "identifier", ",", "entry_points", ")", ":", "from_xmodule", "=", "[", "entry_point", "for", "entry_point", "in", "entry_points", "if", "(", "entry_point", ".", "dist", ".", "key", "==", "'xmodule'", ")", "]", "return", "default_select", "(", "identifier", ",", "from_xmodule", ")" ]
only use entry_points that are supplied by the xmodule package .
train
false
44,931
@frappe.whitelist() def get_student_group_students(student_group): students = frappe.get_list(u'Student Group Student', fields=[u'student', u'student_name'], filters={u'parent': student_group, u'active': 1}, order_by=u'idx') return students
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_student_group_students", "(", "student_group", ")", ":", "students", "=", "frappe", ".", "get_list", "(", "u'Student Group Student'", ",", "fields", "=", "[", "u'student'", ",", "u'student_name'", "]", ",", "filters", "=", "{", "u'parent'", ":", "student_group", ",", "u'active'", ":", "1", "}", ",", "order_by", "=", "u'idx'", ")", "return", "students" ]
returns list of student .
train
false
44,932
def _get_wmi_setting(wmi_class_name, setting, server): with salt.utils.winapi.Com(): try: connection = wmi.WMI(namespace=_WMI_NAMESPACE) wmi_class = getattr(connection, wmi_class_name) objs = wmi_class([setting], Name=server)[0] ret = getattr(objs, setting) except wmi.x_wmi as error: _LOG.error('Encountered WMI error: %s', error.com_error) except (AttributeError, IndexError) as error: _LOG.error('Error getting %s: %s', wmi_class_name, error) return ret
[ "def", "_get_wmi_setting", "(", "wmi_class_name", ",", "setting", ",", "server", ")", ":", "with", "salt", ".", "utils", ".", "winapi", ".", "Com", "(", ")", ":", "try", ":", "connection", "=", "wmi", ".", "WMI", "(", "namespace", "=", "_WMI_NAMESPACE", ")", "wmi_class", "=", "getattr", "(", "connection", ",", "wmi_class_name", ")", "objs", "=", "wmi_class", "(", "[", "setting", "]", ",", "Name", "=", "server", ")", "[", "0", "]", "ret", "=", "getattr", "(", "objs", ",", "setting", ")", "except", "wmi", ".", "x_wmi", "as", "error", ":", "_LOG", ".", "error", "(", "'Encountered WMI error: %s'", ",", "error", ".", "com_error", ")", "except", "(", "AttributeError", ",", "IndexError", ")", "as", "error", ":", "_LOG", ".", "error", "(", "'Error getting %s: %s'", ",", "wmi_class_name", ",", "error", ")", "return", "ret" ]
get the value of the setting for the provided class .
train
true
44,934
def test_output(): rtmbot = init_rtmbot() slackclient_mock = create_autospec(SlackClient) server_mock = create_autospec(_server.Server) slackclient_mock.server = server_mock searchlist_mock = create_autospec(_util.SearchList) server_mock.channels = searchlist_mock channel_mock = create_autospec(_channel.Channel) slackclient_mock.server.channels.find.return_value = channel_mock rtmbot.slack_client = slackclient_mock plugin_mock = create_autospec(Plugin) plugin_mock.do_output.return_value = [['C12345678', 'test message']] rtmbot.bot_plugins.append(plugin_mock) rtmbot.output() channel_mock.send_message.assert_called_with('test message') channel_mock.reset_mock() plugin_mock.reset_mock() plugin_mock.do_output.return_value = [['C12345678', '\xf0\x9f\x9a\x80 testing']] rtmbot.output() channel_mock.send_message.assert_called_with('\xf0\x9f\x9a\x80 testing') channel_mock.reset_mock() plugin_mock.reset_mock() plugin_mock.do_output.return_value = [['C12345678', '\xc3\xb9 h\xc5\x93\xc3\xb83\xc3\xb6']] rtmbot.output() channel_mock.send_message.assert_called_with('\xc3\xb9 h\xc5\x93\xc3\xb83\xc3\xb6')
[ "def", "test_output", "(", ")", ":", "rtmbot", "=", "init_rtmbot", "(", ")", "slackclient_mock", "=", "create_autospec", "(", "SlackClient", ")", "server_mock", "=", "create_autospec", "(", "_server", ".", "Server", ")", "slackclient_mock", ".", "server", "=", "server_mock", "searchlist_mock", "=", "create_autospec", "(", "_util", ".", "SearchList", ")", "server_mock", ".", "channels", "=", "searchlist_mock", "channel_mock", "=", "create_autospec", "(", "_channel", ".", "Channel", ")", "slackclient_mock", ".", "server", ".", "channels", ".", "find", ".", "return_value", "=", "channel_mock", "rtmbot", ".", "slack_client", "=", "slackclient_mock", "plugin_mock", "=", "create_autospec", "(", "Plugin", ")", "plugin_mock", ".", "do_output", ".", "return_value", "=", "[", "[", "'C12345678'", ",", "'test message'", "]", "]", "rtmbot", ".", "bot_plugins", ".", "append", "(", "plugin_mock", ")", "rtmbot", ".", "output", "(", ")", "channel_mock", ".", "send_message", ".", "assert_called_with", "(", "'test message'", ")", "channel_mock", ".", "reset_mock", "(", ")", "plugin_mock", ".", "reset_mock", "(", ")", "plugin_mock", ".", "do_output", ".", "return_value", "=", "[", "[", "'C12345678'", ",", "'\\xf0\\x9f\\x9a\\x80 testing'", "]", "]", "rtmbot", ".", "output", "(", ")", "channel_mock", ".", "send_message", ".", "assert_called_with", "(", "'\\xf0\\x9f\\x9a\\x80 testing'", ")", "channel_mock", ".", "reset_mock", "(", ")", "plugin_mock", ".", "reset_mock", "(", ")", "plugin_mock", ".", "do_output", ".", "return_value", "=", "[", "[", "'C12345678'", ",", "'\\xc3\\xb9 h\\xc5\\x93\\xc3\\xb83\\xc3\\xb6'", "]", "]", "rtmbot", ".", "output", "(", ")", "channel_mock", ".", "send_message", ".", "assert_called_with", "(", "'\\xc3\\xb9 h\\xc5\\x93\\xc3\\xb83\\xc3\\xb6'", ")" ]
test that sending a message behaves as expected .
train
false
44,935
def create_socket(): if has_ipv6: sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) if hasattr(socket, u'IPPROTO_IPV6'): sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) elif (sys.platform == u'win32'): sock.setsockopt(41, 27, 0) else: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) return sock
[ "def", "create_socket", "(", ")", ":", "if", "has_ipv6", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET6", ",", "socket", ".", "SOCK_STREAM", ")", "if", "hasattr", "(", "socket", ",", "u'IPPROTO_IPV6'", ")", ":", "sock", ".", "setsockopt", "(", "socket", ".", "IPPROTO_IPV6", ",", "socket", ".", "IPV6_V6ONLY", ",", "0", ")", "elif", "(", "sys", ".", "platform", "==", "u'win32'", ")", ":", "sock", ".", "setsockopt", "(", "41", ",", "27", ",", "0", ")", "else", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "return", "sock" ]
create a tcp socket with or without ipv6 depending on system support .
train
false
44,936
def GetUserSecretsManager(can_prompt=None): assert options.options.devbox, 'User secrets manager is only available in --devbox mode.' global _user_secrets_manager if (_user_secrets_manager is None): _user_secrets_manager = SecretsManager('user', options.options.domain, options.options.user_secrets_dir) prompt = (can_prompt if (can_prompt is not None) else sys.stderr.isatty()) _user_secrets_manager.Init(can_prompt=prompt) return _user_secrets_manager
[ "def", "GetUserSecretsManager", "(", "can_prompt", "=", "None", ")", ":", "assert", "options", ".", "options", ".", "devbox", ",", "'User secrets manager is only available in --devbox mode.'", "global", "_user_secrets_manager", "if", "(", "_user_secrets_manager", "is", "None", ")", ":", "_user_secrets_manager", "=", "SecretsManager", "(", "'user'", ",", "options", ".", "options", ".", "domain", ",", "options", ".", "options", ".", "user_secrets_dir", ")", "prompt", "=", "(", "can_prompt", "if", "(", "can_prompt", "is", "not", "None", ")", "else", "sys", ".", "stderr", ".", "isatty", "(", ")", ")", "_user_secrets_manager", ".", "Init", "(", "can_prompt", "=", "prompt", ")", "return", "_user_secrets_manager" ]
returns the user secrets manager .
train
false
44,940
def test_jacobian_disconnected_inputs(): v1 = tensor.vector() v2 = tensor.vector() jacobian_v = theano.gradient.jacobian((1 + v1), v2, disconnected_inputs='ignore') func_v = theano.function([v1, v2], jacobian_v) val = numpy.arange(4.0).astype(theano.config.floatX) assert numpy.allclose(func_v(val, val), numpy.zeros((4, 4))) s1 = tensor.scalar() s2 = tensor.scalar() jacobian_s = theano.gradient.jacobian((1 + s1), s2, disconnected_inputs='ignore') func_s = theano.function([s2], jacobian_s) val = numpy.array(1.0).astype(theano.config.floatX) assert numpy.allclose(func_s(val), numpy.zeros(1))
[ "def", "test_jacobian_disconnected_inputs", "(", ")", ":", "v1", "=", "tensor", ".", "vector", "(", ")", "v2", "=", "tensor", ".", "vector", "(", ")", "jacobian_v", "=", "theano", ".", "gradient", ".", "jacobian", "(", "(", "1", "+", "v1", ")", ",", "v2", ",", "disconnected_inputs", "=", "'ignore'", ")", "func_v", "=", "theano", ".", "function", "(", "[", "v1", ",", "v2", "]", ",", "jacobian_v", ")", "val", "=", "numpy", ".", "arange", "(", "4.0", ")", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")", "assert", "numpy", ".", "allclose", "(", "func_v", "(", "val", ",", "val", ")", ",", "numpy", ".", "zeros", "(", "(", "4", ",", "4", ")", ")", ")", "s1", "=", "tensor", ".", "scalar", "(", ")", "s2", "=", "tensor", ".", "scalar", "(", ")", "jacobian_s", "=", "theano", ".", "gradient", ".", "jacobian", "(", "(", "1", "+", "s1", ")", ",", "s2", ",", "disconnected_inputs", "=", "'ignore'", ")", "func_s", "=", "theano", ".", "function", "(", "[", "s2", "]", ",", "jacobian_s", ")", "val", "=", "numpy", ".", "array", "(", "1.0", ")", ".", "astype", "(", "theano", ".", "config", ".", "floatX", ")", "assert", "numpy", ".", "allclose", "(", "func_s", "(", "val", ")", ",", "numpy", ".", "zeros", "(", "1", ")", ")" ]
test that disconnected inputs are properly handled by jacobian .
train
false
44,941
def _get_entities(kind, namespace, order, start, count): query = datastore.Query(kind, _namespace=namespace) if order: if order.startswith('-'): direction = datastore.Query.DESCENDING order = order[1:] else: direction = datastore.Query.ASCENDING query.Order((order, direction)) total = query.Count() entities = query.Get(count, start) return (entities, total)
[ "def", "_get_entities", "(", "kind", ",", "namespace", ",", "order", ",", "start", ",", "count", ")", ":", "query", "=", "datastore", ".", "Query", "(", "kind", ",", "_namespace", "=", "namespace", ")", "if", "order", ":", "if", "order", ".", "startswith", "(", "'-'", ")", ":", "direction", "=", "datastore", ".", "Query", ".", "DESCENDING", "order", "=", "order", "[", "1", ":", "]", "else", ":", "direction", "=", "datastore", ".", "Query", ".", "ASCENDING", "query", ".", "Order", "(", "(", "order", ",", "direction", ")", ")", "total", "=", "query", ".", "Count", "(", ")", "entities", "=", "query", ".", "Get", "(", "count", ",", "start", ")", "return", "(", "entities", ",", "total", ")" ]
returns a list and a count of entities of the given kind .
train
false
44,942
def stopped(): if (not is_stopped()): stop('shorewall')
[ "def", "stopped", "(", ")", ":", "if", "(", "not", "is_stopped", "(", ")", ")", ":", "stop", "(", "'shorewall'", ")" ]
ensure that the firewall is stopped .
train
false
44,944
@decorator def verbose(function, *args, **kwargs): arg_names = _get_args(function) default_level = verbose_level = None if ((len(arg_names) > 0) and (arg_names[0] == 'self')): default_level = getattr(args[0], 'verbose', None) if ('verbose' in arg_names): verbose_level = args[arg_names.index('verbose')] elif ('verbose' in kwargs): verbose_level = kwargs.pop('verbose') verbose_level = (default_level if (verbose_level is None) else verbose_level) if (verbose_level is not None): with use_log_level(verbose_level): return function(*args, **kwargs) return function(*args, **kwargs)
[ "@", "decorator", "def", "verbose", "(", "function", ",", "*", "args", ",", "**", "kwargs", ")", ":", "arg_names", "=", "_get_args", "(", "function", ")", "default_level", "=", "verbose_level", "=", "None", "if", "(", "(", "len", "(", "arg_names", ")", ">", "0", ")", "and", "(", "arg_names", "[", "0", "]", "==", "'self'", ")", ")", ":", "default_level", "=", "getattr", "(", "args", "[", "0", "]", ",", "'verbose'", ",", "None", ")", "if", "(", "'verbose'", "in", "arg_names", ")", ":", "verbose_level", "=", "args", "[", "arg_names", ".", "index", "(", "'verbose'", ")", "]", "elif", "(", "'verbose'", "in", "kwargs", ")", ":", "verbose_level", "=", "kwargs", ".", "pop", "(", "'verbose'", ")", "verbose_level", "=", "(", "default_level", "if", "(", "verbose_level", "is", "None", ")", "else", "verbose_level", ")", "if", "(", "verbose_level", "is", "not", "None", ")", ":", "with", "use_log_level", "(", "verbose_level", ")", ":", "return", "function", "(", "*", "args", ",", "**", "kwargs", ")", "return", "function", "(", "*", "args", ",", "**", "kwargs", ")" ]
verbose decorator to allow functions to override log-level .
train
false
44,945
def _macro_defn_action(_s, _l, tokens): assert (len(tokens) == 3) assert (tokens[0] == u'@') return {tokens[1]: tokens[2]}
[ "def", "_macro_defn_action", "(", "_s", ",", "_l", ",", "tokens", ")", ":", "assert", "(", "len", "(", "tokens", ")", "==", "3", ")", "assert", "(", "tokens", "[", "0", "]", "==", "u'@'", ")", "return", "{", "tokens", "[", "1", "]", ":", "tokens", "[", "2", "]", "}" ]
builds a dictionary structure which defines the given macro .
train
false
44,948
def changed_locations(a, include_first): if (a.ndim > 1): raise ValueError('indices_of_changed_values only supports 1D arrays.') indices = (flatnonzero(diff(a)) + 1) if (not include_first): return indices return hstack([[0], indices])
[ "def", "changed_locations", "(", "a", ",", "include_first", ")", ":", "if", "(", "a", ".", "ndim", ">", "1", ")", ":", "raise", "ValueError", "(", "'indices_of_changed_values only supports 1D arrays.'", ")", "indices", "=", "(", "flatnonzero", "(", "diff", "(", "a", ")", ")", "+", "1", ")", "if", "(", "not", "include_first", ")", ":", "return", "indices", "return", "hstack", "(", "[", "[", "0", "]", ",", "indices", "]", ")" ]
compute indices of values in a that differ from the previous value .
train
true
44,949
@pytest.mark.django_db def test_boolean_field_choices_spanning_relations(): class Table(tables.Table, ): boolean = tables.BooleanColumn(accessor=u'occupation.boolean_with_choices') class Meta: model = Person model_true = Occupation.objects.create(name=u'true-name', boolean_with_choices=True) model_false = Occupation.objects.create(name=u'false-name', boolean_with_choices=False) table = Table([Person(first_name=u'True', last_name=u'False', occupation=model_true), Person(first_name=u'True', last_name=u'False', occupation=model_false)]) assert (table.rows[0].get_cell(u'boolean') == u'<span class="true">\u2714</span>') assert (table.rows[1].get_cell(u'boolean') == u'<span class="false">\u2718</span>')
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_boolean_field_choices_spanning_relations", "(", ")", ":", "class", "Table", "(", "tables", ".", "Table", ",", ")", ":", "boolean", "=", "tables", ".", "BooleanColumn", "(", "accessor", "=", "u'occupation.boolean_with_choices'", ")", "class", "Meta", ":", "model", "=", "Person", "model_true", "=", "Occupation", ".", "objects", ".", "create", "(", "name", "=", "u'true-name'", ",", "boolean_with_choices", "=", "True", ")", "model_false", "=", "Occupation", ".", "objects", ".", "create", "(", "name", "=", "u'false-name'", ",", "boolean_with_choices", "=", "False", ")", "table", "=", "Table", "(", "[", "Person", "(", "first_name", "=", "u'True'", ",", "last_name", "=", "u'False'", ",", "occupation", "=", "model_true", ")", ",", "Person", "(", "first_name", "=", "u'True'", ",", "last_name", "=", "u'False'", ",", "occupation", "=", "model_false", ")", "]", ")", "assert", "(", "table", ".", "rows", "[", "0", "]", ".", "get_cell", "(", "u'boolean'", ")", "==", "u'<span class=\"true\">\\u2714</span>'", ")", "assert", "(", "table", ".", "rows", "[", "1", "]", ".", "get_cell", "(", "u'boolean'", ")", "==", "u'<span class=\"false\">\\u2718</span>'", ")" ]
the inverse lookup voor boolean choices should also work on related models .
train
false
44,951
def _compute_min_std_IQR(data): s1 = np.std(data, axis=0) q75 = mquantiles(data, 0.75, axis=0).data[0] q25 = mquantiles(data, 0.25, axis=0).data[0] s2 = ((q75 - q25) / 1.349) dispersion = np.minimum(s1, s2) return dispersion
[ "def", "_compute_min_std_IQR", "(", "data", ")", ":", "s1", "=", "np", ".", "std", "(", "data", ",", "axis", "=", "0", ")", "q75", "=", "mquantiles", "(", "data", ",", "0.75", ",", "axis", "=", "0", ")", ".", "data", "[", "0", "]", "q25", "=", "mquantiles", "(", "data", ",", "0.25", ",", "axis", "=", "0", ")", ".", "data", "[", "0", "]", "s2", "=", "(", "(", "q75", "-", "q25", ")", "/", "1.349", ")", "dispersion", "=", "np", ".", "minimum", "(", "s1", ",", "s2", ")", "return", "dispersion" ]
compute minimum of std and iqr for each variable .
train
false
44,954
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialize the global qwebsettings .
train
false
44,955
def max_edge(g, src, dst, n): w1 = g[n].get(src, {'weight': (- np.inf)})['weight'] w2 = g[n].get(dst, {'weight': (- np.inf)})['weight'] return {'weight': max(w1, w2)}
[ "def", "max_edge", "(", "g", ",", "src", ",", "dst", ",", "n", ")", ":", "w1", "=", "g", "[", "n", "]", ".", "get", "(", "src", ",", "{", "'weight'", ":", "(", "-", "np", ".", "inf", ")", "}", ")", "[", "'weight'", "]", "w2", "=", "g", "[", "n", "]", ".", "get", "(", "dst", ",", "{", "'weight'", ":", "(", "-", "np", ".", "inf", ")", "}", ")", "[", "'weight'", "]", "return", "{", "'weight'", ":", "max", "(", "w1", ",", "w2", ")", "}" ]
callback to handle merging nodes by choosing maximum weight .
train
false
44,956
def normalize_rest_paragraph(text, line_len=80, indent=''): toreport = '' lines = [] line_len = (line_len - len(indent)) for line in text.splitlines(): line = (toreport + _NORM_SPACES_RGX.sub(' ', line.strip())) toreport = '' while (len(line) > line_len): (line, toreport) = splittext(line, line_len) lines.append((indent + line)) if toreport: line = (toreport + ' ') toreport = '' else: line = '' if line: lines.append((indent + line.strip())) return linesep.join(lines)
[ "def", "normalize_rest_paragraph", "(", "text", ",", "line_len", "=", "80", ",", "indent", "=", "''", ")", ":", "toreport", "=", "''", "lines", "=", "[", "]", "line_len", "=", "(", "line_len", "-", "len", "(", "indent", ")", ")", "for", "line", "in", "text", ".", "splitlines", "(", ")", ":", "line", "=", "(", "toreport", "+", "_NORM_SPACES_RGX", ".", "sub", "(", "' '", ",", "line", ".", "strip", "(", ")", ")", ")", "toreport", "=", "''", "while", "(", "len", "(", "line", ")", ">", "line_len", ")", ":", "(", "line", ",", "toreport", ")", "=", "splittext", "(", "line", ",", "line_len", ")", "lines", ".", "append", "(", "(", "indent", "+", "line", ")", ")", "if", "toreport", ":", "line", "=", "(", "toreport", "+", "' '", ")", "toreport", "=", "''", "else", ":", "line", "=", "''", "if", "line", ":", "lines", ".", "append", "(", "(", "indent", "+", "line", ".", "strip", "(", ")", ")", ")", "return", "linesep", ".", "join", "(", "lines", ")" ]
normalize a rest text to display it with a maximum line size and optionally arbitrary indentation .
train
false
44,958
def role_present(name, profile=None, **connection_args): ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'Role "{0}" already exists'.format(name)} role = __salt__['keystone.role_get'](name=name, profile=profile, **connection_args) if ('Error' not in role): return ret else: if __opts__.get('test'): ret['result'] = None ret['comment'] = 'Role "{0}" will be added'.format(name) return ret __salt__['keystone.role_create'](name, profile=profile, **connection_args) ret['comment'] = 'Role "{0}" has been added'.format(name) ret['changes']['Role'] = 'Created' return ret
[ "def", "role_present", "(", "name", ",", "profile", "=", "None", ",", "**", "connection_args", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "'Role \"{0}\" already exists'", ".", "format", "(", "name", ")", "}", "role", "=", "__salt__", "[", "'keystone.role_get'", "]", "(", "name", "=", "name", ",", "profile", "=", "profile", ",", "**", "connection_args", ")", "if", "(", "'Error'", "not", "in", "role", ")", ":", "return", "ret", "else", ":", "if", "__opts__", ".", "get", "(", "'test'", ")", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Role \"{0}\" will be added'", ".", "format", "(", "name", ")", "return", "ret", "__salt__", "[", "'keystone.role_create'", "]", "(", "name", ",", "profile", "=", "profile", ",", "**", "connection_args", ")", "ret", "[", "'comment'", "]", "=", "'Role \"{0}\" has been added'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "[", "'Role'", "]", "=", "'Created'", "return", "ret" ]
ensures that the keystone role exists name the name of the role that should be present .
train
true
44,960
@csrf_exempt def roles(request): if (settings.AUTH_IP_WHITELIST and (not (get_client_ip(request) in settings.AUTH_IP_WHITELIST))): return HttpResponse(json.dumps({'error': 'unauthorized_request'}), status=403, content_type='application/json') groups = [group.name for group in Group.objects.all()] groups.append('admin') return HttpResponse(json.dumps({'groups': groups}), content_type='application/json')
[ "@", "csrf_exempt", "def", "roles", "(", "request", ")", ":", "if", "(", "settings", ".", "AUTH_IP_WHITELIST", "and", "(", "not", "(", "get_client_ip", "(", "request", ")", "in", "settings", ".", "AUTH_IP_WHITELIST", ")", ")", ")", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'error'", ":", "'unauthorized_request'", "}", ")", ",", "status", "=", "403", ",", "content_type", "=", "'application/json'", ")", "groups", "=", "[", "group", ".", "name", "for", "group", "in", "Group", ".", "objects", ".", "all", "(", ")", "]", "groups", ".", "append", "(", "'admin'", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'groups'", ":", "groups", "}", ")", ",", "content_type", "=", "'application/json'", ")" ]
check ip whitelist / blacklist .
train
false
44,961
def get_geolocation(all_the_ip_address): print 'Getting geo information...' updated_addresses = [] counter = 1 header_row = all_the_ip_address.pop(0) header_row.extend(['Country', 'City']) for line in all_the_ip_address: print 'Grabbing geo info for row # {0}'.format(counter) r = requests.get('https://freegeoip.net/json/{0}'.format(line[0])) line.extend([str(r.json()['country_name']), str(r.json()['city'])]) updated_addresses.append(line) counter += 1 updated_addresses.insert(0, header_row) return updated_addresses
[ "def", "get_geolocation", "(", "all_the_ip_address", ")", ":", "print", "'Getting geo information...'", "updated_addresses", "=", "[", "]", "counter", "=", "1", "header_row", "=", "all_the_ip_address", ".", "pop", "(", "0", ")", "header_row", ".", "extend", "(", "[", "'Country'", ",", "'City'", "]", ")", "for", "line", "in", "all_the_ip_address", ":", "print", "'Grabbing geo info for row # {0}'", ".", "format", "(", "counter", ")", "r", "=", "requests", ".", "get", "(", "'https://freegeoip.net/json/{0}'", ".", "format", "(", "line", "[", "0", "]", ")", ")", "line", ".", "extend", "(", "[", "str", "(", "r", ".", "json", "(", ")", "[", "'country_name'", "]", ")", ",", "str", "(", "r", ".", "json", "(", ")", "[", "'city'", "]", ")", "]", ")", "updated_addresses", ".", "append", "(", "line", ")", "counter", "+=", "1", "updated_addresses", ".", "insert", "(", "0", ",", "header_row", ")", "return", "updated_addresses" ]
given a list of lists from get_addresses() .
train
false
44,962
def get_server_time(): return datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
[ "def", "get_server_time", "(", ")", ":", "return", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y-%m-%d %H:%M'", ")" ]
return server current time .
train
false
44,963
def disp(name=None, idx=None): return CMADataLogger((name if name else 'outcmaes')).disp(idx)
[ "def", "disp", "(", "name", "=", "None", ",", "idx", "=", "None", ")", ":", "return", "CMADataLogger", "(", "(", "name", "if", "name", "else", "'outcmaes'", ")", ")", ".", "disp", "(", "idx", ")" ]
displays selected data from the class cmadatalogger .
train
true
44,964
def setOs():
    """Derive and record the back-end DBMS operating system.

    Reads the previously gathered banner fingerprint (kb.bannerFp),
    stores the OS name/version/service pack on Backend/kb, logs a
    summary line and persists the OS in the hash database.
    """
    infoMsg = ''
    # Nothing to do until a banner fingerprint has been gathered.
    if (not kb.bannerFp):
        return
    if ('type' in kb.bannerFp):
        Backend.setOs(Format.humanize(kb.bannerFp['type']))
        infoMsg = ('the back-end DBMS operating system is %s' % Backend.getOs())
    if ('distrib' in kb.bannerFp):
        kb.osVersion = Format.humanize(kb.bannerFp['distrib'])
        infoMsg += (' %s' % kb.osVersion)
    if ('sp' in kb.bannerFp):
        # Banner carries e.g. 'Service Pack 2'; keep only the number.
        kb.osSP = int(Format.humanize(kb.bannerFp['sp']).replace('Service Pack ', ''))
    elif (('sp' not in kb.bannerFp) and Backend.isOs(OS.WINDOWS)):
        # Windows with no SP info in the banner is assumed to be SP0.
        kb.osSP = 0
    # NOTE(review): kb.osSP == 0 is falsy, so 'Service Pack 0' is never
    # appended here -- presumably intentional, but worth confirming.
    if (Backend.getOs() and kb.osVersion and kb.osSP):
        infoMsg += (' Service Pack %d' % kb.osSP)
    if infoMsg:
        logger.info(infoMsg)
    hashDBWrite(HASHDB_KEYS.OS, Backend.getOs())
[ "def", "setOs", "(", ")", ":", "infoMsg", "=", "''", "if", "(", "not", "kb", ".", "bannerFp", ")", ":", "return", "if", "(", "'type'", "in", "kb", ".", "bannerFp", ")", ":", "Backend", ".", "setOs", "(", "Format", ".", "humanize", "(", "kb", ".", "bannerFp", "[", "'type'", "]", ")", ")", "infoMsg", "=", "(", "'the back-end DBMS operating system is %s'", "%", "Backend", ".", "getOs", "(", ")", ")", "if", "(", "'distrib'", "in", "kb", ".", "bannerFp", ")", ":", "kb", ".", "osVersion", "=", "Format", ".", "humanize", "(", "kb", ".", "bannerFp", "[", "'distrib'", "]", ")", "infoMsg", "+=", "(", "' %s'", "%", "kb", ".", "osVersion", ")", "if", "(", "'sp'", "in", "kb", ".", "bannerFp", ")", ":", "kb", ".", "osSP", "=", "int", "(", "Format", ".", "humanize", "(", "kb", ".", "bannerFp", "[", "'sp'", "]", ")", ".", "replace", "(", "'Service Pack '", ",", "''", ")", ")", "elif", "(", "(", "'sp'", "not", "in", "kb", ".", "bannerFp", ")", "and", "Backend", ".", "isOs", "(", "OS", ".", "WINDOWS", ")", ")", ":", "kb", ".", "osSP", "=", "0", "if", "(", "Backend", ".", "getOs", "(", ")", "and", "kb", ".", "osVersion", "and", "kb", ".", "osSP", ")", ":", "infoMsg", "+=", "(", "' Service Pack %d'", "%", "kb", ".", "osSP", ")", "if", "infoMsg", ":", "logger", ".", "info", "(", "infoMsg", ")", "hashDBWrite", "(", "HASHDB_KEYS", ".", "OS", ",", "Backend", ".", "getOs", "(", ")", ")" ]
example of kb .
train
false
44,965
def _getDateList(numSamples, startDatetime):
    """Generate a list of *numSamples* datetimes, one minute apart,
    starting one minute after *startDatetime*."""
    step = datetime.timedelta(minutes=1)
    return [startDatetime + step * (i + 1) for i in range(numSamples)]
[ "def", "_getDateList", "(", "numSamples", ",", "startDatetime", ")", ":", "dateList", "=", "[", "]", "td", "=", "datetime", ".", "timedelta", "(", "minutes", "=", "1", ")", "curDate", "=", "(", "startDatetime", "+", "td", ")", "for", "_", "in", "range", "(", "numSamples", ")", ":", "dateList", ".", "append", "(", "curDate", ")", "curDate", "=", "(", "curDate", "+", "td", ")", "return", "dateList" ]
generate a sequence of sample dates starting at startdatetime and incrementing every minute .
train
false
44,966
def isTipcAvailable():
    """Return True if the kernel TIPC module is loaded and this Python
    has socket.AF_TIPC support; otherwise False.

    The TIPC module is not loaded automatically on Ubuntu (and probably
    other Linux distros), so /proc/modules is checked explicitly.
    """
    if (not hasattr(socket, 'AF_TIPC')):
        return False
    if (not os.path.isfile('/proc/modules')):
        return False
    with open('/proc/modules') as f:
        for line in f:
            # Each /proc/modules line starts with the module name.
            if line.startswith('tipc '):
                return True
    if test_support.verbose:
        print "TIPC module is not loaded, please 'sudo modprobe tipc'"
    return False
[ "def", "isTipcAvailable", "(", ")", ":", "if", "(", "not", "hasattr", "(", "socket", ",", "'AF_TIPC'", ")", ")", ":", "return", "False", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "'/proc/modules'", ")", ")", ":", "return", "False", "with", "open", "(", "'/proc/modules'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "line", ".", "startswith", "(", "'tipc '", ")", ":", "return", "True", "if", "test_support", ".", "verbose", ":", "print", "\"TIPC module is not loaded, please 'sudo modprobe tipc'\"", "return", "False" ]
check if the tipc module is loaded the tipc module is not loaded automatically on ubuntu and probably other linux distros .
train
false
44,967
def assign_space(total_size, specs):
    """Satisfy all the PartitionSpecs in *specs* according to their weight.

    Each spec receives a whole number of blocks proportional to its share
    of the remaining weight; the remaining size and weight shrink after
    every assignment so later specs divide what is left.
    """
    remaining_weight = 0.0
    for spec in specs:
        remaining_weight += float(spec.weight)
    remaining_size = total_size
    for spec in specs:
        share = float(spec.weight) / remaining_weight
        num_blocks = int(share * (remaining_size / float(spec.size)))
        spec.set_count(num_blocks)
        remaining_size -= num_blocks * spec.size
        remaining_weight -= spec.weight
[ "def", "assign_space", "(", "total_size", ",", "specs", ")", ":", "total_weight", "=", "0.0", "for", "spec", "in", "specs", ":", "total_weight", "+=", "float", "(", "spec", ".", "weight", ")", "for", "spec", "in", "specs", ":", "num_blocks", "=", "int", "(", "(", "(", "float", "(", "spec", ".", "weight", ")", "/", "total_weight", ")", "*", "(", "total_size", "/", "float", "(", "spec", ".", "size", ")", ")", ")", ")", "spec", ".", "set_count", "(", "num_blocks", ")", "total_size", "-=", "(", "num_blocks", "*", "spec", ".", "size", ")", "total_weight", "-=", "spec", ".", "weight" ]
satisfy all the partitionspecs according to their weight .
train
false
44,968
def create_context(options=None, passthru_args=None, target_roots=None, build_graph=None, build_file_parser=None, address_mapper=None, console_outstream=None, workspace=None):
    """Create a TestContext with no options or targets by default.

    All keyword arguments are forwarded to the TestContext constructor;
    a dummy run tracker is always supplied.
    """
    resolved_options = create_options(options or {}, passthru_args=passthru_args)
    tracker = TestContext.DummyRunTracker()
    roots = maybe_list(target_roots, Target) if target_roots else []
    return TestContext(options=resolved_options, run_tracker=tracker, target_roots=roots, build_graph=build_graph, build_file_parser=build_file_parser, address_mapper=address_mapper, console_outstream=console_outstream, workspace=workspace)
[ "def", "create_context", "(", "options", "=", "None", ",", "passthru_args", "=", "None", ",", "target_roots", "=", "None", ",", "build_graph", "=", "None", ",", "build_file_parser", "=", "None", ",", "address_mapper", "=", "None", ",", "console_outstream", "=", "None", ",", "workspace", "=", "None", ")", ":", "options", "=", "create_options", "(", "(", "options", "or", "{", "}", ")", ",", "passthru_args", "=", "passthru_args", ")", "run_tracker", "=", "TestContext", ".", "DummyRunTracker", "(", ")", "target_roots", "=", "(", "maybe_list", "(", "target_roots", ",", "Target", ")", "if", "target_roots", "else", "[", "]", ")", "return", "TestContext", "(", "options", "=", "options", ",", "run_tracker", "=", "run_tracker", ",", "target_roots", "=", "target_roots", ",", "build_graph", "=", "build_graph", ",", "build_file_parser", "=", "build_file_parser", ",", "address_mapper", "=", "address_mapper", ",", "console_outstream", "=", "console_outstream", ",", "workspace", "=", "workspace", ")" ]
creates a context with no options or targets by default .
train
false
44,971
def closeness_vitality(G, node=None, weight=None, wiener_index=None):
    """Return the closeness vitality for nodes in the graph *G*.

    If *node* is given, return that single node's vitality: the change
    in the Wiener index when the node is removed.  Otherwise return a
    dict mapping every node to its vitality.
    """
    if wiener_index is None:
        wiener_index = nx.wiener_index(G, weight=weight)
    if node is not None:
        remaining = set(G) - {node}
        after = nx.wiener_index(G.subgraph(remaining), weight=weight)
        return wiener_index - after
    # Reuse the already-computed Wiener index for every per-node call.
    vitality = partial(closeness_vitality, G, weight=weight, wiener_index=wiener_index)
    return dict((v, vitality(node=v)) for v in G)
[ "def", "closeness_vitality", "(", "G", ",", "node", "=", "None", ",", "weight", "=", "None", ",", "wiener_index", "=", "None", ")", ":", "if", "(", "wiener_index", "is", "None", ")", ":", "wiener_index", "=", "nx", ".", "wiener_index", "(", "G", ",", "weight", "=", "weight", ")", "if", "(", "node", "is", "not", "None", ")", ":", "after", "=", "nx", ".", "wiener_index", "(", "G", ".", "subgraph", "(", "(", "set", "(", "G", ")", "-", "{", "node", "}", ")", ")", ",", "weight", "=", "weight", ")", "return", "(", "wiener_index", "-", "after", ")", "vitality", "=", "partial", "(", "closeness_vitality", ",", "G", ",", "weight", "=", "weight", ",", "wiener_index", "=", "wiener_index", ")", "return", "{", "v", ":", "vitality", "(", "node", "=", "v", ")", "for", "v", "in", "G", "}" ]
returns the closeness vitality for nodes in the graph .
train
false
44,972
def ParseResponseEx(response, select_default=False, form_parser_class=FormParser, request_class=_request.Request, entitydefs=None, encoding=DEFAULT_ENCODING, _urljoin=urlparse.urljoin, _urlparse=urlparse.urlparse, _urlunparse=urlparse.urlunparse):
    """Identical to ParseResponse, but operates on an urlopen-style
    response object directly, taking the base URI from response.geturl()."""
    base_uri = response.geturl()
    return _ParseFileEx(response, base_uri, select_default, False, form_parser_class, request_class, entitydefs, False, encoding, _urljoin=_urljoin, _urlparse=_urlparse, _urlunparse=_urlunparse)
[ "def", "ParseResponseEx", "(", "response", ",", "select_default", "=", "False", ",", "form_parser_class", "=", "FormParser", ",", "request_class", "=", "_request", ".", "Request", ",", "entitydefs", "=", "None", ",", "encoding", "=", "DEFAULT_ENCODING", ",", "_urljoin", "=", "urlparse", ".", "urljoin", ",", "_urlparse", "=", "urlparse", ".", "urlparse", ",", "_urlunparse", "=", "urlparse", ".", "urlunparse", ")", ":", "return", "_ParseFileEx", "(", "response", ",", "response", ".", "geturl", "(", ")", ",", "select_default", ",", "False", ",", "form_parser_class", ",", "request_class", ",", "entitydefs", ",", "False", ",", "encoding", ",", "_urljoin", "=", "_urljoin", ",", "_urlparse", "=", "_urlparse", ",", "_urlunparse", "=", "_urlunparse", ")" ]
identical to parseresponse .
train
false
44,973
def get_locally_formatted_datetime(datetime):
    """Return *datetime* converted to local time and formatted for the
    currently active Babel locale."""
    local_value = localtime(datetime)
    return format_datetime(local_value, locale=get_current_babel_locale())
[ "def", "get_locally_formatted_datetime", "(", "datetime", ")", ":", "return", "format_datetime", "(", "localtime", "(", "datetime", ")", ",", "locale", "=", "get_current_babel_locale", "(", ")", ")" ]
return a formatted .
train
false
44,974
def _client():
    """Return the cached fileclient keyed on the current __opts__."""
    _mk_client()
    cache_key = 'cp.fileclient_{0}'.format(id(__opts__))
    return __context__[cache_key]
[ "def", "_client", "(", ")", ":", "_mk_client", "(", ")", "return", "__context__", "[", "'cp.fileclient_{0}'", ".", "format", "(", "id", "(", "__opts__", ")", ")", "]" ]
return a client .
train
false
44,975
def _MSVSOnly(tool, name, setting_type):
    """Define a setting that is only found in MSVS.

    The setting validates normally under MSVS but translates to nothing
    when converting to MSBuild.
    """
    def _Translate(unused_value, unused_msbuild_settings):
        # Intentionally a no-op: the setting has no MSBuild equivalent.
        pass
    _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
    _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
[ "def", "_MSVSOnly", "(", "tool", ",", "name", ",", "setting_type", ")", ":", "def", "_Translate", "(", "unused_value", ",", "unused_msbuild_settings", ")", ":", "pass", "_msvs_validators", "[", "tool", ".", "msvs_name", "]", "[", "name", "]", "=", "setting_type", ".", "ValidateMSVS", "_msvs_to_msbuild_converters", "[", "tool", ".", "msvs_name", "]", "[", "name", "]", "=", "_Translate" ]
defines a setting that is only found in msvs .
train
false
44,976
def plugins_dict(module, plugin_type_identifier):
    """Walk all classes exported by submodules of *module* and index the
    ones labelled with *plugin_type_identifier*.

    Each submodule's __all__ may contain either class names (strings) or
    the class objects themselves.  Returns a dict mapping the value of a
    class's plugin-type attribute to the class, so plugins can later be
    constructed by type.
    """
    plugin_dict = {}
    for plugin_module in submodules(module):
        for clazz in getattr(plugin_module, '__all__', []):
            try:
                clazz = getattr(plugin_module, clazz)
            except TypeError:
                # __all__ held the class object itself, not its name;
                # keep it as-is (the original's `clazz = clazz` was a no-op).
                pass
            plugin_type = getattr(clazz, plugin_type_identifier, None)
            if plugin_type:
                plugin_dict[plugin_type] = clazz
    return plugin_dict
[ "def", "plugins_dict", "(", "module", ",", "plugin_type_identifier", ")", ":", "plugin_dict", "=", "{", "}", "for", "plugin_module", "in", "submodules", "(", "module", ")", ":", "for", "clazz", "in", "getattr", "(", "plugin_module", ",", "'__all__'", ",", "[", "]", ")", ":", "try", ":", "clazz", "=", "getattr", "(", "plugin_module", ",", "clazz", ")", "except", "TypeError", ":", "clazz", "=", "clazz", "plugin_type", "=", "getattr", "(", "clazz", ",", "plugin_type_identifier", ",", "None", ")", "if", "plugin_type", ":", "plugin_dict", "[", "plugin_type", "]", "=", "clazz", "return", "plugin_dict" ]
walk through all classes in submodules of module and find ones labelled with specified plugin_type_identifier and throw in a dictionary to allow constructions from plugins by these types later on .
train
false
44,977
def webhook_notification(version, build, hook_url):
    """Send a webhook notification for a project build.

    POSTs a JSON payload with the project name/slug and the build's id,
    success flag and date to *hook_url*.
    """
    project = version.project
    data = json.dumps({'name': project.name, 'slug': project.slug, 'build': {'id': build.id, 'success': build.success, 'date': build.date.strftime('%Y-%m-%d %H:%M:%S')}})
    log.debug(LOG_TEMPLATE.format(project=project.slug, version='', msg=('sending notification to: %s' % hook_url)))
    # NOTE(review): no timeout is passed to requests.post, so a hung
    # endpoint blocks this call indefinitely -- consider adding one.
    requests.post(hook_url, data=data)
[ "def", "webhook_notification", "(", "version", ",", "build", ",", "hook_url", ")", ":", "project", "=", "version", ".", "project", "data", "=", "json", ".", "dumps", "(", "{", "'name'", ":", "project", ".", "name", ",", "'slug'", ":", "project", ".", "slug", ",", "'build'", ":", "{", "'id'", ":", "build", ".", "id", ",", "'success'", ":", "build", ".", "success", ",", "'date'", ":", "build", ".", "date", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ")", "}", "}", ")", "log", ".", "debug", "(", "LOG_TEMPLATE", ".", "format", "(", "project", "=", "project", ".", "slug", ",", "version", "=", "''", ",", "msg", "=", "(", "'sending notification to: %s'", "%", "hook_url", ")", ")", ")", "requests", ".", "post", "(", "hook_url", ",", "data", "=", "data", ")" ]
send webhook notification for project webhook .
train
false
44,978
def test_cpaste():
    """Test the cpaste magic: pasted blocks with prompt markers must
    execute, while lines that merely mention the function must not."""
    def runf():
        """Marker function: sets a flag when executed."""
        ip.user_ns['code_ran'] = True
        return 'runf'
    ip.user_ns['runf'] = runf
    pass_cases = ['runf()', 'In [1]: runf()', 'In [1]: if 1:\n    ...: runf()', '> > > runf()', '>>> runf()', '   >>> runf()']
    fail_cases = ['1 + runf()', '++ runf()']
    for code in pass_cases:
        check_cpaste(code)
    for code in fail_cases:
        check_cpaste(code, should_fail=True)
[ "def", "test_cpaste", "(", ")", ":", "def", "runf", "(", ")", ":", "ip", ".", "user_ns", "[", "'code_ran'", "]", "=", "True", "return", "'runf'", "tests", "=", "{", "'pass'", ":", "[", "'runf()'", ",", "'In [1]: runf()'", ",", "'In [1]: if 1:\\n ...: runf()'", ",", "'> > > runf()'", ",", "'>>> runf()'", ",", "' >>> runf()'", "]", ",", "'fail'", ":", "[", "'1 + runf()'", ",", "'++ runf()'", "]", "}", "ip", ".", "user_ns", "[", "'runf'", "]", "=", "runf", "for", "code", "in", "tests", "[", "'pass'", "]", ":", "check_cpaste", "(", "code", ")", "for", "code", "in", "tests", "[", "'fail'", "]", ":", "check_cpaste", "(", "code", ",", "should_fail", "=", "True", ")" ]
test cpaste magic .
train
false
44,980
def verify_history(client, fragment, design=None, reverse=False, server_name='beeswax'):
    """Verify that *fragment* (and optionally *design*) appear in the
    query-history page, returning the number of history entries.

    With reverse=True, assert the fragment/design are absent instead.
    Returns -1 when the history size cannot be determined.
    """
    resp = client.get(('/%(server_name)s/query_history' % {'server_name': server_name}))
    my_assert = assert_false if reverse else assert_true
    my_assert(fragment in resp.content, resp.content)
    if design:
        my_assert(design in resp.content, resp.content)
    if resp.context:
        try:
            return len(resp.context['page'].object_list)
        except KeyError:
            pass
    LOG.warn('Cannot find history size. Response context clobbered')
    return (-1)
[ "def", "verify_history", "(", "client", ",", "fragment", ",", "design", "=", "None", ",", "reverse", "=", "False", ",", "server_name", "=", "'beeswax'", ")", ":", "resp", "=", "client", ".", "get", "(", "(", "'/%(server_name)s/query_history'", "%", "{", "'server_name'", ":", "server_name", "}", ")", ")", "my_assert", "=", "(", "(", "reverse", "and", "assert_false", ")", "or", "assert_true", ")", "my_assert", "(", "(", "fragment", "in", "resp", ".", "content", ")", ",", "resp", ".", "content", ")", "if", "design", ":", "my_assert", "(", "(", "design", "in", "resp", ".", "content", ")", ",", "resp", ".", "content", ")", "if", "resp", ".", "context", ":", "try", ":", "return", "len", "(", "resp", ".", "context", "[", "'page'", "]", ".", "object_list", ")", "except", "KeyError", ":", "pass", "LOG", ".", "warn", "(", "'Cannot find history size. Response context clobbered'", ")", "return", "(", "-", "1", ")" ]
verify that the query fragment and/or design are in the query history .
train
false
44,981
def __set(collection, item, _sa_initiator=None):
    """Run set events for *item* entering *collection*.

    Fires the adapter's append event unless _sa_initiator is False, and
    returns the (possibly replaced) item.
    """
    if _sa_initiator is False:
        # Event firing explicitly suppressed by the caller.
        return item
    executor = collection._sa_adapter
    if executor:
        item = executor.fire_append_event(item, _sa_initiator)
    return item
[ "def", "__set", "(", "collection", ",", "item", ",", "_sa_initiator", "=", "None", ")", ":", "if", "(", "_sa_initiator", "is", "not", "False", ")", ":", "executor", "=", "collection", ".", "_sa_adapter", "if", "executor", ":", "item", "=", "executor", ".", "fire_append_event", "(", "item", ",", "_sa_initiator", ")", "return", "item" ]
run set events .
train
false
44,983
def is_valid_mac(mac_string):
    """Return True if *mac_string* matches the module-level _mac_pattern
    (a MAC address of the form e.g. 00:11:22:33:aa:bb)."""
    # bool() collapses the match-object-or-None result into the same
    # True/False values the original if/return pair produced.
    return bool(_mac_pattern.match(mac_string))
[ "def", "is_valid_mac", "(", "mac_string", ")", ":", "if", "(", "not", "_mac_pattern", ".", "match", "(", "mac_string", ")", ")", ":", "return", "False", "return", "True" ]
return true if mac_string is of form eg 00:11:22:33:aa:bb .
train
false
44,985
def to_text(value):
    """Convert a type value to text.

    Known values map to their mnemonic via _by_value; unknown ones render
    as 'TYPE<n>'.  Raises ValueError if value is outside 0..65535.
    """
    if ((value < 0) or (value > 65535)):
        raise ValueError('type must be between >= 0 and <= 65535')
    text = _by_value.get(value)
    if (text is None):
        # repr() replaces the Python-2-only backtick syntax; for ints it
        # yields the identical string and also works on Python 3.
        text = ('TYPE' + repr(value))
    return text
[ "def", "to_text", "(", "value", ")", ":", "if", "(", "(", "value", "<", "0", ")", "or", "(", "value", ">", "65535", ")", ")", ":", "raise", "ValueError", "(", "'type must be between >= 0 and <= 65535'", ")", "text", "=", "_by_value", ".", "get", "(", "value", ")", "if", "(", "text", "is", "None", ")", ":", "text", "=", "(", "'TYPE'", "+", " ", "`", "value", "`", ")", "return", "text" ]
convert an opcode to text .
train
true
44,986
def get_features():
    """Return the FEATURES variable via get_var.

    Lists features set on the system or in a package.
    """
    return get_var('FEATURES')
[ "def", "get_features", "(", ")", ":", "return", "get_var", "(", "'FEATURES'", ")" ]
list features on the system or in a package args: package : the full path to the package .
train
false
44,988
def getCentersFromIntersectionLoops(circleIntersectionLoops, radius):
    """Get the centers from each of the circle intersection loops."""
    return [getCentersFromIntersectionLoop(loop, radius) for loop in circleIntersectionLoops]
[ "def", "getCentersFromIntersectionLoops", "(", "circleIntersectionLoops", ",", "radius", ")", ":", "centers", "=", "[", "]", "for", "circleIntersectionLoop", "in", "circleIntersectionLoops", ":", "centers", ".", "append", "(", "getCentersFromIntersectionLoop", "(", "circleIntersectionLoop", ",", "radius", ")", ")", "return", "centers" ]
get the centers from the intersection loops .
train
false
44,989
def show_lowstate():
    """Compile and return the low data (state chunks) that would be
    applied to this minion over salt-ssh."""
    # The SSH highstate compiler reads grains out of __opts__.
    __opts__['grains'] = __grains__
    st_ = salt.client.ssh.state.SSHHighState(__opts__, __pillar__, __salt__, __context__['fileclient'])
    return st_.compile_low_chunks()
[ "def", "show_lowstate", "(", ")", ":", "__opts__", "[", "'grains'", "]", "=", "__grains__", "st_", "=", "salt", ".", "client", ".", "ssh", ".", "state", ".", "SSHHighState", "(", "__opts__", ",", "__pillar__", ",", "__salt__", ",", "__context__", "[", "'fileclient'", "]", ")", "return", "st_", ".", "compile_low_chunks", "(", ")" ]
list out the low data that will be applied to this minion cli example: .
train
false
44,992
def safe_dump(data, stream=None, **kwds):
    """Serialize a single Python object into a YAML stream using the
    SafeDumper; extra keyword arguments are forwarded to dump_all."""
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
[ "def", "safe_dump", "(", "data", ",", "stream", "=", "None", ",", "**", "kwds", ")", ":", "return", "dump_all", "(", "[", "data", "]", ",", "stream", ",", "Dumper", "=", "SafeDumper", ",", "**", "kwds", ")" ]
serialize a python object into a yaml stream .
train
true
44,993
def add_operation(feed, activities, trim=True, batch_interface=None):
    """Add *activities* to *feed* via add_many, logging timing details.

    Lives at module level so task code can reference it directly.
    """
    elapsed = timer()
    logger.debug('running %s.add_many operation for %s activities batch interface %s and trim %s', feed, len(activities), batch_interface, trim)
    feed.add_many(activities, batch_interface=batch_interface, trim=trim)
    logger.debug('add many operation took %s seconds', elapsed.next())
[ "def", "add_operation", "(", "feed", ",", "activities", ",", "trim", "=", "True", ",", "batch_interface", "=", "None", ")", ":", "t", "=", "timer", "(", ")", "msg_format", "=", "'running %s.add_many operation for %s activities batch interface %s and trim %s'", "logger", ".", "debug", "(", "msg_format", ",", "feed", ",", "len", "(", "activities", ")", ",", "batch_interface", ",", "trim", ")", "feed", ".", "add_many", "(", "activities", ",", "batch_interface", "=", "batch_interface", ",", "trim", "=", "trim", ")", "logger", ".", "debug", "(", "'add many operation took %s seconds'", ",", "t", ".", "next", "(", ")", ")" ]
add the activities to the feed functions used in tasks need to be at the main level of the module .
train
false
44,994
def escape2null(text):
    """Return a string with escape-backslashes converted to nulls.

    Every backslash becomes '\\x00' and the character that followed it is
    kept, so later processing can distinguish escaped characters from
    literal ones.
    """
    chunks = []
    remainder = text
    while True:
        (before, backslash, after) = remainder.partition('\\')
        if not backslash:
            # No more backslashes: keep the tail and finish.
            chunks.append(before)
            return ''.join(chunks)
        chunks.append(before)
        # Null marker plus the (possibly empty) escaped character.
        chunks.append('\x00' + after[:1])
        remainder = after[1:]
[ "def", "escape2null", "(", "text", ")", ":", "parts", "=", "[", "]", "start", "=", "0", "while", "1", ":", "found", "=", "text", ".", "find", "(", "'\\\\'", ",", "start", ")", "if", "(", "found", "==", "(", "-", "1", ")", ")", ":", "parts", ".", "append", "(", "text", "[", "start", ":", "]", ")", "return", "''", ".", "join", "(", "parts", ")", "parts", ".", "append", "(", "text", "[", "start", ":", "found", "]", ")", "parts", ".", "append", "(", "(", "'\\x00'", "+", "text", "[", "(", "found", "+", "1", ")", ":", "(", "found", "+", "2", ")", "]", ")", ")", "start", "=", "(", "found", "+", "2", ")" ]
return a string with escape-backslashes converted to nulls .
train
false
44,995
def rgw_create(**kwargs):
    """Create an rgw; all keyword arguments are forwarded to
    ceph_cfg.rgw_create."""
    return ceph_cfg.rgw_create(**kwargs)
[ "def", "rgw_create", "(", "**", "kwargs", ")", ":", "return", "ceph_cfg", ".", "rgw_create", "(", "**", "kwargs", ")" ]
create a rgw cli example: .
train
false
44,996
def _strip_comment_tags(comments, tags):
    """Strip any of the given comment *tags* from each line in *comments*,
    modifying the list in place (helper for extract)."""
    def _strip(line):
        for tag in tags:
            if line.startswith(tag):
                # Drop the tag and any surrounding whitespace.
                return line[len(tag):].strip()
        return line
    comments[:] = [_strip(line) for line in comments]
[ "def", "_strip_comment_tags", "(", "comments", ",", "tags", ")", ":", "def", "_strip", "(", "line", ")", ":", "for", "tag", "in", "tags", ":", "if", "line", ".", "startswith", "(", "tag", ")", ":", "return", "line", "[", "len", "(", "tag", ")", ":", "]", ".", "strip", "(", ")", "return", "line", "comments", "[", ":", "]", "=", "map", "(", "_strip", ",", "comments", ")" ]
helper function for extract that strips comment tags from strings in a list of comment lines .
train
false
44,997
def package_resource_reorder(context, data_dict):
    """Reorder resources against datasets.

    data_dict must contain 'id' (the package id) and 'order', a list of
    unique resource ids.  Resources named in 'order' are moved to the
    front in that order; remaining resources keep their relative order
    after them.  Returns the package id and the full new ordering.
    Raises ValidationError for a malformed order list or an unknown id.
    """
    id = _get_or_bust(data_dict, 'id')
    order = _get_or_bust(data_dict, 'order')
    if (not isinstance(order, list)):
        raise ValidationError({'order': 'Must be a list of resource'})
    if (len(set(order)) != len(order)):
        raise ValidationError({'order': 'Must supply unique resource_ids'})
    package_dict = _get_action('package_show')(context, {'id': id})
    existing_resources = package_dict.get('resources', [])
    ordered_resources = []
    for resource_id in order:
        for i in range(0, len(existing_resources)):
            if (existing_resources[i]['id'] == resource_id):
                resource = existing_resources.pop(i)
                ordered_resources.append(resource)
                break
        else:
            # for/else: ran through every remaining resource, no match.
            raise ValidationError({'order': 'resource_id {id} can not be found'.format(id=resource_id)})
    new_resources = (ordered_resources + existing_resources)
    package_dict['resources'] = new_resources
    # Authorization is checked against the already-reordered dict, then
    # the reorder is persisted via package_update.
    _check_access('package_resource_reorder', context, package_dict)
    _get_action('package_update')(context, package_dict)
    return {'id': id, 'order': [resource['id'] for resource in new_resources]}
[ "def", "package_resource_reorder", "(", "context", ",", "data_dict", ")", ":", "id", "=", "_get_or_bust", "(", "data_dict", ",", "'id'", ")", "order", "=", "_get_or_bust", "(", "data_dict", ",", "'order'", ")", "if", "(", "not", "isinstance", "(", "order", ",", "list", ")", ")", ":", "raise", "ValidationError", "(", "{", "'order'", ":", "'Must be a list of resource'", "}", ")", "if", "(", "len", "(", "set", "(", "order", ")", ")", "!=", "len", "(", "order", ")", ")", ":", "raise", "ValidationError", "(", "{", "'order'", ":", "'Must supply unique resource_ids'", "}", ")", "package_dict", "=", "_get_action", "(", "'package_show'", ")", "(", "context", ",", "{", "'id'", ":", "id", "}", ")", "existing_resources", "=", "package_dict", ".", "get", "(", "'resources'", ",", "[", "]", ")", "ordered_resources", "=", "[", "]", "for", "resource_id", "in", "order", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "existing_resources", ")", ")", ":", "if", "(", "existing_resources", "[", "i", "]", "[", "'id'", "]", "==", "resource_id", ")", ":", "resource", "=", "existing_resources", ".", "pop", "(", "i", ")", "ordered_resources", ".", "append", "(", "resource", ")", "break", "else", ":", "raise", "ValidationError", "(", "{", "'order'", ":", "'resource_id {id} can not be found'", ".", "format", "(", "id", "=", "resource_id", ")", "}", ")", "new_resources", "=", "(", "ordered_resources", "+", "existing_resources", ")", "package_dict", "[", "'resources'", "]", "=", "new_resources", "_check_access", "(", "'package_resource_reorder'", ",", "context", ",", "package_dict", ")", "_get_action", "(", "'package_update'", ")", "(", "context", ",", "package_dict", ")", "return", "{", "'id'", ":", "id", ",", "'order'", ":", "[", "resource", "[", "'id'", "]", "for", "resource", "in", "new_resources", "]", "}" ]
reorder resources against datasets .
train
false
44,998
@task
@timed
def clear_mongo():
    """Drop the bok-choy MongoDB database by shelling out to the mongo
    client (output discarded)."""
    sh("mongo {} --eval 'db.dropDatabase()' > /dev/null".format(Env.BOK_CHOY_MONGO_DATABASE))
[ "@", "task", "@", "timed", "def", "clear_mongo", "(", ")", ":", "sh", "(", "\"mongo {} --eval 'db.dropDatabase()' > /dev/null\"", ".", "format", "(", "Env", ".", "BOK_CHOY_MONGO_DATABASE", ")", ")" ]
clears mongo database .
train
false
45,000
def run_examples(windowed=False, quiet=False, summary=True):
    """Run all examples in the list of modules.

    windowed -- also run the GUI examples
    quiet    -- report through a compact PyTestReporter
    summary  -- show a summary of successes/failures at the end
    Returns True when every example succeeded.
    """
    successes = []
    failures = []
    examples = TERMINAL_EXAMPLES
    if windowed:
        examples += WINDOWED_EXAMPLES
    reporter = None
    if quiet:
        from sympy.utilities.runtests import PyTestReporter
        reporter = PyTestReporter()
        reporter.write('Testing Examples\n')
        reporter.write('-' * reporter.terminal_width)
    for example in examples:
        bucket = successes if run_example(example, reporter=reporter) else failures
        bucket.append(example)
    if summary:
        show_summary(successes, failures, reporter=reporter)
    return len(failures) == 0
[ "def", "run_examples", "(", "windowed", "=", "False", ",", "quiet", "=", "False", ",", "summary", "=", "True", ")", ":", "successes", "=", "[", "]", "failures", "=", "[", "]", "examples", "=", "TERMINAL_EXAMPLES", "if", "windowed", ":", "examples", "+=", "WINDOWED_EXAMPLES", "if", "quiet", ":", "from", "sympy", ".", "utilities", ".", "runtests", "import", "PyTestReporter", "reporter", "=", "PyTestReporter", "(", ")", "reporter", ".", "write", "(", "'Testing Examples\\n'", ")", "reporter", ".", "write", "(", "(", "'-'", "*", "reporter", ".", "terminal_width", ")", ")", "else", ":", "reporter", "=", "None", "for", "example", "in", "examples", ":", "if", "run_example", "(", "example", ",", "reporter", "=", "reporter", ")", ":", "successes", ".", "append", "(", "example", ")", "else", ":", "failures", ".", "append", "(", "example", ")", "if", "summary", ":", "show_summary", "(", "successes", ",", "failures", ",", "reporter", "=", "reporter", ")", "return", "(", "len", "(", "failures", ")", "==", "0", ")" ]
run all examples in the list of modules .
train
false
45,003
def wrap_fragment(fragment, new_content):
    """Return a new Fragment whose content is *new_content* and which
    carries over all of *fragment*'s resources."""
    wrapper = Fragment(content=new_content)
    wrapper.add_frag_resources(fragment)
    return wrapper
[ "def", "wrap_fragment", "(", "fragment", ",", "new_content", ")", ":", "wrapper_frag", "=", "Fragment", "(", "content", "=", "new_content", ")", "wrapper_frag", ".", "add_frag_resources", "(", "fragment", ")", "return", "wrapper_frag" ]
returns a new fragment that has new_content and all as its content .
train
false
45,004
def _quota_reserve(context, project_id, user_id):
    """Create sample quota, quota-usage and reservation rows for tests.

    Sets up three reservable resources ('resource0', 'resource1' and
    'fixed_ips'), creates matching project and per-user quotas, installs
    fake sync functions for them, and finally reserves deltas against
    the quotas, returning whatever db.quota_reserve returns.
    """
    def get_sync(resource, usage):
        # Factory producing a fake quota-sync callable that always
        # reports the given usage for its resource.
        def sync(elevated, project_id, user_id):
            return {resource: usage}
        return sync
    quotas = {}
    user_quotas = {}
    resources = {}
    deltas = {}
    for i in range(3):
        resource = ('resource%d' % i)
        if (i == 2):
            # The third resource uses the real 'fixed_ips' name; its
            # per-user quota mirrors the project quota.
            resource = 'fixed_ips'
            quotas[resource] = db.quota_create(context, project_id, resource, (i + 2)).hard_limit
            user_quotas[resource] = quotas[resource]
        else:
            quotas[resource] = db.quota_create(context, project_id, resource, (i + 1)).hard_limit
            user_quotas[resource] = db.quota_create(context, project_id, resource, (i + 1), user_id=user_id).hard_limit
        sync_name = ('_sync_%s' % resource)
        resources[resource] = quota.ReservableResource(resource, sync_name, ('quota_res_%d' % i))
        deltas[resource] = i
        # Install the fake sync both as a module attribute and in the
        # sync-function registry so quota_reserve can look it up.
        setattr(sqlalchemy_api, sync_name, get_sync(resource, i))
        sqlalchemy_api.QUOTA_SYNC_FUNCTIONS[sync_name] = getattr(sqlalchemy_api, sync_name)
    return db.quota_reserve(context, resources, quotas, user_quotas, deltas, timeutils.utcnow(), CONF.quota.until_refresh, datetime.timedelta(days=1), project_id, user_id)
[ "def", "_quota_reserve", "(", "context", ",", "project_id", ",", "user_id", ")", ":", "def", "get_sync", "(", "resource", ",", "usage", ")", ":", "def", "sync", "(", "elevated", ",", "project_id", ",", "user_id", ")", ":", "return", "{", "resource", ":", "usage", "}", "return", "sync", "quotas", "=", "{", "}", "user_quotas", "=", "{", "}", "resources", "=", "{", "}", "deltas", "=", "{", "}", "for", "i", "in", "range", "(", "3", ")", ":", "resource", "=", "(", "'resource%d'", "%", "i", ")", "if", "(", "i", "==", "2", ")", ":", "resource", "=", "'fixed_ips'", "quotas", "[", "resource", "]", "=", "db", ".", "quota_create", "(", "context", ",", "project_id", ",", "resource", ",", "(", "i", "+", "2", ")", ")", ".", "hard_limit", "user_quotas", "[", "resource", "]", "=", "quotas", "[", "resource", "]", "else", ":", "quotas", "[", "resource", "]", "=", "db", ".", "quota_create", "(", "context", ",", "project_id", ",", "resource", ",", "(", "i", "+", "1", ")", ")", ".", "hard_limit", "user_quotas", "[", "resource", "]", "=", "db", ".", "quota_create", "(", "context", ",", "project_id", ",", "resource", ",", "(", "i", "+", "1", ")", ",", "user_id", "=", "user_id", ")", ".", "hard_limit", "sync_name", "=", "(", "'_sync_%s'", "%", "resource", ")", "resources", "[", "resource", "]", "=", "quota", ".", "ReservableResource", "(", "resource", ",", "sync_name", ",", "(", "'quota_res_%d'", "%", "i", ")", ")", "deltas", "[", "resource", "]", "=", "i", "setattr", "(", "sqlalchemy_api", ",", "sync_name", ",", "get_sync", "(", "resource", ",", "i", ")", ")", "sqlalchemy_api", ".", "QUOTA_SYNC_FUNCTIONS", "[", "sync_name", "]", "=", "getattr", "(", "sqlalchemy_api", ",", "sync_name", ")", "return", "db", ".", "quota_reserve", "(", "context", ",", "resources", ",", "quotas", ",", "user_quotas", ",", "deltas", ",", "timeutils", ".", "utcnow", "(", ")", ",", "CONF", ".", "quota", ".", "until_refresh", ",", "datetime", ".", "timedelta", "(", "days", "=", "1", ")", ",", "project_id", ",", 
"user_id", ")" ]
create sample quota .
train
false
45,005
def handle_protectederror(obj, request, e): dependent_objects = e[1] try: dep_class = dependent_objects[0]._meta.verbose_name_plural except IndexError: raise e if (type(obj) in (list, tuple)): err_message = 'Unable to delete the requested {}. The following dependent {} were found: '.format(obj[0]._meta.verbose_name_plural, dep_class) else: err_message = 'Unable to delete {} {}. The following dependent {} were found: '.format(obj._meta.verbose_name, obj, dep_class) dependent_objects = [] for o in e[1]: if hasattr(o, 'get_absolute_url'): dependent_objects.append('<a href="{}">{}</a>'.format(o.get_absolute_url(), str(o))) else: dependent_objects.append(str(o)) err_message += ', '.join(dependent_objects) messages.error(request, err_message)
[ "def", "handle_protectederror", "(", "obj", ",", "request", ",", "e", ")", ":", "dependent_objects", "=", "e", "[", "1", "]", "try", ":", "dep_class", "=", "dependent_objects", "[", "0", "]", ".", "_meta", ".", "verbose_name_plural", "except", "IndexError", ":", "raise", "e", "if", "(", "type", "(", "obj", ")", "in", "(", "list", ",", "tuple", ")", ")", ":", "err_message", "=", "'Unable to delete the requested {}. The following dependent {} were found: '", ".", "format", "(", "obj", "[", "0", "]", ".", "_meta", ".", "verbose_name_plural", ",", "dep_class", ")", "else", ":", "err_message", "=", "'Unable to delete {} {}. The following dependent {} were found: '", ".", "format", "(", "obj", ".", "_meta", ".", "verbose_name", ",", "obj", ",", "dep_class", ")", "dependent_objects", "=", "[", "]", "for", "o", "in", "e", "[", "1", "]", ":", "if", "hasattr", "(", "o", ",", "'get_absolute_url'", ")", ":", "dependent_objects", ".", "append", "(", "'<a href=\"{}\">{}</a>'", ".", "format", "(", "o", ".", "get_absolute_url", "(", ")", ",", "str", "(", "o", ")", ")", ")", "else", ":", "dependent_objects", ".", "append", "(", "str", "(", "o", ")", ")", "err_message", "+=", "', '", ".", "join", "(", "dependent_objects", ")", "messages", ".", "error", "(", "request", ",", "err_message", ")" ]
generate a user-friendly error message in response to a protectederror exception .
train
false
45,006
def libvlc_audio_equalizer_get_band_count(): f = (_Cfunctions.get('libvlc_audio_equalizer_get_band_count', None) or _Cfunction('libvlc_audio_equalizer_get_band_count', (), None, ctypes.c_uint)) return f()
[ "def", "libvlc_audio_equalizer_get_band_count", "(", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_equalizer_get_band_count'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_equalizer_get_band_count'", ",", "(", ")", ",", "None", ",", "ctypes", ".", "c_uint", ")", ")", "return", "f", "(", ")" ]
get the number of distinct frequency bands for an equalizer .
train
false
45,007
def demo_error_analysis(): postag(error_output='errors.txt')
[ "def", "demo_error_analysis", "(", ")", ":", "postag", "(", "error_output", "=", "'errors.txt'", ")" ]
writes a file with context for each erroneous word after tagging testing data .
train
false
45,010
def update_metadata(account_id, folder_id, folder_role, new_flags, session): if (not new_flags): return account = Account.get(account_id, session) change_count = 0 for item in session.query(ImapUid).filter((ImapUid.account_id == account_id), ImapUid.msg_uid.in_(new_flags.keys()), (ImapUid.folder_id == folder_id)): flags = new_flags[item.msg_uid].flags labels = getattr(new_flags[item.msg_uid], 'labels', None) changed = item.update_flags(flags) if (labels is not None): item.update_labels(labels) changed = True if changed: change_count += 1 is_draft = (item.is_draft and ((folder_role == 'drafts') or (folder_role == 'all'))) update_message_metadata(session, account, item.message, is_draft) session.commit() log.info('Updated UID metadata', changed=change_count, out_of=len(new_flags))
[ "def", "update_metadata", "(", "account_id", ",", "folder_id", ",", "folder_role", ",", "new_flags", ",", "session", ")", ":", "if", "(", "not", "new_flags", ")", ":", "return", "account", "=", "Account", ".", "get", "(", "account_id", ",", "session", ")", "change_count", "=", "0", "for", "item", "in", "session", ".", "query", "(", "ImapUid", ")", ".", "filter", "(", "(", "ImapUid", ".", "account_id", "==", "account_id", ")", ",", "ImapUid", ".", "msg_uid", ".", "in_", "(", "new_flags", ".", "keys", "(", ")", ")", ",", "(", "ImapUid", ".", "folder_id", "==", "folder_id", ")", ")", ":", "flags", "=", "new_flags", "[", "item", ".", "msg_uid", "]", ".", "flags", "labels", "=", "getattr", "(", "new_flags", "[", "item", ".", "msg_uid", "]", ",", "'labels'", ",", "None", ")", "changed", "=", "item", ".", "update_flags", "(", "flags", ")", "if", "(", "labels", "is", "not", "None", ")", ":", "item", ".", "update_labels", "(", "labels", ")", "changed", "=", "True", "if", "changed", ":", "change_count", "+=", "1", "is_draft", "=", "(", "item", ".", "is_draft", "and", "(", "(", "folder_role", "==", "'drafts'", ")", "or", "(", "folder_role", "==", "'all'", ")", ")", ")", "update_message_metadata", "(", "session", ",", "account", ",", "item", ".", "message", ",", "is_draft", ")", "session", ".", "commit", "(", ")", "log", ".", "info", "(", "'Updated UID metadata'", ",", "changed", "=", "change_count", ",", "out_of", "=", "len", "(", "new_flags", ")", ")" ]
update flags and labels .
train
false
45,011
def enabled_service_owners(): error = {} if ('pkg.owner' not in __salt__): error['Unsupported Package Manager'] = 'The module for the package manager on this system does not support looking up which package(s) owns which file(s)' if ('service.show' not in __salt__): error['Unsupported Service Manager'] = 'The module for the service manager on this system does not support showing descriptive service data' if error: return {'Error': error} ret = {} services = __salt__['service.get_enabled']() for service in services: data = __salt__['service.show'](service) if ('ExecStart' not in data): continue start_cmd = data['ExecStart']['path'] pkg = __salt__['pkg.owner'](start_cmd) ret[service] = next(six.itervalues(pkg)) return ret
[ "def", "enabled_service_owners", "(", ")", ":", "error", "=", "{", "}", "if", "(", "'pkg.owner'", "not", "in", "__salt__", ")", ":", "error", "[", "'Unsupported Package Manager'", "]", "=", "'The module for the package manager on this system does not support looking up which package(s) owns which file(s)'", "if", "(", "'service.show'", "not", "in", "__salt__", ")", ":", "error", "[", "'Unsupported Service Manager'", "]", "=", "'The module for the service manager on this system does not support showing descriptive service data'", "if", "error", ":", "return", "{", "'Error'", ":", "error", "}", "ret", "=", "{", "}", "services", "=", "__salt__", "[", "'service.get_enabled'", "]", "(", ")", "for", "service", "in", "services", ":", "data", "=", "__salt__", "[", "'service.show'", "]", "(", "service", ")", "if", "(", "'ExecStart'", "not", "in", "data", ")", ":", "continue", "start_cmd", "=", "data", "[", "'ExecStart'", "]", "[", "'path'", "]", "pkg", "=", "__salt__", "[", "'pkg.owner'", "]", "(", "start_cmd", ")", "ret", "[", "service", "]", "=", "next", "(", "six", ".", "itervalues", "(", "pkg", ")", ")", "return", "ret" ]
return which packages own each of the services that are currently enabled .
train
true
45,013
@validator def domain(value): return pattern.match(value)
[ "@", "validator", "def", "domain", "(", "value", ")", ":", "return", "pattern", ".", "match", "(", "value", ")" ]
defined in admin module .
train
false
45,014
def _ruby_installed(ret, ruby, user=None): default = __salt__['rbenv.default'](runas=user) for version in __salt__['rbenv.versions'](user): if (version == ruby): ret['result'] = True ret['comment'] = 'Requested ruby exists.' ret['default'] = (default == ruby) break return ret
[ "def", "_ruby_installed", "(", "ret", ",", "ruby", ",", "user", "=", "None", ")", ":", "default", "=", "__salt__", "[", "'rbenv.default'", "]", "(", "runas", "=", "user", ")", "for", "version", "in", "__salt__", "[", "'rbenv.versions'", "]", "(", "user", ")", ":", "if", "(", "version", "==", "ruby", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Requested ruby exists.'", "ret", "[", "'default'", "]", "=", "(", "default", "==", "ruby", ")", "break", "return", "ret" ]
check to see if given ruby is installed .
train
true
45,015
def nevercache_token(): return (u'nevercache.' + settings.NEVERCACHE_KEY)
[ "def", "nevercache_token", "(", ")", ":", "return", "(", "u'nevercache.'", "+", "settings", ".", "NEVERCACHE_KEY", ")" ]
returns the secret token that delimits content wrapped in the nevercache template tag .
train
false
45,016
def add_dep_paths(): paths = [] for (importer, modname, ispkg) in pkgutil.iter_modules(kivy.deps.__path__): if (not ispkg): continue try: mod = importer.find_module(modname).load_module(modname) except ImportError as e: logging.warn('deps: Error importing dependency: {}'.format(str(e))) continue if hasattr(mod, 'dep_bins'): paths.extend(mod.dep_bins) sys.path.extend(paths)
[ "def", "add_dep_paths", "(", ")", ":", "paths", "=", "[", "]", "for", "(", "importer", ",", "modname", ",", "ispkg", ")", "in", "pkgutil", ".", "iter_modules", "(", "kivy", ".", "deps", ".", "__path__", ")", ":", "if", "(", "not", "ispkg", ")", ":", "continue", "try", ":", "mod", "=", "importer", ".", "find_module", "(", "modname", ")", ".", "load_module", "(", "modname", ")", "except", "ImportError", "as", "e", ":", "logging", ".", "warn", "(", "'deps: Error importing dependency: {}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "continue", "if", "hasattr", "(", "mod", ",", "'dep_bins'", ")", ":", "paths", ".", "extend", "(", "mod", ".", "dep_bins", ")", "sys", ".", "path", ".", "extend", "(", "paths", ")" ]
should be called by the hook .
train
false
45,017
def getEndpointsFromSegments(segments): endpoints = [] for segment in segments: for endpoint in segment: endpoints.append(endpoint) return endpoints
[ "def", "getEndpointsFromSegments", "(", "segments", ")", ":", "endpoints", "=", "[", "]", "for", "segment", "in", "segments", ":", "for", "endpoint", "in", "segment", ":", "endpoints", ".", "append", "(", "endpoint", ")", "return", "endpoints" ]
get endpoints from segments .
train
false
45,020
def reload_django_url_config(): urlconf = settings.ROOT_URLCONF if (urlconf and (urlconf in sys.modules)): reload(sys.modules[urlconf]) reloaded = import_module(urlconf) reloaded_urls = reloaded.urlpatterns set_urlconf(tuple(reloaded_urls))
[ "def", "reload_django_url_config", "(", ")", ":", "urlconf", "=", "settings", ".", "ROOT_URLCONF", "if", "(", "urlconf", "and", "(", "urlconf", "in", "sys", ".", "modules", ")", ")", ":", "reload", "(", "sys", ".", "modules", "[", "urlconf", "]", ")", "reloaded", "=", "import_module", "(", "urlconf", ")", "reloaded_urls", "=", "reloaded", ".", "urlpatterns", "set_urlconf", "(", "tuple", "(", "reloaded_urls", ")", ")" ]
reloads djangos url config .
train
false
45,021
def div_proxy(x, y): f = int_or_true_div((as_scalar(x).type in discrete_types), (as_scalar(y).type in discrete_types)) return f(x, y)
[ "def", "div_proxy", "(", "x", ",", "y", ")", ":", "f", "=", "int_or_true_div", "(", "(", "as_scalar", "(", "x", ")", ".", "type", "in", "discrete_types", ")", ",", "(", "as_scalar", "(", "y", ")", ".", "type", "in", "discrete_types", ")", ")", "return", "f", "(", "x", ",", "y", ")" ]
proxy for either true_div or int_div .
train
false
45,022
def get_tasktrackers(request): return [Tracker(tracker) for tracker in request.jt.all_task_trackers().trackers]
[ "def", "get_tasktrackers", "(", "request", ")", ":", "return", "[", "Tracker", "(", "tracker", ")", "for", "tracker", "in", "request", ".", "jt", ".", "all_task_trackers", "(", ")", ".", "trackers", "]" ]
return a thrifttasktrackerstatuslist object containing all task trackers .
train
false
45,023
def get_available_datastores(session, cluster=None, datastore_regex=None): ds = session._call_method(vutil, 'get_object_property', cluster, 'datastore') if (not ds): return [] data_store_mors = ds.ManagedObjectReference data_stores = session._call_method(vim_util, 'get_properties_for_a_collection_of_objects', 'Datastore', data_store_mors, ['summary.type', 'summary.name', 'summary.accessible', 'summary.maintenanceMode']) allowed = [] while data_stores: allowed.extend(_get_allowed_datastores(data_stores, datastore_regex)) data_stores = session._call_method(vutil, 'continue_retrieval', data_stores) return allowed
[ "def", "get_available_datastores", "(", "session", ",", "cluster", "=", "None", ",", "datastore_regex", "=", "None", ")", ":", "ds", "=", "session", ".", "_call_method", "(", "vutil", ",", "'get_object_property'", ",", "cluster", ",", "'datastore'", ")", "if", "(", "not", "ds", ")", ":", "return", "[", "]", "data_store_mors", "=", "ds", ".", "ManagedObjectReference", "data_stores", "=", "session", ".", "_call_method", "(", "vim_util", ",", "'get_properties_for_a_collection_of_objects'", ",", "'Datastore'", ",", "data_store_mors", ",", "[", "'summary.type'", ",", "'summary.name'", ",", "'summary.accessible'", ",", "'summary.maintenanceMode'", "]", ")", "allowed", "=", "[", "]", "while", "data_stores", ":", "allowed", ".", "extend", "(", "_get_allowed_datastores", "(", "data_stores", ",", "datastore_regex", ")", ")", "data_stores", "=", "session", ".", "_call_method", "(", "vutil", ",", "'continue_retrieval'", ",", "data_stores", ")", "return", "allowed" ]
get the datastore list and choose the first local storage .
train
false
45,026
def getWiddershinsUnitPolar(angle): return complex(math.cos(angle), math.sin(angle))
[ "def", "getWiddershinsUnitPolar", "(", "angle", ")", ":", "return", "complex", "(", "math", ".", "cos", "(", "angle", ")", ",", "math", ".", "sin", "(", "angle", ")", ")" ]
get polar complex from counterclockwise angle from 1 .
train
false
45,027
@contextfunction def documents_objects_list(context, objects, folder, skip_group=False): request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('documents/tags/objects_list', {'objects': objects, 'skip_group': skip_group, 'folder': folder}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "documents_objects_list", "(", "context", ",", "objects", ",", "folder", ",", "skip_group", "=", "False", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_format", "=", "context", "[", "'response_format'", "]", "return", "Markup", "(", "render_to_string", "(", "'documents/tags/objects_list'", ",", "{", "'objects'", ":", "objects", ",", "'skip_group'", ":", "skip_group", ",", "'folder'", ":", "folder", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")", ")" ]
print a list of all objects .
train
false