Dataset columns (from the dataset viewer header):
  id_within_dataset: int64, values 1 to 55.5k
  snippet: string, lengths 19 to 14.2k
  tokens: list, lengths 6 to 1.63k
  nl: string, lengths 6 to 352
  split_within_dataset: string, 1 distinct value
  is_duplicated: bool, 2 classes
6,615
def libvlc_audio_output_device_set(p_mi, psz_audio_output, psz_device_id):
    f = (_Cfunctions.get('libvlc_audio_output_device_set', None) or
         _Cfunction('libvlc_audio_output_device_set', ((1,), (1,), (1,)), None,
                    None, MediaPlayer, ctypes.c_char_p, ctypes.c_char_p))
    return f(p_mi, psz_audio_output, psz_device_id)
[ "def", "libvlc_audio_output_device_set", "(", "p_mi", ",", "psz_audio_output", ",", "psz_device_id", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_output_device_set'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_output_device_set'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_char_p", ",", "ctypes", ".", "c_char_p", ")", ")", "return", "f", "(", "p_mi", ",", "psz_audio_output", ",", "psz_device_id", ")" ]
configures an explicit audio output device .
train
false
6,616
def _rec_degree_list(g, v, i, degs):
    degs[i] = max(degs[i], dmp_degree(g, v))
    if v > 0:
        (v, i) = (v - 1, i + 1)
        for c in g:
            _rec_degree_list(c, v, i, degs)
[ "def", "_rec_degree_list", "(", "g", ",", "v", ",", "i", ",", "degs", ")", ":", "degs", "[", "i", "]", "=", "max", "(", "degs", "[", "i", "]", ",", "dmp_degree", "(", "g", ",", "v", ")", ")", "if", "(", "v", ">", "0", ")", ":", "(", "v", ",", "i", ")", "=", "(", "(", "v", "-", "1", ")", ",", "(", "i", "+", "1", ")", ")", "for", "c", "in", "g", ":", "_rec_degree_list", "(", "c", ",", "v", ",", "i", ",", "degs", ")" ]
recursive helper for :func:dmp_degree_list .
train
false
6,617
def get_children_pids(ppid):
    return system_output('ps -L --ppid=%d -o lwp' % ppid).split('\n')[1:]
[ "def", "get_children_pids", "(", "ppid", ")", ":", "return", "system_output", "(", "(", "'ps -L --ppid=%d -o lwp'", "%", "ppid", ")", ")", ".", "split", "(", "'\\n'", ")", "[", "1", ":", "]" ]
get all pids of children/threads of parent ppid param ppid: parent pid return: list of pids of all children/threads of ppid .
train
false
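The snippet above depends on a framework helper, system_output. A minimal self-contained sketch of the same idea, assuming only the standard library and a Linux ps(1); the standalone function name is illustrative:

import subprocess

def get_children_pids_sketch(ppid):
    # 'ps -L --ppid=<pid> -o lwp' prints one thread id per line, preceded by a
    # header row ('LWP') that the [1:] slice drops.
    out = subprocess.check_output(['ps', '-L', '--ppid=%d' % ppid, '-o', 'lwp'])
    return out.decode().split('\n')[1:]

print(get_children_pids_sketch(1))  # threads of init's children, on Linux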
6,621
def do_post_ops(cnxt, stack, current_stack=None, action=None, is_stack_failure=False):
    cinstances = get_plug_point_class_instances()
    if action is None:
        action = stack.action
    _do_ops(cinstances, 'do_post_op', cnxt, stack, current_stack, action, None)
[ "def", "do_post_ops", "(", "cnxt", ",", "stack", ",", "current_stack", "=", "None", ",", "action", "=", "None", ",", "is_stack_failure", "=", "False", ")", ":", "cinstances", "=", "get_plug_point_class_instances", "(", ")", "if", "(", "action", "is", "None", ")", ":", "action", "=", "stack", ".", "action", "_do_ops", "(", "cinstances", ",", "'do_post_op'", ",", "cnxt", ",", "stack", ",", "current_stack", ",", "action", ",", "None", ")" ]
call available post-op methods sequentially .
train
false
6,622
def get_translation(modname, dirname=None):
    if dirname is None:
        dirname = modname
    locale_path = get_module_data_path(dirname, relpath='locale',
                                       attr_name='LOCALEPATH')
    language = load_lang_conf()
    if os.name == 'nt':
        os.environ['LANG'] = language
    else:
        os.environ['LANGUAGE'] = language
    import gettext
    try:
        _trans = gettext.translation(modname, locale_path, codeset='utf-8')
        lgettext = _trans.lgettext

        def translate_gettext(x):
            if (not PY3) and is_unicode(x):
                x = x.encode('utf-8')
            y = lgettext(x)
            if is_text_string(y) and PY3:
                return y
            else:
                return to_text_string(y, 'utf-8')
        return translate_gettext
    except IOError as _e:
        def translate_dumb(x):
            if not is_unicode(x):
                return to_text_string(x, 'utf-8')
            return x
        return translate_dumb
[ "def", "get_translation", "(", "modname", ",", "dirname", "=", "None", ")", ":", "if", "(", "dirname", "is", "None", ")", ":", "dirname", "=", "modname", "locale_path", "=", "get_module_data_path", "(", "dirname", ",", "relpath", "=", "'locale'", ",", "attr_name", "=", "'LOCALEPATH'", ")", "language", "=", "load_lang_conf", "(", ")", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "os", ".", "environ", "[", "'LANG'", "]", "=", "language", "else", ":", "os", ".", "environ", "[", "'LANGUAGE'", "]", "=", "language", "import", "gettext", "try", ":", "_trans", "=", "gettext", ".", "translation", "(", "modname", ",", "locale_path", ",", "codeset", "=", "'utf-8'", ")", "lgettext", "=", "_trans", ".", "lgettext", "def", "translate_gettext", "(", "x", ")", ":", "if", "(", "(", "not", "PY3", ")", "and", "is_unicode", "(", "x", ")", ")", ":", "x", "=", "x", ".", "encode", "(", "'utf-8'", ")", "y", "=", "lgettext", "(", "x", ")", "if", "(", "is_text_string", "(", "y", ")", "and", "PY3", ")", ":", "return", "y", "else", ":", "return", "to_text_string", "(", "y", ",", "'utf-8'", ")", "return", "translate_gettext", "except", "IOError", "as", "_e", ":", "def", "translate_dumb", "(", "x", ")", ":", "if", "(", "not", "is_unicode", "(", "x", ")", ")", ":", "return", "to_text_string", "(", "x", ",", "'utf-8'", ")", "return", "x", "return", "translate_dumb" ]
returns translation matching parameters .
train
true
6,623
@step('I can specify html in the display name and save')
def i_can_modify_the_display_name_with_html(_step):
    index = world.get_setting_entry_index(DISPLAY_NAME)
    world.set_field_value(index, "<script>alert('test')</script>")
    verify_modified_display_name_with_html()
    world.save_component()
[ "@", "step", "(", "'I can specify html in the display name and save'", ")", "def", "i_can_modify_the_display_name_with_html", "(", "_step", ")", ":", "index", "=", "world", ".", "get_setting_entry_index", "(", "DISPLAY_NAME", ")", "world", ".", "set_field_value", "(", "index", ",", "\"<script>alert('test')</script>\"", ")", "verify_modified_display_name_with_html", "(", ")", "world", ".", "save_component", "(", ")" ]
if alert appear on save then unexpectedalertpresentexception will occur and test will fail .
train
false
6,624
def try_address(host, port=0, family=socket.AF_INET):
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
        sock.bind((host, port))
    except (socket.error, socket.gaierror):
        return False
    else:
        sock.close()
        return True
[ "def", "try_address", "(", "host", ",", "port", "=", "0", ",", "family", "=", "socket", ".", "AF_INET", ")", ":", "try", ":", "sock", "=", "socket", ".", "socket", "(", "family", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "bind", "(", "(", "host", ",", "port", ")", ")", "except", "(", "socket", ".", "error", ",", "socket", ".", "gaierror", ")", ":", "return", "False", "else", ":", "sock", ".", "close", "(", ")", "return", "True" ]
try to bind a socket on the given host:port and return true if that has been possible .
train
false
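try_address above is self-contained apart from the socket import; a quick usage sketch (the second call uses a TEST-NET address that should not be locally bindable):

import socket

def try_address(host, port=0, family=socket.AF_INET):
    # Port 0 lets the OS pick any free port, so success means the host
    # address itself is bindable.
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
        sock.bind((host, port))
    except (socket.error, socket.gaierror):
        return False
    else:
        sock.close()
        return True

print(try_address('127.0.0.1'))    # normally True on a working loopback
print(try_address('203.0.113.1'))  # normally False: address not assigned locally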
6,626
@register.simple_tag(takes_context=True)
def header_dropdown_action_hooks(context):
    return action_hooks(context, HeaderDropdownActionHook, u'actions',
                        u'extensions/header_action_dropdown.html')
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "header_dropdown_action_hooks", "(", "context", ")", ":", "return", "action_hooks", "(", "context", ",", "HeaderDropdownActionHook", ",", "u'actions'", ",", "u'extensions/header_action_dropdown.html'", ")" ]
displays all multi-entry action hooks for the header bar .
train
false
6,627
def test_has_eeg_average_ref_proj():
    assert_true(not _has_eeg_average_ref_proj([]))
    raw = read_raw_fif(raw_fname)
    raw.set_eeg_reference()
    assert_true(_has_eeg_average_ref_proj(raw.info['projs']))
[ "def", "test_has_eeg_average_ref_proj", "(", ")", ":", "assert_true", "(", "(", "not", "_has_eeg_average_ref_proj", "(", "[", "]", ")", ")", ")", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", "raw", ".", "set_eeg_reference", "(", ")", "assert_true", "(", "_has_eeg_average_ref_proj", "(", "raw", ".", "info", "[", "'projs'", "]", ")", ")" ]
test checking whether an eeg average reference exists .
train
false
6,628
def parse_mysql_cnf(dbinfo):
    read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
    if read_default_file:
        config = configparser.RawConfigParser({'user': '', 'password': '',
                                               'database': '', 'host': '',
                                               'port': '', 'socket': ''})
        import os
        config.read(os.path.expanduser(read_default_file))
        try:
            user = config.get('client', 'user')
            password = config.get('client', 'password')
            database_name = config.get('client', 'database')
            database_host = config.get('client', 'host')
            database_port = config.get('client', 'port')
            socket = config.get('client', 'socket')
            if (database_host == 'localhost') and socket:
                database_host = socket
            return (user, password, database_name, database_host, database_port)
        except configparser.NoSectionError:
            pass
    return ('', '', '', '', '')
[ "def", "parse_mysql_cnf", "(", "dbinfo", ")", ":", "read_default_file", "=", "dbinfo", ".", "get", "(", "'OPTIONS'", ",", "{", "}", ")", ".", "get", "(", "'read_default_file'", ")", "if", "read_default_file", ":", "config", "=", "configparser", ".", "RawConfigParser", "(", "{", "'user'", ":", "''", ",", "'password'", ":", "''", ",", "'database'", ":", "''", ",", "'host'", ":", "''", ",", "'port'", ":", "''", ",", "'socket'", ":", "''", "}", ")", "import", "os", "config", ".", "read", "(", "os", ".", "path", ".", "expanduser", "(", "read_default_file", ")", ")", "try", ":", "user", "=", "config", ".", "get", "(", "'client'", ",", "'user'", ")", "password", "=", "config", ".", "get", "(", "'client'", ",", "'password'", ")", "database_name", "=", "config", ".", "get", "(", "'client'", ",", "'database'", ")", "database_host", "=", "config", ".", "get", "(", "'client'", ",", "'host'", ")", "database_port", "=", "config", ".", "get", "(", "'client'", ",", "'port'", ")", "socket", "=", "config", ".", "get", "(", "'client'", ",", "'socket'", ")", "if", "(", "(", "database_host", "==", "'localhost'", ")", "and", "socket", ")", ":", "database_host", "=", "socket", "return", "(", "user", ",", "password", ",", "database_name", ",", "database_host", ",", "database_port", ")", "except", "configparser", ".", "NoSectionError", ":", "pass", "return", "(", "''", ",", "''", ",", "''", ",", "''", ",", "''", ")" ]
attempt to parse mysql database config file for connection settings .
train
true
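A sketch of the [client] section layout that parse_mysql_cnf reads, written to a temporary file so it can be run standalone; only stdlib configparser is assumed, and the config values are made up:

import configparser
import tempfile

CNF = '''
[client]
user = app
password = secret
database = appdb
host = localhost
socket = /var/run/mysqld/mysqld.sock
'''

with tempfile.NamedTemporaryFile('w', suffix='.cnf', delete=False) as f:
    f.write(CNF)
    path = f.name

# Same defaults the snippet passes, so missing keys come back as ''.
config = configparser.RawConfigParser({'user': '', 'password': '', 'database': '',
                                       'host': '', 'port': '', 'socket': ''})
config.read(path)
host = config.get('client', 'host')
sock = config.get('client', 'socket')
# Mirrors the snippet's quirk: a localhost host plus a socket entry yields the
# socket path in the host position of the returned tuple.
print(sock if (host == 'localhost' and sock) else host)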
6,629
def assert_config_change_dict(actual_result, expected_result):
    change_diffs = {}
    for (file_path, actual_changes) in actual_result.items():
        expected_changes = expected_result[file_path]
        actual_adds = actual_changes[0]
        actual_removes = actual_changes[1]
        expected_adds = expected_changes[0]
        expected_removes = expected_changes[1]
        unexpected_adds = sorted(set(actual_adds) - set(expected_adds))
        not_present_adds = sorted(set(expected_adds) - set(actual_adds))
        unexpected_removes = sorted(set(actual_removes) - set(expected_removes))
        not_present_removes = sorted(set(expected_removes) - set(actual_removes))
        change_diffs[file_path] = (unexpected_adds, not_present_adds,
                                   unexpected_removes, not_present_removes)
    return change_diffs
[ "def", "assert_config_change_dict", "(", "actual_result", ",", "expected_result", ")", ":", "change_diffs", "=", "{", "}", "for", "(", "file_path", ",", "actual_changes", ")", "in", "actual_result", ".", "items", "(", ")", ":", "expected_changes", "=", "expected_result", "[", "file_path", "]", "actual_adds", "=", "actual_changes", "[", "0", "]", "actual_removes", "=", "actual_changes", "[", "1", "]", "expected_adds", "=", "expected_changes", "[", "0", "]", "expected_removes", "=", "expected_changes", "[", "1", "]", "unexpected_adds", "=", "sorted", "(", "(", "set", "(", "actual_adds", ")", "-", "set", "(", "expected_adds", ")", ")", ")", "not_present_adds", "=", "sorted", "(", "(", "set", "(", "expected_adds", ")", "-", "set", "(", "actual_adds", ")", ")", ")", "unexpected_removes", "=", "sorted", "(", "(", "set", "(", "actual_removes", ")", "-", "set", "(", "expected_removes", ")", ")", ")", "not_present_removes", "=", "sorted", "(", "(", "set", "(", "expected_removes", ")", "-", "set", "(", "actual_removes", ")", ")", ")", "change_diffs", "[", "file_path", "]", "=", "(", "unexpected_adds", ",", "not_present_adds", ",", "unexpected_removes", ",", "not_present_removes", ")", "return", "change_diffs" ]
calculates unexpected line changes .
train
false
6,630
def get_installation_key():
    try:
        return PersistentCacheEntry.objects.get(**INSTALLATION_KEY_KWARGS).data
    except ObjectDoesNotExist:
        key = get_random_string(48)
        PersistentCacheEntry.objects.create(data=key, **INSTALLATION_KEY_KWARGS)
        return key
[ "def", "get_installation_key", "(", ")", ":", "try", ":", "return", "PersistentCacheEntry", ".", "objects", ".", "get", "(", "**", "INSTALLATION_KEY_KWARGS", ")", ".", "data", "except", "ObjectDoesNotExist", ":", "key", "=", "get_random_string", "(", "48", ")", "PersistentCacheEntry", ".", "objects", ".", "create", "(", "data", "=", "key", ",", "**", "INSTALLATION_KEY_KWARGS", ")", "return", "key" ]
get the unique installation id for this shuup instance .
train
false
6,631
def submit_isrcs(recording_isrcs):
    rec2isrcs = dict()
    for (rec, isrcs) in recording_isrcs.items():
        rec2isrcs[rec] = isrcs if isinstance(isrcs, list) else [isrcs]
    query = mbxml.make_isrc_request(rec2isrcs)
    return _do_mb_post('recording', query)
[ "def", "submit_isrcs", "(", "recording_isrcs", ")", ":", "rec2isrcs", "=", "dict", "(", ")", "for", "(", "rec", ",", "isrcs", ")", "in", "recording_isrcs", ".", "items", "(", ")", ":", "rec2isrcs", "[", "rec", "]", "=", "(", "isrcs", "if", "isinstance", "(", "isrcs", ",", "list", ")", "else", "[", "isrcs", "]", ")", "query", "=", "mbxml", ".", "make_isrc_request", "(", "rec2isrcs", ")", "return", "_do_mb_post", "(", "'recording'", ",", "query", ")" ]
submit isrcs .
train
false
6,632
def build_sparse():
    data = {'di': sparblock([bool(x) for x in range(1, 100)]),
            'ci': sparblock([bool(not x) for x in range(1, 100)]),
            'hr': sparblock([int(x) for x in range(1, 100)]),
            'ir': sparblock([int(2 * x) for x in range(1, 100)])}
    return data
[ "def", "build_sparse", "(", ")", ":", "data", "=", "{", "'di'", ":", "sparblock", "(", "[", "bool", "(", "x", ")", "for", "x", "in", "range", "(", "1", ",", "100", ")", "]", ")", ",", "'ci'", ":", "sparblock", "(", "[", "bool", "(", "(", "not", "x", ")", ")", "for", "x", "in", "range", "(", "1", ",", "100", ")", "]", ")", ",", "'hr'", ":", "sparblock", "(", "[", "int", "(", "x", ")", "for", "x", "in", "range", "(", "1", ",", "100", ")", "]", ")", ",", "'ir'", ":", "sparblock", "(", "[", "int", "(", "(", "2", "*", "x", ")", ")", "for", "x", "in", "range", "(", "1", ",", "100", ")", "]", ")", "}", "return", "data" ]
this builds a quick mock sparse datastore with 100 values for each discrete .
train
false
6,633
def _preprocess_context_html(context):
    for key in context:
        if isinstance(context[key], BaseForm):
            form = context[key]
            for fname in form.fields:
                field = form.fields[fname]
                try:
                    if field.widget.attrs and ('popuplink' in field.widget.attrs):
                        field.help_text += ('<a href="%s" field="id_%s" id="link-%s" class="inline-link add-link popup-link">%s</a>'
                                            % (field.widget.attrs['popuplink'], fname, fname, _('New')))
                except Exception:
                    pass
    return context
[ "def", "_preprocess_context_html", "(", "context", ")", ":", "for", "key", "in", "context", ":", "if", "isinstance", "(", "context", "[", "key", "]", ",", "BaseForm", ")", ":", "form", "=", "context", "[", "key", "]", "for", "fname", "in", "form", ".", "fields", ":", "field", "=", "form", ".", "fields", "[", "fname", "]", "try", ":", "if", "(", "field", ".", "widget", ".", "attrs", "and", "(", "'popuplink'", "in", "field", ".", "widget", ".", "attrs", ")", ")", ":", "field", ".", "help_text", "+=", "(", "'<a href=\"%s\" field=\"id_%s\" id=\"link-%s\" class=\"inline-link add-link popup-link\">%s</a>'", "%", "(", "field", ".", "widget", ".", "attrs", "[", "'popuplink'", "]", ",", "fname", ",", "fname", ",", "_", "(", "'New'", ")", ")", ")", "except", "Exception", ":", "pass", "return", "context" ]
prepares context to be rendered for html .
train
false
6,634
@pytest.mark.skipif(lb_enabled(), reason='Load Balancer enabled')
def test_if_minuteman_disabled(dcos_api_session):
    data = check_output(['/usr/bin/env', 'ip', 'rule'])
    assert str(data).find('9999') == -1
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "lb_enabled", "(", ")", ",", "reason", "=", "'Load Balancer enabled'", ")", "def", "test_if_minuteman_disabled", "(", "dcos_api_session", ")", ":", "data", "=", "check_output", "(", "[", "'/usr/bin/env'", ",", "'ip'", ",", "'rule'", "]", ")", "assert", "(", "str", "(", "data", ")", ".", "find", "(", "'9999'", ")", "==", "(", "-", "1", ")", ")" ]
test to make sure minuteman is disabled .
train
false
6,635
@pytest.mark.django_db
def test_contributors_include_anon(member, anon_submission_unit):
    anon = User.objects.get(username='nobody')
    contribs = Contributors(include_anon=True)
    someuser = UserFactory()
    assert contribs.include_anon is True
    assert (list(contribs.user_qs)
            == list(User.objects.exclude(username__in=['system', 'default'])))
    assert someuser in contribs.user_qs
    assert anon in contribs.user_qs
    assert (sorted(contribs)
            == sorted(contribs.contributors)
            == sorted(set(contribs.user_qs.filter(contribs.user_filters)
                          .values_list('username', flat=True)))
            == sorted(set(contribs.user_qs.filter(submission__gt=0)
                          .values_list('username', flat=True))))
    for username in contribs:
        assert contribs[username] == contribs.contributors[username]
        assert username in contribs
    assert anon.username in contribs
    assert contribs.contributors == _contributors_list(contribs)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_contributors_include_anon", "(", "member", ",", "anon_submission_unit", ")", ":", "anon", "=", "User", ".", "objects", ".", "get", "(", "username", "=", "'nobody'", ")", "contribs", "=", "Contributors", "(", "include_anon", "=", "True", ")", "someuser", "=", "UserFactory", "(", ")", "assert", "(", "contribs", ".", "include_anon", "is", "True", ")", "assert", "(", "list", "(", "contribs", ".", "user_qs", ")", "==", "list", "(", "User", ".", "objects", ".", "exclude", "(", "username__in", "=", "[", "'system'", ",", "'default'", "]", ")", ")", ")", "assert", "(", "someuser", "in", "contribs", ".", "user_qs", ")", "assert", "(", "anon", "in", "contribs", ".", "user_qs", ")", "assert", "(", "sorted", "(", "contribs", ")", "==", "sorted", "(", "contribs", ".", "contributors", ")", "==", "sorted", "(", "set", "(", "contribs", ".", "user_qs", ".", "filter", "(", "contribs", ".", "user_filters", ")", ".", "values_list", "(", "'username'", ",", "flat", "=", "True", ")", ")", ")", "==", "sorted", "(", "set", "(", "contribs", ".", "user_qs", ".", "filter", "(", "submission__gt", "=", "0", ")", ".", "values_list", "(", "'username'", ",", "flat", "=", "True", ")", ")", ")", ")", "for", "username", "in", "contribs", ":", "assert", "(", "contribs", "[", "username", "]", "==", "contribs", ".", "contributors", "[", "username", "]", ")", "assert", "(", "username", "in", "contribs", ")", "assert", "(", "anon", ".", "username", "in", "contribs", ")", "assert", "(", "contribs", ".", "contributors", "==", "_contributors_list", "(", "contribs", ")", ")" ]
contributors across the site .
train
false
6,636
def hash_password(password):
    return hash_password_PBKDF2(password)
[ "def", "hash_password", "(", "password", ")", ":", "return", "hash_password_PBKDF2", "(", "password", ")" ]
create a password hash from a given string for protecting a worksheet only .
train
false
6,637
def show_firewall_rule(firewall_rule, profile=None):
    conn = _auth(profile)
    return conn.show_firewall_rule(firewall_rule)
[ "def", "show_firewall_rule", "(", "firewall_rule", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_firewall_rule", "(", "firewall_rule", ")" ]
fetches information of a specific firewall rule .
train
false
6,638
def test_find_number_1():
    s = 'jashlhl123sfs'
    r = find_number(s)
    assert s[r[0]:r[1]] == '123'
[ "def", "test_find_number_1", "(", ")", ":", "s", "=", "'jashlhl123sfs'", "r", "=", "find_number", "(", "s", ")", "assert", "(", "s", "[", "r", "[", "0", "]", ":", "r", "[", "1", "]", "]", "==", "'123'", ")" ]
tests that we find an int among letters .
train
false
6,639
def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
    line = clean_lines.elided[linenum]
    matched = Match('\\s*(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)', line)
    if not matched:
        return
    if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
        if nesting_state.stack[-1].access != 'private':
            error(filename, linenum, 'readability/constructors', 3,
                  '%s must be in the private: section' % matched.group(1))
    else:
        pass
[ "def", "CheckAccess", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "nesting_state", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "matched", "=", "Match", "(", "'\\\\s*(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)'", ",", "line", ")", "if", "(", "not", "matched", ")", ":", "return", "if", "(", "nesting_state", ".", "stack", "and", "isinstance", "(", "nesting_state", ".", "stack", "[", "(", "-", "1", ")", "]", ",", "_ClassInfo", ")", ")", ":", "if", "(", "nesting_state", ".", "stack", "[", "(", "-", "1", ")", "]", ".", "access", "!=", "'private'", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/constructors'", ",", "3", ",", "(", "'%s must be in the private: section'", "%", "matched", ".", "group", "(", "1", ")", ")", ")", "else", ":", "pass" ]
checks for improper use of disallow* macros .
train
true
6,640
def master_compile(master_opts, minion_opts, grains, id_, saltenv):
    st_ = MasterHighState(master_opts, minion_opts, grains, id_, saltenv)
    return st_.compile_highstate()
[ "def", "master_compile", "(", "master_opts", ",", "minion_opts", ",", "grains", ",", "id_", ",", "saltenv", ")", ":", "st_", "=", "MasterHighState", "(", "master_opts", ",", "minion_opts", ",", "grains", ",", "id_", ",", "saltenv", ")", "return", "st_", ".", "compile_highstate", "(", ")" ]
compile the master side low state data .
train
true
6,641
def _stdin_ready_posix():
    (infds, outfds, erfds) = select.select([sys.stdin], [], [], 0)
    return bool(infds)
[ "def", "_stdin_ready_posix", "(", ")", ":", "(", "infds", ",", "outfds", ",", "erfds", ")", "=", "select", ".", "select", "(", "[", "sys", ".", "stdin", "]", ",", "[", "]", ",", "[", "]", ",", "0", ")", "return", "bool", "(", "infds", ")" ]
return true if theres something to read on stdin .
train
true
6,642
def test_io_ascii_write():
    from ...io.ascii.connect import _get_connectors_table
    t = QTable(MIXIN_COLS)
    for fmt in _get_connectors_table():
        if (fmt['Format'] == 'ascii.ecsv') and (not HAS_YAML):
            continue
        if fmt['Write'] and ('.fast_' not in fmt['Format']):
            out = StringIO()
            t.write(out, format=fmt['Format'])
[ "def", "test_io_ascii_write", "(", ")", ":", "from", "...", "io", ".", "ascii", ".", "connect", "import", "_get_connectors_table", "t", "=", "QTable", "(", "MIXIN_COLS", ")", "for", "fmt", "in", "_get_connectors_table", "(", ")", ":", "if", "(", "(", "fmt", "[", "'Format'", "]", "==", "'ascii.ecsv'", ")", "and", "(", "not", "HAS_YAML", ")", ")", ":", "continue", "if", "(", "fmt", "[", "'Write'", "]", "and", "(", "'.fast_'", "not", "in", "fmt", "[", "'Format'", "]", ")", ")", ":", "out", "=", "StringIO", "(", ")", "t", ".", "write", "(", "out", ",", "format", "=", "fmt", "[", "'Format'", "]", ")" ]
test that table with mixin column can be written by io .
train
false
6,643
def _find_sources(sources, target, score_func):
    if isinstance(score_func, string_types):
        score_func = get_score_funcs().get(score_func, score_func)
    if not callable(score_func):
        raise ValueError('%s is not a valid score_func.' % score_func)
    scores = (score_func(sources, target) if target is not None
              else score_func(sources, 1))
    return scores
[ "def", "_find_sources", "(", "sources", ",", "target", ",", "score_func", ")", ":", "if", "isinstance", "(", "score_func", ",", "string_types", ")", ":", "score_func", "=", "get_score_funcs", "(", ")", ".", "get", "(", "score_func", ",", "score_func", ")", "if", "(", "not", "callable", "(", "score_func", ")", ")", ":", "raise", "ValueError", "(", "(", "'%s is not a valid score_func.'", "%", "score_func", ")", ")", "scores", "=", "(", "score_func", "(", "sources", ",", "target", ")", "if", "(", "target", "is", "not", "None", ")", "else", "score_func", "(", "sources", ",", "1", ")", ")", "return", "scores" ]
aux function .
train
false
6,644
def getDiagonalSwitchedTetragridByRadians(angleRadians, diagonals):
    return getDiagonalSwitchedTetragridByPolar(diagonals,
                                               euclidean.getWiddershinsUnitPolar(angleRadians))
[ "def", "getDiagonalSwitchedTetragridByRadians", "(", "angleRadians", ",", "diagonals", ")", ":", "return", "getDiagonalSwitchedTetragridByPolar", "(", "diagonals", ",", "euclidean", ".", "getWiddershinsUnitPolar", "(", "angleRadians", ")", ")" ]
get the diagonals and switched matrix by radians .
train
false
6,645
def oauth2_dance(consumer_key, consumer_secret, token_filename=None):
    twitter = Twitter(auth=OAuth2(consumer_key=consumer_key,
                                  consumer_secret=consumer_secret),
                      format='', api_version='')
    token = json.loads(twitter.oauth2.token(grant_type='client_credentials'))['access_token']
    if token_filename:
        write_bearer_token_file(token_filename, token)
    return token
[ "def", "oauth2_dance", "(", "consumer_key", ",", "consumer_secret", ",", "token_filename", "=", "None", ")", ":", "twitter", "=", "Twitter", "(", "auth", "=", "OAuth2", "(", "consumer_key", "=", "consumer_key", ",", "consumer_secret", "=", "consumer_secret", ")", ",", "format", "=", "''", ",", "api_version", "=", "''", ")", "token", "=", "json", ".", "loads", "(", "twitter", ".", "oauth2", ".", "token", "(", "grant_type", "=", "'client_credentials'", ")", ")", "[", "'access_token'", "]", "if", "token_filename", ":", "write_bearer_token_file", "(", "token_filename", ",", "token", ")", "return", "token" ]
perform the oauth2 dance to transform a consumer key and secret into a bearer token .
train
false
6,646
@register.filter
def username_or(user, attr):
    if not settings.ACCOUNTS_NO_USERNAME:
        attr = u'username'
    value = getattr(user, attr)
    if callable(value):
        value = value()
    return value
[ "@", "register", ".", "filter", "def", "username_or", "(", "user", ",", "attr", ")", ":", "if", "(", "not", "settings", ".", "ACCOUNTS_NO_USERNAME", ")", ":", "attr", "=", "u'username'", "value", "=", "getattr", "(", "user", ",", "attr", ")", "if", "callable", "(", "value", ")", ":", "value", "=", "value", "(", ")", "return", "value" ]
returns the users username for display .
train
false
6,647
def onInit(isReload):
    DEBUG_MSG('onInit::isReload:%s' % isReload)
[ "def", "onInit", "(", "isReload", ")", ":", "DEBUG_MSG", "(", "(", "'onInit::isReload:%s'", "%", "isReload", ")", ")" ]
kbengine method .
train
false
6,648
def do_pickle(data):
    return to_str(dumps(data, protocol=PICKLE_PROTOCOL))
[ "def", "do_pickle", "(", "data", ")", ":", "return", "to_str", "(", "dumps", "(", "data", ",", "protocol", "=", "PICKLE_PROTOCOL", ")", ")" ]
perform pickle to string .
train
false
6,649
def add_features_to_http_headers(features, headers):
    if features:
        for (k, v) in features.items():
            if k.lower() in FEATURE_BLACKLIST:
                raise exception.UnsupportedHeaderFeature(feature=k)
            if v is not None:
                headers[k.lower()] = unicode(v)
[ "def", "add_features_to_http_headers", "(", "features", ",", "headers", ")", ":", "if", "features", ":", "for", "(", "k", ",", "v", ")", "in", "features", ".", "items", "(", ")", ":", "if", "(", "k", ".", "lower", "(", ")", "in", "FEATURE_BLACKLIST", ")", ":", "raise", "exception", ".", "UnsupportedHeaderFeature", "(", "feature", "=", "k", ")", "if", "(", "v", "is", "not", "None", ")", ":", "headers", "[", "k", ".", "lower", "(", ")", "]", "=", "unicode", "(", "v", ")" ]
adds additional headers representing glance features to be enabled .
train
false
6,651
def get_sequencer_id(migrate_engine, sequencer_info):
    cmd = 'SELECT sequencer.id, form_values.content FROM sequencer, form_values WHERE sequencer.form_values_id=form_values.id'
    result = migrate_engine.execute(cmd)
    for row in result:
        sequencer_id = row[0]
        values = str(row[1])
        if not values.strip():
            continue
        values = loads(values)
        if values and isinstance(values, dict):
            if ((sequencer_info.get('host', '') == values.get('field_0', ''))
                    and (sequencer_info.get('username', '') == values.get('field_1', ''))
                    and (sequencer_info.get('password', '') == values.get('field_2', ''))
                    and (sequencer_info.get('data_dir', '') == values.get('field_3', ''))
                    and (sequencer_info.get('rename_dataset', '') == values.get('field_4', ''))):
                return sequencer_id
    return None
[ "def", "get_sequencer_id", "(", "migrate_engine", ",", "sequencer_info", ")", ":", "cmd", "=", "'SELECT sequencer.id, form_values.content FROM sequencer, form_values WHERE sequencer.form_values_id=form_values.id'", "result", "=", "migrate_engine", ".", "execute", "(", "cmd", ")", "for", "row", "in", "result", ":", "sequencer_id", "=", "row", "[", "0", "]", "values", "=", "str", "(", "row", "[", "1", "]", ")", "if", "(", "not", "values", ".", "strip", "(", ")", ")", ":", "continue", "values", "=", "loads", "(", "values", ")", "if", "(", "values", "and", "isinstance", "(", "values", ",", "dict", ")", ")", ":", "if", "(", "(", "sequencer_info", ".", "get", "(", "'host'", ",", "''", ")", "==", "values", ".", "get", "(", "'field_0'", ",", "''", ")", ")", "and", "(", "sequencer_info", ".", "get", "(", "'username'", ",", "''", ")", "==", "values", ".", "get", "(", "'field_1'", ",", "''", ")", ")", "and", "(", "sequencer_info", ".", "get", "(", "'password'", ",", "''", ")", "==", "values", ".", "get", "(", "'field_2'", ",", "''", ")", ")", "and", "(", "sequencer_info", ".", "get", "(", "'data_dir'", ",", "''", ")", "==", "values", ".", "get", "(", "'field_3'", ",", "''", ")", ")", "and", "(", "sequencer_info", ".", "get", "(", "'rename_dataset'", ",", "''", ")", "==", "values", ".", "get", "(", "'field_4'", ",", "''", ")", ")", ")", ":", "return", "sequencer_id", "return", "None" ]
get the sequencer id corresponding to the sequencer information .
train
false
6,652
def axis_slice(a, start=None, stop=None, step=None, axis=-1):
    a_slice = [slice(None)] * a.ndim
    a_slice[axis] = slice(start, stop, step)
    b = a[a_slice]
    return b
[ "def", "axis_slice", "(", "a", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "step", "=", "None", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "a_slice", "=", "(", "[", "slice", "(", "None", ")", "]", "*", "a", ".", "ndim", ")", "a_slice", "[", "axis", "]", "=", "slice", "(", "start", ",", "stop", ",", "step", ")", "b", "=", "a", "[", "a_slice", "]", "return", "b" ]
take a slice along axis axis from a .
train
false
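axis_slice is easy to try with NumPy; note that recent NumPy requires a tuple (not a list) of slice objects for multidimensional indexing, so this sketch converts before indexing:

import numpy as np

def axis_slice(a, start=None, stop=None, step=None, axis=-1):
    a_slice = [slice(None)] * a.ndim
    a_slice[axis] = slice(start, stop, step)
    return a[tuple(a_slice)]  # tuple() avoids the deprecated list-index form

x = np.arange(12).reshape(3, 4)
print(axis_slice(x, stop=2, axis=1))   # first two columns of each row
print(axis_slice(x, step=-1, axis=0))  # rows in reverse order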
6,653
@onlyif_cmds_exist('latex', 'dvipng')
def test_latex_to_png_dvipng_runs():
    def mock_kpsewhich(filename):
        nt.assert_equal(filename, 'breqn.sty')
        return None
    for (s, wrap) in [(u'$$x^2$$', False), (u'x^2', True)]:
        yield (latextools.latex_to_png_dvipng, s, wrap)
        with patch.object(latextools, 'kpsewhich', mock_kpsewhich):
            yield (latextools.latex_to_png_dvipng, s, wrap)
[ "@", "onlyif_cmds_exist", "(", "'latex'", ",", "'dvipng'", ")", "def", "test_latex_to_png_dvipng_runs", "(", ")", ":", "def", "mock_kpsewhich", "(", "filename", ")", ":", "nt", ".", "assert_equal", "(", "filename", ",", "'breqn.sty'", ")", "return", "None", "for", "(", "s", ",", "wrap", ")", "in", "[", "(", "u'$$x^2$$'", ",", "False", ")", ",", "(", "u'x^2'", ",", "True", ")", "]", ":", "(", "yield", "(", "latextools", ".", "latex_to_png_dvipng", ",", "s", ",", "wrap", ")", ")", "with", "patch", ".", "object", "(", "latextools", ",", "'kpsewhich'", ",", "mock_kpsewhich", ")", ":", "(", "yield", "(", "latextools", ".", "latex_to_png_dvipng", ",", "s", ",", "wrap", ")", ")" ]
test that latex_to_png_dvipng just runs without error .
train
false
6,656
def loopUntil(predicate, interval=0):
    from twisted.internet import task
    d = defer.Deferred()

    def check():
        res = predicate()
        if res:
            d.callback(res)
    call = task.LoopingCall(check)

    def stop(result):
        call.stop()
        return result
    d.addCallback(stop)
    d2 = call.start(interval)
    d2.addErrback(d.errback)
    return d
[ "def", "loopUntil", "(", "predicate", ",", "interval", "=", "0", ")", ":", "from", "twisted", ".", "internet", "import", "task", "d", "=", "defer", ".", "Deferred", "(", ")", "def", "check", "(", ")", ":", "res", "=", "predicate", "(", ")", "if", "res", ":", "d", ".", "callback", "(", "res", ")", "call", "=", "task", ".", "LoopingCall", "(", "check", ")", "def", "stop", "(", "result", ")", ":", "call", ".", "stop", "(", ")", "return", "result", "d", ".", "addCallback", "(", "stop", ")", "d2", "=", "call", ".", "start", "(", "interval", ")", "d2", ".", "addErrback", "(", "d", ".", "errback", ")", "return", "d" ]
poor excuse for an event notification helper .
train
false
6,657
def find_job_type(guest, instance):
    try:
        if guest.is_active():
            LOG.debug('VM running on src, migration failed', instance=instance)
            return libvirt.VIR_DOMAIN_JOB_FAILED
        else:
            LOG.debug('VM is shutoff, migration finished', instance=instance)
            return libvirt.VIR_DOMAIN_JOB_COMPLETED
    except libvirt.libvirtError as ex:
        LOG.debug('Error checking domain status %(ex)s', {'ex': ex}, instance=instance)
        if ex.get_error_code() == libvirt.VIR_ERR_NO_DOMAIN:
            LOG.debug('VM is missing, migration finished', instance=instance)
            return libvirt.VIR_DOMAIN_JOB_COMPLETED
        else:
            LOG.info(_LI('Error %(ex)s, migration failed'), {'ex': ex}, instance=instance)
            return libvirt.VIR_DOMAIN_JOB_FAILED
[ "def", "find_job_type", "(", "guest", ",", "instance", ")", ":", "try", ":", "if", "guest", ".", "is_active", "(", ")", ":", "LOG", ".", "debug", "(", "'VM running on src, migration failed'", ",", "instance", "=", "instance", ")", "return", "libvirt", ".", "VIR_DOMAIN_JOB_FAILED", "else", ":", "LOG", ".", "debug", "(", "'VM is shutoff, migration finished'", ",", "instance", "=", "instance", ")", "return", "libvirt", ".", "VIR_DOMAIN_JOB_COMPLETED", "except", "libvirt", ".", "libvirtError", "as", "ex", ":", "LOG", ".", "debug", "(", "'Error checking domain status %(ex)s'", ",", "{", "'ex'", ":", "ex", "}", ",", "instance", "=", "instance", ")", "if", "(", "ex", ".", "get_error_code", "(", ")", "==", "libvirt", ".", "VIR_ERR_NO_DOMAIN", ")", ":", "LOG", ".", "debug", "(", "'VM is missing, migration finished'", ",", "instance", "=", "instance", ")", "return", "libvirt", ".", "VIR_DOMAIN_JOB_COMPLETED", "else", ":", "LOG", ".", "info", "(", "_LI", "(", "'Error %(ex)s, migration failed'", ")", ",", "{", "'ex'", ":", "ex", "}", ",", "instance", "=", "instance", ")", "return", "libvirt", ".", "VIR_DOMAIN_JOB_FAILED" ]
determine the current migration job type .
train
false
6,659
def assert_bool(dist, attr, value):
    if bool(value) != value:
        raise DistutilsSetupError('%r must be a boolean value (got %r)' % (attr, value))
[ "def", "assert_bool", "(", "dist", ",", "attr", ",", "value", ")", ":", "if", "(", "bool", "(", "value", ")", "!=", "value", ")", ":", "raise", "DistutilsSetupError", "(", "(", "'%r must be a boolean value (got %r)'", "%", "(", "attr", ",", "value", ")", ")", ")" ]
verify that value is boolean .
train
true
6,660
@register.tag('include')
def do_include(parser, token):
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError('%r tag takes at least one argument: the name of the template to be included.' % bits[0])
    options = {}
    remaining_bits = bits[2:]
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=False)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least one keyword argument.' % bits[0])
        elif option == 'only':
            value = True
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' % (bits[0], option))
        options[option] = value
    isolated_context = options.get('only', False)
    namemap = options.get('with', {})
    path = bits[1]
    if (path[0] in ('"', "'")) and (path[-1] == path[0]):
        return ConstantIncludeNode(path[1:-1], extra_context=namemap,
                                   isolated_context=isolated_context)
    return IncludeNode(parser.compile_filter(bits[1]), extra_context=namemap,
                       isolated_context=isolated_context)
[ "@", "register", ".", "tag", "(", "'include'", ")", "def", "do_include", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "<", "2", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "'%r tag takes at least one argument: the name of the template to be included.'", "%", "bits", "[", "0", "]", ")", ")", "options", "=", "{", "}", "remaining_bits", "=", "bits", "[", "2", ":", "]", "while", "remaining_bits", ":", "option", "=", "remaining_bits", ".", "pop", "(", "0", ")", "if", "(", "option", "in", "options", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "'The %r option was specified more than once.'", "%", "option", ")", ")", "if", "(", "option", "==", "'with'", ")", ":", "value", "=", "token_kwargs", "(", "remaining_bits", ",", "parser", ",", "support_legacy", "=", "False", ")", "if", "(", "not", "value", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "'\"with\" in %r tag needs at least one keyword argument.'", "%", "bits", "[", "0", "]", ")", ")", "elif", "(", "option", "==", "'only'", ")", ":", "value", "=", "True", "else", ":", "raise", "TemplateSyntaxError", "(", "(", "'Unknown argument for %r tag: %r.'", "%", "(", "bits", "[", "0", "]", ",", "option", ")", ")", ")", "options", "[", "option", "]", "=", "value", "isolated_context", "=", "options", ".", "get", "(", "'only'", ",", "False", ")", "namemap", "=", "options", ".", "get", "(", "'with'", ",", "{", "}", ")", "path", "=", "bits", "[", "1", "]", "if", "(", "(", "path", "[", "0", "]", "in", "(", "'\"'", ",", "\"'\"", ")", ")", "and", "(", "path", "[", "(", "-", "1", ")", "]", "==", "path", "[", "0", "]", ")", ")", ":", "return", "ConstantIncludeNode", "(", "path", "[", "1", ":", "(", "-", "1", ")", "]", ",", "extra_context", "=", "namemap", ",", "isolated_context", "=", "isolated_context", ")", "return", "IncludeNode", "(", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", ",", "extra_context", "=", "namemap", ",", "isolated_context", "=", "isolated_context", ")" ]
loads a template and renders it with the current context .
train
false
6,661
def make_led_brightness_message(brightness):
    # The flattened source read "raise ((0 <= brightness <= 127) or AssertionError)",
    # a decompilation artifact of an assert statement; restored here.
    assert 0 <= brightness <= 127
    return make_message(6, (brightness,))
[ "def", "make_led_brightness_message", "(", "brightness", ")", ":", "raise", "(", "(", "0", "<=", "brightness", "<=", "127", ")", "or", "AssertionError", ")", "return", "make_message", "(", "6", ",", "(", "brightness", ",", ")", ")" ]
sets a new brightness and reapplies the color palette .
train
false
6,663
def _check_frozen(method, indent=u''):
    def wrapped(self, *args, **kwargs):
        if self._frozen:
            raise ValueError(_FROZEN_ERROR)
        else:
            return method(self, *args, **kwargs)
    wrapped.__name__ = method.__name__
    wrapped.__doc__ = (method.__doc__ or u'') + (_FROZEN_NOTICE % indent)
    return wrapped
[ "def", "_check_frozen", "(", "method", ",", "indent", "=", "u''", ")", ":", "def", "wrapped", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "self", ".", "_frozen", ":", "raise", "ValueError", "(", "_FROZEN_ERROR", ")", "else", ":", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "wrapped", ".", "__name__", "=", "method", ".", "__name__", "wrapped", ".", "__doc__", "=", "(", "(", "method", ".", "__doc__", "or", "u''", ")", "+", "(", "_FROZEN_NOTICE", "%", "indent", ")", ")", "return", "wrapped" ]
given a method function , return a wrapped version that raises a valueerror once the object is frozen .
train
false
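A minimal sketch of how a freeze-guard decorator like _check_frozen is applied; _FROZEN_ERROR and _FROZEN_NOTICE are module-level constants in the original and are stubbed here with assumed values:

_FROZEN_ERROR = 'object is frozen'  # assumed stub
_FROZEN_NOTICE = '%sRaises ValueError once the object is frozen.'  # assumed stub

def _check_frozen(method, indent=u''):
    def wrapped(self, *args, **kwargs):
        if self._frozen:
            raise ValueError(_FROZEN_ERROR)
        return method(self, *args, **kwargs)
    wrapped.__name__ = method.__name__
    wrapped.__doc__ = (method.__doc__ or u'') + (_FROZEN_NOTICE % indent)
    return wrapped

class Box(object):
    def __init__(self):
        self._frozen = False
        self.value = None
    set_value = _check_frozen(lambda self, v: setattr(self, 'value', v))

b = Box()
b.set_value(1)     # allowed while unfrozen
b._frozen = True
# b.set_value(2)   # would now raise ValueError('object is frozen')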
6,665
def factory(type):
    return ArrowFactory(type)
[ "def", "factory", "(", "type", ")", ":", "return", "ArrowFactory", "(", "type", ")" ]
factory method .
train
false
6,667
def getTagBracketedLine(tagName, value):
    return '(<%s> %s </%s>)' % (tagName, value, tagName)
[ "def", "getTagBracketedLine", "(", "tagName", ",", "value", ")", ":", "return", "(", "'(<%s> %s </%s>)'", "%", "(", "tagName", ",", "value", ",", "tagName", ")", ")" ]
get line with a begin tag .
train
false
6,668
def _quota_reservations_query(session, context, reservations):
    return model_query(context, models.Reservation, read_deleted='no',
                       session=session).filter(
        models.Reservation.uuid.in_(reservations)).with_lockmode('update')
[ "def", "_quota_reservations_query", "(", "session", ",", "context", ",", "reservations", ")", ":", "return", "model_query", "(", "context", ",", "models", ".", "Reservation", ",", "read_deleted", "=", "'no'", ",", "session", "=", "session", ")", ".", "filter", "(", "models", ".", "Reservation", ".", "uuid", ".", "in_", "(", "reservations", ")", ")", ".", "with_lockmode", "(", "'update'", ")" ]
return the relevant reservations .
train
false
6,669
def thumbnail(infile, thumbfile, scale=0.1, interpolation='bilinear', preview=False):
    (basedir, basename) = os.path.split(infile)
    (baseout, extout) = os.path.splitext(thumbfile)
    im = imread(infile)
    (rows, cols, depth) = im.shape
    dpi = 100
    height = (float(rows) / dpi) * scale
    width = (float(cols) / dpi) * scale
    extension = extout.lower()
    if preview:
        import matplotlib.pyplot as plt
        fig = plt.figure(figsize=(width, height), dpi=dpi)
    else:
        if extension == '.png':
            from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
        elif extension == '.pdf':
            from matplotlib.backends.backend_pdf import FigureCanvasPDF as FigureCanvas
        elif extension == '.svg':
            from matplotlib.backends.backend_svg import FigureCanvasSVG as FigureCanvas
        else:
            raise ValueError("Can only handle extensions 'png', 'svg' or 'pdf'")
        from matplotlib.figure import Figure
        fig = Figure(figsize=(width, height), dpi=dpi)
        canvas = FigureCanvas(fig)
    ax = fig.add_axes([0, 0, 1, 1], aspect='auto', frameon=False,
                      xticks=[], yticks=[])
    (basename, ext) = os.path.splitext(basename)
    ax.imshow(im, aspect='auto', resample=True, interpolation='bilinear')
    fig.savefig(thumbfile, dpi=dpi)
    return fig
[ "def", "thumbnail", "(", "infile", ",", "thumbfile", ",", "scale", "=", "0.1", ",", "interpolation", "=", "'bilinear'", ",", "preview", "=", "False", ")", ":", "(", "basedir", ",", "basename", ")", "=", "os", ".", "path", ".", "split", "(", "infile", ")", "(", "baseout", ",", "extout", ")", "=", "os", ".", "path", ".", "splitext", "(", "thumbfile", ")", "im", "=", "imread", "(", "infile", ")", "(", "rows", ",", "cols", ",", "depth", ")", "=", "im", ".", "shape", "dpi", "=", "100", "height", "=", "(", "(", "float", "(", "rows", ")", "/", "dpi", ")", "*", "scale", ")", "width", "=", "(", "(", "float", "(", "cols", ")", "/", "dpi", ")", "*", "scale", ")", "extension", "=", "extout", ".", "lower", "(", ")", "if", "preview", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "width", ",", "height", ")", ",", "dpi", "=", "dpi", ")", "else", ":", "if", "(", "extension", "==", "'.png'", ")", ":", "from", "matplotlib", ".", "backends", ".", "backend_agg", "import", "FigureCanvasAgg", "as", "FigureCanvas", "elif", "(", "extension", "==", "'.pdf'", ")", ":", "from", "matplotlib", ".", "backends", ".", "backend_pdf", "import", "FigureCanvasPDF", "as", "FigureCanvas", "elif", "(", "extension", "==", "'.svg'", ")", ":", "from", "matplotlib", ".", "backends", ".", "backend_svg", "import", "FigureCanvasSVG", "as", "FigureCanvas", "else", ":", "raise", "ValueError", "(", "\"Can only handle extensions 'png', 'svg' or 'pdf'\"", ")", "from", "matplotlib", ".", "figure", "import", "Figure", "fig", "=", "Figure", "(", "figsize", "=", "(", "width", ",", "height", ")", ",", "dpi", "=", "dpi", ")", "canvas", "=", "FigureCanvas", "(", "fig", ")", "ax", "=", "fig", ".", "add_axes", "(", "[", "0", ",", "0", ",", "1", ",", "1", "]", ",", "aspect", "=", "'auto'", ",", "frameon", "=", "False", ",", "xticks", "=", "[", "]", ",", "yticks", "=", "[", "]", ")", "(", "basename", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "basename", ")", "ax", ".", "imshow", "(", "im", ",", "aspect", "=", "'auto'", ",", "resample", "=", "True", ",", "interpolation", "=", "'bilinear'", ")", "fig", ".", "savefig", "(", "thumbfile", ",", "dpi", "=", "dpi", ")", "return", "fig" ]
make a thumbnail of the image in infile with output filename thumbfile .
train
false
6,673
def get_group_counts(group_by, header_groups=None, fixed_headers=None,
                     extra_select_fields=None, **filter_data):
    query = models.TestView.objects.get_query_set_with_joins(filter_data)
    query = models.TestView.query_objects(filter_data, initial_query=query,
                                          apply_presentation=False)
    (count_alias, count_sql) = models.TestView.objects.get_count_sql(query)
    query = query.extra(select={count_alias: count_sql})
    if extra_select_fields:
        query = query.extra(select=extra_select_fields)
    query = models.TestView.apply_presentation(query, filter_data)
    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups or [],
                                                       fixed_headers or {})
    group_processor.process_group_dicts()
    return rpc_utils.prepare_for_serialization(group_processor.get_info_dict())
[ "def", "get_group_counts", "(", "group_by", ",", "header_groups", "=", "None", ",", "fixed_headers", "=", "None", ",", "extra_select_fields", "=", "None", ",", "**", "filter_data", ")", ":", "query", "=", "models", ".", "TestView", ".", "objects", ".", "get_query_set_with_joins", "(", "filter_data", ")", "query", "=", "models", ".", "TestView", ".", "query_objects", "(", "filter_data", ",", "initial_query", "=", "query", ",", "apply_presentation", "=", "False", ")", "(", "count_alias", ",", "count_sql", ")", "=", "models", ".", "TestView", ".", "objects", ".", "get_count_sql", "(", "query", ")", "query", "=", "query", ".", "extra", "(", "select", "=", "{", "count_alias", ":", "count_sql", "}", ")", "if", "extra_select_fields", ":", "query", "=", "query", ".", "extra", "(", "select", "=", "extra_select_fields", ")", "query", "=", "models", ".", "TestView", ".", "apply_presentation", "(", "query", ",", "filter_data", ")", "group_processor", "=", "tko_rpc_utils", ".", "GroupDataProcessor", "(", "query", ",", "group_by", ",", "(", "header_groups", "or", "[", "]", ")", ",", "(", "fixed_headers", "or", "{", "}", ")", ")", "group_processor", ".", "process_group_dicts", "(", ")", "return", "rpc_utils", ".", "prepare_for_serialization", "(", "group_processor", ".", "get_info_dict", "(", ")", ")" ]
queries against testview grouping by the specified fields and computing counts for each group .
train
false
6,674
def save_lang_conf(value):
    with open(LANG_FILE, 'w') as f:
        f.write(value)
[ "def", "save_lang_conf", "(", "value", ")", ":", "with", "open", "(", "LANG_FILE", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "value", ")" ]
save language setting to language config file .
train
false
6,675
def serialize_struct_type(struct_type, accessor, types):
    fields = []
    if struct_type == dereference_type(struct_type):
        memeber_operator = '.'
    else:
        memeber_operator = '->'
    structure = type_description(struct_type, types)['struct']
    for (field_name, field_type) in structure.iteritems():
        fields.append(serialize_type(field_type,
                                     ('((%s)(%s))' % (struct_type, accessor))
                                     + memeber_operator + field_name,
                                     types))
    return ', '.join(fields)
[ "def", "serialize_struct_type", "(", "struct_type", ",", "accessor", ",", "types", ")", ":", "fields", "=", "[", "]", "if", "(", "struct_type", "==", "dereference_type", "(", "struct_type", ")", ")", ":", "memeber_operator", "=", "'.'", "else", ":", "memeber_operator", "=", "'->'", "structure", "=", "type_description", "(", "struct_type", ",", "types", ")", "[", "'struct'", "]", "for", "(", "field_name", ",", "field_type", ")", "in", "structure", ".", "iteritems", "(", ")", ":", "fields", ".", "append", "(", "serialize_type", "(", "field_type", ",", "(", "(", "(", "'((%s)(%s))'", "%", "(", "struct_type", ",", "accessor", ")", ")", "+", "memeber_operator", ")", "+", "field_name", ")", ",", "types", ")", ")", "return", "', '", ".", "join", "(", "fields", ")" ]
returns a serialization statement for the given structure type .
train
false
6,676
def template():
    def prep(r):
        if r.component:
            if r.component_name == 'translate':
                table = s3db.survey_translate
                if r.component_id == None:
                    table.file.readable = False
                    table.file.writable = False
                else:
                    table.language.writable = False
                    table.code.writable = False
                s3db.configure('survey_translate', deletable=False)
        else:
            table = r.table
            s3_action_buttons(r)
            rows = db(table.status == 1).select(table.id)
            try:
                s3.actions[1]['restrict'].extend(str(row.id) for row in rows)
            except KeyError:
                s3.actions[1]['restrict'] = [str(row.id) for row in rows]
            except IndexError:
                pass
            s3.dataTableStyleAlert = [str(row.id) for row in rows]
            rows = db(table.status == 3).select(table.id)
            s3.dataTableStyleDisabled = [str(row.id) for row in rows]
            s3.dataTableStyleWarning = [str(row.id) for row in rows]
            rows = db(table.status == 4).select(table.id)
            s3.dataTableStyleWarning.extend(str(row.id) for row in rows)
            s3db.configure('survey_template',
                           orderby='survey_template.status',
                           create_next=URL(c='survey', f='template'),
                           update_next=URL(c='survey', f='template'))
        return True
    s3.prep = prep

    def postp(r, output):
        if r.component:
            template_id = r.id
            if r.component_name == 'translate':
                s3_action_buttons(r)
                s3.actions.extend([dict(label=str(T('Download')),
                                        _class='action-btn',
                                        url=r.url(method='translate_download',
                                                  component='translate',
                                                  component_id='[id]',
                                                  representation='xls')),
                                   dict(label=str(T('Upload')),
                                        _class='action-btn',
                                        url=URL(c=module, f='template',
                                                args=[template_id, 'translate', '[id]']))])
        return output
    s3.postp = postp

    if request.ajax:
        post = request.post_vars
        action = post.get('action')
        template_id = post.get('parent_id')
        section_id = post.get('section_id')
        section_text = post.get('section_text')
        if (action == 'section') and (template_id != None):
            id = db.survey_section.insert(name=section_text,
                                          template_id=template_id,
                                          cloned_section_id=section_id)
            if id is None:
                print 'Failed to insert record'
            return

    s3db.configure('survey_template', listadd=False)
    output = s3_rest_controller(rheader=s3db.survey_template_rheader)
    return output
[ "def", "template", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "r", ".", "component", ":", "if", "(", "r", ".", "component_name", "==", "'translate'", ")", ":", "table", "=", "s3db", ".", "survey_translate", "if", "(", "r", ".", "component_id", "==", "None", ")", ":", "table", ".", "file", ".", "readable", "=", "False", "table", ".", "file", ".", "writable", "=", "False", "else", ":", "table", ".", "language", ".", "writable", "=", "False", "table", ".", "code", ".", "writable", "=", "False", "s3db", ".", "configure", "(", "'survey_translate'", ",", "deletable", "=", "False", ")", "else", ":", "table", "=", "r", ".", "table", "s3_action_buttons", "(", "r", ")", "rows", "=", "db", "(", "(", "table", ".", "status", "==", "1", ")", ")", ".", "select", "(", "table", ".", "id", ")", "try", ":", "s3", ".", "actions", "[", "1", "]", "[", "'restrict'", "]", ".", "extend", "(", "(", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", ")", ")", "except", "KeyError", ":", "s3", ".", "actions", "[", "1", "]", "[", "'restrict'", "]", "=", "[", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", "]", "except", "IndexError", ":", "pass", "s3", ".", "dataTableStyleAlert", "=", "[", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", "]", "rows", "=", "db", "(", "(", "table", ".", "status", "==", "3", ")", ")", ".", "select", "(", "table", ".", "id", ")", "s3", ".", "dataTableStyleDisabled", "=", "[", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", "]", "s3", ".", "dataTableStyleWarning", "=", "[", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", "]", "rows", "=", "db", "(", "(", "table", ".", "status", "==", "4", ")", ")", ".", "select", "(", "table", ".", "id", ")", "s3", ".", "dataTableStyleWarning", ".", "extend", "(", "(", "str", "(", "row", ".", "id", ")", "for", "row", "in", "rows", ")", ")", "s3db", ".", "configure", "(", "'survey_template'", ",", "orderby", "=", "'survey_template.status'", ",", "create_next", "=", "URL", "(", "c", "=", "'survey'", ",", "f", "=", "'template'", ")", ",", "update_next", "=", "URL", "(", "c", "=", "'survey'", ",", "f", "=", "'template'", ")", ")", "return", "True", "s3", ".", "prep", "=", "prep", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "r", ".", "component", ":", "template_id", "=", "r", ".", "id", "if", "(", "r", ".", "component_name", "==", "'translate'", ")", ":", "s3_action_buttons", "(", "r", ")", "s3", ".", "actions", ".", "extend", "(", "[", "dict", "(", "label", "=", "str", "(", "T", "(", "'Download'", ")", ")", ",", "_class", "=", "'action-btn'", ",", "url", "=", "r", ".", "url", "(", "method", "=", "'translate_download'", ",", "component", "=", "'translate'", ",", "component_id", "=", "'[id]'", ",", "representation", "=", "'xls'", ")", ")", ",", "dict", "(", "label", "=", "str", "(", "T", "(", "'Upload'", ")", ")", ",", "_class", "=", "'action-btn'", ",", "url", "=", "URL", "(", "c", "=", "module", ",", "f", "=", "'template'", ",", "args", "=", "[", "template_id", ",", "'translate'", ",", "'[id]'", "]", ")", ")", "]", ")", "return", "output", "s3", ".", "postp", "=", "postp", "if", "request", ".", "ajax", ":", "post", "=", "request", ".", "post_vars", "action", "=", "post", ".", "get", "(", "'action'", ")", "template_id", "=", "post", ".", "get", "(", "'parent_id'", ")", "section_id", "=", "post", ".", "get", "(", "'section_id'", ")", "section_text", "=", "post", ".", "get", "(", "'section_text'", ")", "if", "(", "(", "action", "==", "'section'", ")", "and", "(", 
"template_id", "!=", "None", ")", ")", ":", "id", "=", "db", ".", "survey_section", ".", "insert", "(", "name", "=", "section_text", ",", "template_id", "=", "template_id", ",", "cloned_section_id", "=", "section_id", ")", "if", "(", "id", "is", "None", ")", ":", "print", "'Failed to insert record'", "return", "s3db", ".", "configure", "(", "'survey_template'", ",", "listadd", "=", "False", ")", "output", "=", "s3_rest_controller", "(", "rheader", "=", "s3db", ".", "survey_template_rheader", ")", "return", "output" ]
restful crud controller for survey templates .
train
false
6,677
def getSimCountryIso():
    try:
        mContext = autoclass('android.content.Context')
        pythonActivity = autoclass('org.renpy.android.PythonService')
        telephonyManager = cast('android.telephony.TelephonyManager',
                                pythonActivity.mService.getSystemService(mContext.TELEPHONY_SERVICE))
        simCountryIso = telephonyManager.getSimCountryIso()
        return simCountryIso
    except Exception as e:
        return None
[ "def", "getSimCountryIso", "(", ")", ":", "try", ":", "mContext", "=", "autoclass", "(", "'android.content.Context'", ")", "pythonActivity", "=", "autoclass", "(", "'org.renpy.android.PythonService'", ")", "telephonyManager", "=", "cast", "(", "'android.telephony.TelephonyManager'", ",", "pythonActivity", ".", "mService", ".", "getSystemService", "(", "mContext", ".", "TELEPHONY_SERVICE", ")", ")", "simCountryIso", "=", "telephonyManager", ".", "getSimCountryIso", "(", ")", "return", "simCountryIso", "except", "Exception", "as", "e", ":", "return", "None" ]
returns the iso country code for the sim provider , or none if it cannot be determined .
train
false
6,678
@config.command()
@click.argument('pattern', default='*', required=False)
@configuration
def list(pattern):
    from fnmatch import fnmatch
    from sentry.options import default_manager as manager
    for key in manager.all():
        if fnmatch(key.name, pattern):
            click.echo('%s %s' % (key.name, key.type.name.upper()))
[ "@", "config", ".", "command", "(", ")", "@", "click", ".", "argument", "(", "'pattern'", ",", "default", "=", "'*'", ",", "required", "=", "False", ")", "@", "configuration", "def", "list", "(", "pattern", ")", ":", "from", "fnmatch", "import", "fnmatch", "from", "sentry", ".", "options", "import", "default_manager", "as", "manager", "for", "key", "in", "manager", ".", "all", "(", ")", ":", "if", "fnmatch", "(", "key", ".", "name", ",", "pattern", ")", ":", "click", ".", "echo", "(", "(", "'%s %s'", "%", "(", "key", ".", "name", ",", "key", ".", "type", ".", "name", ".", "upper", "(", ")", ")", ")", ")" ]
list all configuration options matching the given pattern .
train
false
6,680
def GetVmodlType(name):
    if isinstance(name, type):
        return name
    typ = vmodlTypes.get(name)
    if typ:
        return typ
    isArray = name.endswith('[]')
    if isArray:
        name = name[:-2]
    (ns, wsdlName) = _GetWsdlInfo(name)
    try:
        typ = GetWsdlType(ns, wsdlName)
    except KeyError:
        raise KeyError(name)
    if typ:
        return (isArray and typ.Array) or typ
    else:
        raise KeyError(name)
[ "def", "GetVmodlType", "(", "name", ")", ":", "if", "isinstance", "(", "name", ",", "type", ")", ":", "return", "name", "typ", "=", "vmodlTypes", ".", "get", "(", "name", ")", "if", "typ", ":", "return", "typ", "isArray", "=", "name", ".", "endswith", "(", "'[]'", ")", "if", "isArray", ":", "name", "=", "name", "[", ":", "(", "-", "2", ")", "]", "(", "ns", ",", "wsdlName", ")", "=", "_GetWsdlInfo", "(", "name", ")", "try", ":", "typ", "=", "GetWsdlType", "(", "ns", ",", "wsdlName", ")", "except", "KeyError", ":", "raise", "KeyError", "(", "name", ")", "if", "typ", ":", "return", "(", "(", "isArray", "and", "typ", ".", "Array", ")", "or", "typ", ")", "else", ":", "raise", "KeyError", "(", "name", ")" ]
get type from vmodl name .
train
true
6,681
def track(): return s3_rest_controller()
[ "def", "track", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
enables previously disabled instance tracking of plugin .
train
false
6,682
def dump_processes(): output_file = (PROFILING_OUTPUT_FMT % get_filename_fmt()) data = {} for child in multiprocessing.active_children(): pid = child._popen.pid child_data = {'name': child.name, 'daemon': child.daemon, 'exitcode': child.exitcode, 'target': child._target.__name__, 'args': [], 'kwargs': {}} for arg in child._args: try: json.dumps(arg) except (TypeError, UnicodeDecodeError): try: child_data['args'].append(arg.__class__.__name__) except: child_data['args'].append('undefined') else: child_data['args'].append(arg) for (key, value) in child._kwargs.iteritems(): try: json.dumps(value) except (TypeError, UnicodeDecodeError): try: child_data['kwargs'][key] = value.__class__.__name__ except: child_data['kwargs'][key] = 'undefined' else: child_data['kwargs'][key] = value data[pid] = child_data json.dump(data, file(output_file, 'w'), indent=4)
[ "def", "dump_processes", "(", ")", ":", "output_file", "=", "(", "PROFILING_OUTPUT_FMT", "%", "get_filename_fmt", "(", ")", ")", "data", "=", "{", "}", "for", "child", "in", "multiprocessing", ".", "active_children", "(", ")", ":", "pid", "=", "child", ".", "_popen", ".", "pid", "child_data", "=", "{", "'name'", ":", "child", ".", "name", ",", "'daemon'", ":", "child", ".", "daemon", ",", "'exitcode'", ":", "child", ".", "exitcode", ",", "'target'", ":", "child", ".", "_target", ".", "__name__", ",", "'args'", ":", "[", "]", ",", "'kwargs'", ":", "{", "}", "}", "for", "arg", "in", "child", ".", "_args", ":", "try", ":", "json", ".", "dumps", "(", "arg", ")", "except", "(", "TypeError", ",", "UnicodeDecodeError", ")", ":", "try", ":", "child_data", "[", "'args'", "]", ".", "append", "(", "arg", ".", "__class__", ".", "__name__", ")", "except", ":", "child_data", "[", "'args'", "]", ".", "append", "(", "'undefined'", ")", "else", ":", "child_data", "[", "'args'", "]", ".", "append", "(", "arg", ")", "for", "(", "key", ",", "value", ")", "in", "child", ".", "_kwargs", ".", "iteritems", "(", ")", ":", "try", ":", "json", ".", "dumps", "(", "value", ")", "except", "(", "TypeError", ",", "UnicodeDecodeError", ")", ":", "try", ":", "child_data", "[", "'kwargs'", "]", "[", "key", "]", "=", "value", ".", "__class__", ".", "__name__", "except", ":", "child_data", "[", "'kwargs'", "]", "[", "key", "]", "=", "'undefined'", "else", ":", "child_data", "[", "'kwargs'", "]", "[", "key", "]", "=", "value", "data", "[", "pid", "]", "=", "child_data", "json", ".", "dump", "(", "data", ",", "file", "(", "output_file", ",", "'w'", ")", ",", "indent", "=", "4", ")" ]
dumps sub-process information to a file .
train
false
6,684
def call_highest_priority(method_name): def priority_decorator(func): @wraps(func) def binary_op_wrapper(self, other): if hasattr(other, '_op_priority'): if (other._op_priority > self._op_priority): try: f = getattr(other, method_name) except AttributeError: pass else: return f(self) return func(self, other) return binary_op_wrapper return priority_decorator
[ "def", "call_highest_priority", "(", "method_name", ")", ":", "def", "priority_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "binary_op_wrapper", "(", "self", ",", "other", ")", ":", "if", "hasattr", "(", "other", ",", "'_op_priority'", ")", ":", "if", "(", "other", ".", "_op_priority", ">", "self", ".", "_op_priority", ")", ":", "try", ":", "f", "=", "getattr", "(", "other", ",", "method_name", ")", "except", "AttributeError", ":", "pass", "else", ":", "return", "f", "(", "self", ")", "return", "func", "(", "self", ",", "other", ")", "return", "binary_op_wrapper", "return", "priority_decorator" ]
a decorator for binary special methods to handle _op_priority .
train
false
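a minimal sketch of the dispatch the record above implements; the two classes and their priority values are invented for illustration, and the decorator itself needs functools.wraps in scope where it is defined:

    class LowPriority(object):
        _op_priority = 10.0
        @call_highest_priority('__radd__')   # defer when the other operand outranks us
        def __add__(self, other):
            return 'LowPriority.__add__'

    class HighPriority(object):
        _op_priority = 11.0
        def __radd__(self, other):
            return 'HighPriority.__radd__'

    print(LowPriority() + HighPriority())    # 'HighPriority.__radd__'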
6,685
def reverse_tuple(n): return tuple(reversed(n))
[ "def", "reverse_tuple", "(", "n", ")", ":", "return", "tuple", "(", "reversed", "(", "n", ")", ")" ]
reverses a tuple . returns : tuple .
train
false
6,686
def simpleVerifyHostname(connection, hostname): commonName = connection.get_peer_certificate().get_subject().commonName if (commonName != hostname): raise SimpleVerificationError(((repr(commonName) + '!=') + repr(hostname)))
[ "def", "simpleVerifyHostname", "(", "connection", ",", "hostname", ")", ":", "commonName", "=", "connection", ".", "get_peer_certificate", "(", ")", ".", "get_subject", "(", ")", ".", "commonName", "if", "(", "commonName", "!=", "hostname", ")", ":", "raise", "SimpleVerificationError", "(", "(", "(", "repr", "(", "commonName", ")", "+", "'!='", ")", "+", "repr", "(", "hostname", ")", ")", ")" ]
check only the common name in the certificate presented by the peer and only for an exact match .
train
false
6,689
def _read_rospack_cache(cache, ros_root, ros_package_path): try: with open(os.path.join(rospkg.get_ros_home(), 'rospack_cache')) as f: for l in f.readlines(): l = l[:(-1)] if (not len(l)): continue if (l[0] == '#'): if l.startswith('#ROS_ROOT='): if (not (l[len('#ROS_ROOT='):] == ros_root)): return False elif l.startswith('#ROS_PACKAGE_PATH='): if (not (l[len('#ROS_PACKAGE_PATH='):] == ros_package_path)): return False else: cache[os.path.basename(l)] = (l, ros_root, ros_package_path) return True except: pass
[ "def", "_read_rospack_cache", "(", "cache", ",", "ros_root", ",", "ros_package_path", ")", ":", "try", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "rospkg", ".", "get_ros_home", "(", ")", ",", "'rospack_cache'", ")", ")", "as", "f", ":", "for", "l", "in", "f", ".", "readlines", "(", ")", ":", "l", "=", "l", "[", ":", "(", "-", "1", ")", "]", "if", "(", "not", "len", "(", "l", ")", ")", ":", "continue", "if", "(", "l", "[", "0", "]", "==", "'#'", ")", ":", "if", "l", ".", "startswith", "(", "'#ROS_ROOT='", ")", ":", "if", "(", "not", "(", "l", "[", "len", "(", "'#ROS_ROOT='", ")", ":", "]", "==", "ros_root", ")", ")", ":", "return", "False", "elif", "l", ".", "startswith", "(", "'#ROS_PACKAGE_PATH='", ")", ":", "if", "(", "not", "(", "l", "[", "len", "(", "'#ROS_PACKAGE_PATH='", ")", ":", "]", "==", "ros_package_path", ")", ")", ":", "return", "False", "else", ":", "cache", "[", "os", ".", "path", ".", "basename", "(", "l", ")", "]", "=", "(", "l", ",", "ros_root", ",", "ros_package_path", ")", "return", "True", "except", ":", "pass" ]
read in rospack_cache data into cache .
train
false
6,690
def incorrect_policy_index(info, remote_info): if ('storage_policy_index' not in remote_info): return False if (remote_info['storage_policy_index'] == info['storage_policy_index']): return False return (info['storage_policy_index'] != sorted([info, remote_info], cmp=cmp_policy_info)[0]['storage_policy_index'])
[ "def", "incorrect_policy_index", "(", "info", ",", "remote_info", ")", ":", "if", "(", "'storage_policy_index'", "not", "in", "remote_info", ")", ":", "return", "False", "if", "(", "remote_info", "[", "'storage_policy_index'", "]", "==", "info", "[", "'storage_policy_index'", "]", ")", ":", "return", "False", "return", "(", "info", "[", "'storage_policy_index'", "]", "!=", "sorted", "(", "[", "info", ",", "remote_info", "]", ",", "cmp", "=", "cmp_policy_info", ")", "[", "0", "]", "[", "'storage_policy_index'", "]", ")" ]
compare remote_info to info and decide if the remote storage policy index should be used instead of ours .
train
false
6,691
def update_license_file(args, top_level, year=datetime.now(UTC).year): license_template = top_level.child('admin').child('LICENSE.template') with license_template.open() as input_file: with top_level.child('LICENSE').open('w') as output_file: output_file.write(input_file.read().format(current_year=year))
[ "def", "update_license_file", "(", "args", ",", "top_level", ",", "year", "=", "datetime", ".", "now", "(", "UTC", ")", ".", "year", ")", ":", "license_template", "=", "top_level", ".", "child", "(", "'admin'", ")", ".", "child", "(", "'LICENSE.template'", ")", "with", "license_template", ".", "open", "(", ")", "as", "input_file", ":", "with", "top_level", ".", "child", "(", "'LICENSE'", ")", ".", "open", "(", "'w'", ")", "as", "output_file", ":", "output_file", ".", "write", "(", "input_file", ".", "read", "(", ")", ".", "format", "(", "current_year", "=", "year", ")", ")" ]
update the license file to include the current year .
train
false
6,692
def skip_template(condition=True, *args): if condition: raise SkipTemplate(*args)
[ "def", "skip_template", "(", "condition", "=", "True", ",", "*", "args", ")", ":", "if", "condition", ":", "raise", "SkipTemplate", "(", "*", "args", ")" ]
raise skiptemplate if the condition is true .
train
false
6,693
def test_needs_scorelog(): submission = SubmissionFactory.build(field=SubmissionFields.STATE, type=SubmissionTypes.NORMAL, old_value=UNTRANSLATED, new_value=TRANSLATED) assert (not submission.needs_scorelog()) submission = SubmissionFactory.build(field=SubmissionFields.STATE, type=SubmissionTypes.NORMAL, old_value=TRANSLATED, new_value=UNTRANSLATED) assert submission.needs_scorelog() submission = SubmissionFactory.build(field=SubmissionFields.TARGET, type=SubmissionTypes.SUGG_ADD, old_value=u'', new_value=u'') assert submission.needs_scorelog()
[ "def", "test_needs_scorelog", "(", ")", ":", "submission", "=", "SubmissionFactory", ".", "build", "(", "field", "=", "SubmissionFields", ".", "STATE", ",", "type", "=", "SubmissionTypes", ".", "NORMAL", ",", "old_value", "=", "UNTRANSLATED", ",", "new_value", "=", "TRANSLATED", ")", "assert", "(", "not", "submission", ".", "needs_scorelog", "(", ")", ")", "submission", "=", "SubmissionFactory", ".", "build", "(", "field", "=", "SubmissionFields", ".", "STATE", ",", "type", "=", "SubmissionTypes", ".", "NORMAL", ",", "old_value", "=", "TRANSLATED", ",", "new_value", "=", "UNTRANSLATED", ")", "assert", "submission", ".", "needs_scorelog", "(", ")", "submission", "=", "SubmissionFactory", ".", "build", "(", "field", "=", "SubmissionFields", ".", "TARGET", ",", "type", "=", "SubmissionTypes", ".", "SUGG_ADD", ",", "old_value", "=", "u''", ",", "new_value", "=", "u''", ")", "assert", "submission", ".", "needs_scorelog", "(", ")" ]
tests if the submission needs to be logged or not .
train
false
6,695
def _save_modified_value(_config_vars, cv, newvalue): oldvalue = _config_vars.get(cv, '') if ((oldvalue != newvalue) and ((_INITPRE + cv) not in _config_vars)): _config_vars[(_INITPRE + cv)] = oldvalue _config_vars[cv] = newvalue
[ "def", "_save_modified_value", "(", "_config_vars", ",", "cv", ",", "newvalue", ")", ":", "oldvalue", "=", "_config_vars", ".", "get", "(", "cv", ",", "''", ")", "if", "(", "(", "oldvalue", "!=", "newvalue", ")", "and", "(", "(", "_INITPRE", "+", "cv", ")", "not", "in", "_config_vars", ")", ")", ":", "_config_vars", "[", "(", "_INITPRE", "+", "cv", ")", "]", "=", "oldvalue", "_config_vars", "[", "cv", "]", "=", "newvalue" ]
save modified and original unmodified value of configuration var .
train
false
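a small behavioral sketch for the record above; this matches CPython's _osx_support helper, so the _INITPRE value is assumed from that module:

    _INITPRE = '_OSX_SUPPORT_INITIAL_'   # assumed prefix, as in CPython's _osx_support

    cfg = {'CFLAGS': '-O2'}
    _save_modified_value(cfg, 'CFLAGS', '-O2 -arch x86_64')
    _save_modified_value(cfg, 'CFLAGS', '-O3')     # original is saved only once
    print(cfg['_OSX_SUPPORT_INITIAL_CFLAGS'])      # -O2
    print(cfg['CFLAGS'])                           # -O3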
6,696
def parse_bootstrap_support(lines): bootstraps = {} for line in lines: if (line[0] == '#'): continue wordlist = line.strip().split() bootstraps[wordlist[0]] = float(wordlist[1]) return bootstraps
[ "def", "parse_bootstrap_support", "(", "lines", ")", ":", "bootstraps", "=", "{", "}", "for", "line", "in", "lines", ":", "if", "(", "line", "[", "0", "]", "==", "'#'", ")", ":", "continue", "wordlist", "=", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "bootstraps", "[", "wordlist", "[", "0", "]", "]", "=", "float", "(", "wordlist", "[", "1", "]", ")", "return", "bootstraps" ]
parser for a bootstrap/jackknife support in tab delimited text .
train
false
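an input/output sketch for the record above; any whitespace separates the node name from its support value, and lines starting with '#' are skipped:

    lines = [
        '# node  support',
        'node1  0.95',
        'node2  0.80',
    ]
    print(parse_bootstrap_support(lines))   # {'node1': 0.95, 'node2': 0.8}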
6,697
def _check_write_command_response(results): errors = [res for res in results if (('writeErrors' in res[1]) or ('writeConcernError' in res[1]))] if errors: (offset, result) = errors[(-1)] write_errors = result.get('writeErrors') if write_errors: error = write_errors[(-1)] error['index'] += offset if (error.get('code') == 11000): raise DuplicateKeyError(error.get('errmsg'), 11000, error) raise WriteError(error.get('errmsg'), error.get('code'), error) else: error = result['writeConcernError'] if (('errInfo' in error) and error['errInfo'].get('wtimeout')): raise WTimeoutError(error.get('errmsg'), error.get('code'), error) raise WriteConcernError(error.get('errmsg'), error.get('code'), error)
[ "def", "_check_write_command_response", "(", "results", ")", ":", "errors", "=", "[", "res", "for", "res", "in", "results", "if", "(", "(", "'writeErrors'", "in", "res", "[", "1", "]", ")", "or", "(", "'writeConcernError'", "in", "res", "[", "1", "]", ")", ")", "]", "if", "errors", ":", "(", "offset", ",", "result", ")", "=", "errors", "[", "(", "-", "1", ")", "]", "write_errors", "=", "result", ".", "get", "(", "'writeErrors'", ")", "if", "write_errors", ":", "error", "=", "write_errors", "[", "(", "-", "1", ")", "]", "error", "[", "'index'", "]", "+=", "offset", "if", "(", "error", ".", "get", "(", "'code'", ")", "==", "11000", ")", ":", "raise", "DuplicateKeyError", "(", "error", ".", "get", "(", "'errmsg'", ")", ",", "11000", ",", "error", ")", "raise", "WriteError", "(", "error", ".", "get", "(", "'errmsg'", ")", ",", "error", ".", "get", "(", "'code'", ")", ",", "error", ")", "else", ":", "error", "=", "result", "[", "'writeConcernError'", "]", "if", "(", "(", "'errInfo'", "in", "error", ")", "and", "error", "[", "'errInfo'", "]", ".", "get", "(", "'wtimeout'", ")", ")", ":", "raise", "WTimeoutError", "(", "error", ".", "get", "(", "'errmsg'", ")", ",", "error", ".", "get", "(", "'code'", ")", ",", "error", ")", "raise", "WriteConcernError", "(", "error", ".", "get", "(", "'errmsg'", ")", ",", "error", ".", "get", "(", "'code'", ")", ",", "error", ")" ]
backward compatibility helper for write command error handling .
train
true
6,698
def mangle_identifier(ident): splitted = (''.join(map(_encode, x)) for x in ident.split('.')) parts = [('%d%s' % (len(x), x)) for x in splitted] if (len(parts) > 1): return ('N%sE' % ''.join(parts)) else: return parts[0]
[ "def", "mangle_identifier", "(", "ident", ")", ":", "splitted", "=", "(", "''", ".", "join", "(", "map", "(", "_encode", ",", "x", ")", ")", "for", "x", "in", "ident", ".", "split", "(", "'.'", ")", ")", "parts", "=", "[", "(", "'%d%s'", "%", "(", "len", "(", "x", ")", ",", "x", ")", ")", "for", "x", "in", "splitted", "]", "if", "(", "len", "(", "parts", ")", ">", "1", ")", ":", "return", "(", "'N%sE'", "%", "''", ".", "join", "(", "parts", ")", ")", "else", ":", "return", "parts", "[", "0", "]" ]
mangle the identifier ; this treats '.' as a namespace separator .
train
false
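a runnable sketch of the Itanium-style length-prefix scheme the record above produces; _encode is not part of the record, so a pass-through stand-in is assumed for plain ASCII identifiers:

    def _encode(ch):
        # stand-in only: assume the real helper leaves ASCII letters unchanged
        return ch

    print(mangle_identifier('foo'))       # 3foo
    print(mangle_identifier('foo.bar'))   # N3foo3barE -- nested, length-prefixed parts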
6,699
def build_tool_dependencies_select_field(app, tool_shed_repository, name, multiple=True, display='checkboxes', uninstalled_only=False): tool_dependencies_select_field = SelectField(name=name, multiple=multiple, display=display) for tool_dependency in tool_shed_repository.tool_dependencies: if uninstalled_only: if (tool_dependency.status not in [app.install_model.ToolDependency.installation_status.NEVER_INSTALLED, app.install_model.ToolDependency.installation_status.UNINSTALLED]): continue elif (tool_dependency.status in [app.install_model.ToolDependency.installation_status.NEVER_INSTALLED, app.install_model.ToolDependency.installation_status.UNINSTALLED]): continue option_label = ('%s version %s' % (str(tool_dependency.name), str(tool_dependency.version))) option_value = app.security.encode_id(tool_dependency.id) tool_dependencies_select_field.add_option(option_label, option_value) return tool_dependencies_select_field
[ "def", "build_tool_dependencies_select_field", "(", "app", ",", "tool_shed_repository", ",", "name", ",", "multiple", "=", "True", ",", "display", "=", "'checkboxes'", ",", "uninstalled_only", "=", "False", ")", ":", "tool_dependencies_select_field", "=", "SelectField", "(", "name", "=", "name", ",", "multiple", "=", "multiple", ",", "display", "=", "display", ")", "for", "tool_dependency", "in", "tool_shed_repository", ".", "tool_dependencies", ":", "if", "uninstalled_only", ":", "if", "(", "tool_dependency", ".", "status", "not", "in", "[", "app", ".", "install_model", ".", "ToolDependency", ".", "installation_status", ".", "NEVER_INSTALLED", ",", "app", ".", "install_model", ".", "ToolDependency", ".", "installation_status", ".", "UNINSTALLED", "]", ")", ":", "continue", "elif", "(", "tool_dependency", ".", "status", "in", "[", "app", ".", "install_model", ".", "ToolDependency", ".", "installation_status", ".", "NEVER_INSTALLED", ",", "app", ".", "install_model", ".", "ToolDependency", ".", "installation_status", ".", "UNINSTALLED", "]", ")", ":", "continue", "option_label", "=", "(", "'%s version %s'", "%", "(", "str", "(", "tool_dependency", ".", "name", ")", ",", "str", "(", "tool_dependency", ".", "version", ")", ")", ")", "option_value", "=", "app", ".", "security", ".", "encode_id", "(", "tool_dependency", ".", "id", ")", "tool_dependencies_select_field", ".", "add_option", "(", "option_label", ",", "option_value", ")", "return", "tool_dependencies_select_field" ]
generate a selectfield consisting of the current list of tool dependency ids for an installed tool shed repository .
train
false
6,700
def gf_gcd(f, g, p, K): while g: (f, g) = (g, gf_rem(f, g, p, K)) return gf_monic(f, p, K)[1]
[ "def", "gf_gcd", "(", "f", ",", "g", ",", "p", ",", "K", ")", ":", "while", "g", ":", "(", "f", ",", "g", ")", "=", "(", "g", ",", "gf_rem", "(", "f", ",", "g", ",", "p", ",", "K", ")", ")", "return", "gf_monic", "(", "f", ",", "p", ",", "K", ")", "[", "1", "]" ]
euclidean algorithm in gf(p)[x] .
train
false
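a worked example for the record above, assuming this is sympy's galoistools implementation with dense coefficient lists (highest degree first):

    from sympy.polys.domains import ZZ
    from sympy.polys.galoistools import gf_gcd

    # (x + 1)(x + 2) and (x + 1)(x + 3) over GF(5) share the factor x + 1
    print(gf_gcd(ZZ.map([1, 3, 2]), ZZ.map([1, 4, 3]), 5, ZZ))   # [1, 1]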
6,701
def reversion_register(model_class, fields=None, follow=(), format='json', exclude_fields=None): if (not is_installed('reversion')): return if (fields and exclude_fields): raise ValueError('Just one of fields, exclude_fields arguments can be passed.') opts = model_class._meta local_fields = (opts.local_fields + opts.local_many_to_many) if (fields is None): fields = [field.name for field in local_fields] exclude_fields = (exclude_fields or []) fields = filter((lambda name: (not (name in exclude_fields))), fields) from cms.utils import reversion_hacks reversion_hacks.register_draft_only(model_class, fields, follow, format)
[ "def", "reversion_register", "(", "model_class", ",", "fields", "=", "None", ",", "follow", "=", "(", ")", ",", "format", "=", "'json'", ",", "exclude_fields", "=", "None", ")", ":", "if", "(", "not", "is_installed", "(", "'reversion'", ")", ")", ":", "return", "if", "(", "fields", "and", "exclude_fields", ")", ":", "raise", "ValueError", "(", "'Just one of fields, exclude_fields arguments can be passed.'", ")", "opts", "=", "model_class", ".", "_meta", "local_fields", "=", "(", "opts", ".", "local_fields", "+", "opts", ".", "local_many_to_many", ")", "if", "(", "fields", "is", "None", ")", ":", "fields", "=", "[", "field", ".", "name", "for", "field", "in", "local_fields", "]", "exclude_fields", "=", "(", "exclude_fields", "or", "[", "]", ")", "fields", "=", "filter", "(", "(", "lambda", "name", ":", "(", "not", "(", "name", "in", "exclude_fields", ")", ")", ")", ",", "fields", ")", "from", "cms", ".", "utils", "import", "reversion_hacks", "reversion_hacks", ".", "register_draft_only", "(", "model_class", ",", "fields", ",", "follow", ",", "format", ")" ]
cms interface to reversion api - helper function .
train
false
6,703
def get_legacy_qos_policy(extra_specs): external_policy_name = extra_specs.get('netapp:qos_policy_group') if (external_policy_name is None): return None return dict(policy_name=external_policy_name)
[ "def", "get_legacy_qos_policy", "(", "extra_specs", ")", ":", "external_policy_name", "=", "extra_specs", ".", "get", "(", "'netapp:qos_policy_group'", ")", "if", "(", "external_policy_name", "is", "None", ")", ":", "return", "None", "return", "dict", "(", "policy_name", "=", "external_policy_name", ")" ]
return legacy qos policy information if present in extra specs .
train
false
6,704
def yield_translation_csv_messages(file, no_header=False): csvreader = csv.reader(file, lineterminator='\n') if (not no_header): columns = next(csvreader) assert (columns == 'language_id,table,id,column,source_crc,string'.split(',')) for (language_id, table, id, column, source_crc, string) in csvreader: if isinstance(string, bytes): string = string.decode('utf-8') (yield Message(table, int(id), column, string, origin='target CSV', source_crc=source_crc, language_id=int(language_id)))
[ "def", "yield_translation_csv_messages", "(", "file", ",", "no_header", "=", "False", ")", ":", "csvreader", "=", "csv", ".", "reader", "(", "file", ",", "lineterminator", "=", "'\\n'", ")", "if", "(", "not", "no_header", ")", ":", "columns", "=", "next", "(", "csvreader", ")", "assert", "(", "columns", "==", "'language_id,table,id,column,source_crc,string'", ".", "split", "(", "','", ")", ")", "for", "(", "language_id", ",", "table", ",", "id", ",", "column", ",", "source_crc", ",", "string", ")", "in", "csvreader", ":", "if", "isinstance", "(", "string", ",", "bytes", ")", ":", "string", "=", "string", ".", "decode", "(", "'utf-8'", ")", "(", "yield", "Message", "(", "table", ",", "int", "(", "id", ")", ",", "column", ",", "string", ",", "origin", "=", "'target CSV'", ",", "source_crc", "=", "source_crc", ",", "language_id", "=", "int", "(", "language_id", ")", ")", ")" ]
yield messages from a translation csv file .
train
false
6,705
def create_files(paths, chroot): (dirs, files) = (set(), set()) for path in paths: path = osp.join(chroot, path) filename = osp.basename(path) if (filename == ''): dirs.add(path) else: dirs.add(osp.dirname(path)) files.add(path) for dirpath in dirs: if (not osp.isdir(dirpath)): os.makedirs(dirpath) for filepath in files: open(filepath, 'w').close()
[ "def", "create_files", "(", "paths", ",", "chroot", ")", ":", "(", "dirs", ",", "files", ")", "=", "(", "set", "(", ")", ",", "set", "(", ")", ")", "for", "path", "in", "paths", ":", "path", "=", "osp", ".", "join", "(", "chroot", ",", "path", ")", "filename", "=", "osp", ".", "basename", "(", "path", ")", "if", "(", "filename", "==", "''", ")", ":", "dirs", ".", "add", "(", "path", ")", "else", ":", "dirs", ".", "add", "(", "osp", ".", "dirname", "(", "path", ")", ")", "files", ".", "add", "(", "path", ")", "for", "dirpath", "in", "dirs", ":", "if", "(", "not", "osp", ".", "isdir", "(", "dirpath", ")", ")", ":", "os", ".", "makedirs", "(", "dirpath", ")", "for", "filepath", "in", "files", ":", "open", "(", "filepath", ",", "'w'", ")", ".", "close", "(", ")" ]
creates directories and files found in <path> .
train
false
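a usage sketch for the record above; entries ending in a slash become directories and everything else becomes an empty file:

    import os.path as osp
    import tempfile

    chroot = tempfile.mkdtemp()
    create_files(['pkg/mod.py', 'pkg/sub/', 'README'], chroot)
    assert osp.isdir(osp.join(chroot, 'pkg', 'sub'))
    assert osp.isfile(osp.join(chroot, 'pkg', 'mod.py'))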
6,706
@register.filter def to_js(value): return mark_safe(('JSON.parse("%s")' % escapejs(jsonify(value))))
[ "@", "register", ".", "filter", "def", "to_js", "(", "value", ")", ":", "return", "mark_safe", "(", "(", "'JSON.parse(\"%s\")'", "%", "escapejs", "(", "jsonify", "(", "value", ")", ")", ")", ")" ]
returns a string which leaves the value readily available for js consumption .
train
false
6,707
def parse_taxonomy(infile): res = {} for line in infile: if ((not line) or line.startswith('#')): continue line = line.rstrip('\n') fields = line.split(' DCTB ') otu = fields[0].split(' ')[0] res[otu] = taxa_split(fields[1]) return res
[ "def", "parse_taxonomy", "(", "infile", ")", ":", "res", "=", "{", "}", "for", "line", "in", "infile", ":", "if", "(", "(", "not", "line", ")", "or", "line", ".", "startswith", "(", "'#'", ")", ")", ":", "continue", "line", "=", "line", ".", "rstrip", "(", "'\\n'", ")", "fields", "=", "line", ".", "split", "(", "' DCTB '", ")", "otu", "=", "fields", "[", "0", "]", ".", "split", "(", "' '", ")", "[", "0", "]", "res", "[", "otu", "]", "=", "taxa_split", "(", "fields", "[", "1", "]", ")", "return", "res" ]
parse a taxonomy file .
train
false
6,708
def build_single_handler_applications(paths, argvs=None): applications = {} argvs = ({} or argvs) for path in paths: application = build_single_handler_application(path, argvs.get(path, [])) route = application.handlers[0].url_path() if (not route): if ('/' in applications): raise RuntimeError(("Don't know the URL path to use for %s" % path)) route = '/' applications[route] = application return applications
[ "def", "build_single_handler_applications", "(", "paths", ",", "argvs", "=", "None", ")", ":", "applications", "=", "{", "}", "argvs", "=", "(", "{", "}", "or", "argvs", ")", "for", "path", "in", "paths", ":", "application", "=", "build_single_handler_application", "(", "path", ",", "argvs", ".", "get", "(", "path", ",", "[", "]", ")", ")", "route", "=", "application", ".", "handlers", "[", "0", "]", ".", "url_path", "(", ")", "if", "(", "not", "route", ")", ":", "if", "(", "'/'", "in", "applications", ")", ":", "raise", "RuntimeError", "(", "(", "\"Don't know the URL path to use for %s\"", "%", "path", ")", ")", "route", "=", "'/'", "applications", "[", "route", "]", "=", "application", "return", "applications" ]
return a dictionary mapping routes to bokeh applications built using single handlers .
train
true
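a hedged usage sketch for the record above (bokeh-style handlers; the script path is hypothetical). note that with the default argvs=None the `{} or argvs` expression leaves argvs as None and the later argvs.get call fails, so a dict is passed explicitly here:

    apps = build_single_handler_applications(['/opt/dashboards/trends.py'], argvs={})
    print(list(apps))   # ['/trends'] -- the route defaults to the script basename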
6,710
def _write_services(service_descriptors, out): for service in (service_descriptors or []): (out << '') (out << '') (out << ('class %s(remote.Service):' % service.name)) with out.indent(): if service.methods: _write_methods(service.methods, out) else: (out << '') (out << 'pass')
[ "def", "_write_services", "(", "service_descriptors", ",", "out", ")", ":", "for", "service", "in", "(", "service_descriptors", "or", "[", "]", ")", ":", "(", "out", "<<", "''", ")", "(", "out", "<<", "''", ")", "(", "out", "<<", "(", "'class %s(remote.Service):'", "%", "service", ".", "name", ")", ")", "with", "out", ".", "indent", "(", ")", ":", "if", "service", ".", "methods", ":", "_write_methods", "(", "service", ".", "methods", ",", "out", ")", "else", ":", "(", "out", "<<", "''", ")", "(", "out", "<<", "'pass'", ")" ]
write service types .
train
false
6,711
def processElementNode(elementNode): path.convertElementNode(elementNode, getGeometryOutput(None, elementNode))
[ "def", "processElementNode", "(", "elementNode", ")", ":", "path", ".", "convertElementNode", "(", "elementNode", ",", "getGeometryOutput", "(", "None", ",", "elementNode", ")", ")" ]
process the xml element .
train
false
6,712
def make_zip_handler(zipfilename, max_age=None, public=None): class CustomZipHandler(ZipHandler, ): def get(self, name): self.ServeFromZipFile(self.ZIPFILENAME, name) ZIPFILENAME = zipfilename if (max_age is not None): MAX_AGE = max_age if (public is not None): PUBLIC = public return CustomZipHandler
[ "def", "make_zip_handler", "(", "zipfilename", ",", "max_age", "=", "None", ",", "public", "=", "None", ")", ":", "class", "CustomZipHandler", "(", "ZipHandler", ",", ")", ":", "def", "get", "(", "self", ",", "name", ")", ":", "self", ".", "ServeFromZipFile", "(", "self", ".", "ZIPFILENAME", ",", "name", ")", "ZIPFILENAME", "=", "zipfilename", "if", "(", "max_age", "is", "not", "None", ")", ":", "MAX_AGE", "=", "max_age", "if", "(", "public", "is", "not", "None", ")", ":", "PUBLIC", "=", "public", "return", "CustomZipHandler" ]
factory function to construct a custom ziphandler instance .
train
false
6,714
def mountpoint_to_number(mountpoint): if mountpoint.startswith('/dev/'): mountpoint = mountpoint[5:] if re.match('^[hsv]d[a-p]$', mountpoint): return (ord(mountpoint[2:3]) - ord('a')) elif re.match('^[0-9]+$', mountpoint): return string.atoi(mountpoint, 10) else: LOG.warn((_('Mountpoint cannot be translated: %s') % mountpoint)) return (-1)
[ "def", "mountpoint_to_number", "(", "mountpoint", ")", ":", "if", "mountpoint", ".", "startswith", "(", "'/dev/'", ")", ":", "mountpoint", "=", "mountpoint", "[", "5", ":", "]", "if", "re", ".", "match", "(", "'^[hsv]d[a-p]$'", ",", "mountpoint", ")", ":", "return", "(", "ord", "(", "mountpoint", "[", "2", ":", "3", "]", ")", "-", "ord", "(", "'a'", ")", ")", "elif", "re", ".", "match", "(", "'^[0-9]+$'", ",", "mountpoint", ")", ":", "return", "string", ".", "atoi", "(", "mountpoint", ",", "10", ")", "else", ":", "LOG", ".", "warn", "(", "(", "_", "(", "'Mountpoint cannot be translated: %s'", ")", "%", "mountpoint", ")", ")", "return", "(", "-", "1", ")" ]
translate a mountpoint like /dev/sdc into a numeric index .
train
false
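worked examples for the record above; it relies on the module-level LOG and the Python 2 string.atoi the snippet already uses:

    print(mountpoint_to_number('/dev/sdc'))    # 2 -- third disk letter
    print(mountpoint_to_number('3'))           # 3 -- already numeric
    print(mountpoint_to_number('/dev/xvda'))   # -1, plus a warning: pattern not recognized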
6,715
def heappop(heap): lastelt = heap.pop() if heap: returnitem = heap[0] heap[0] = lastelt _siftup(heap, 0) else: returnitem = lastelt return returnitem
[ "def", "heappop", "(", "heap", ")", ":", "lastelt", "=", "heap", ".", "pop", "(", ")", "if", "heap", ":", "returnitem", "=", "heap", "[", "0", "]", "heap", "[", "0", "]", "=", "lastelt", "_siftup", "(", "heap", ",", "0", ")", "else", ":", "returnitem", "=", "lastelt", "return", "returnitem" ]
pop the smallest item off the heap .
train
true
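a usage sketch; the record mirrors the stdlib implementation and calls heapq's private _siftup, which is assumed to be imported alongside it:

    from heapq import heapify, _siftup   # _siftup assumed in scope, as in the stdlib source

    data = [5, 1, 4, 2]
    heapify(data)            # establish the heap invariant first
    print(heappop(data))     # 1
    print(heappop(data))     # 2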
6,716
def _retry_all(_): return True
[ "def", "_retry_all", "(", "_", ")", ":", "return", "True" ]
retry all caught exceptions .
train
false
6,717
def _service_path(name): if (not SERVICE_DIR): raise CommandExecutionError('Could not find service directory.') return '{0}/{1}'.format(SERVICE_DIR, name)
[ "def", "_service_path", "(", "name", ")", ":", "if", "(", "not", "SERVICE_DIR", ")", ":", "raise", "CommandExecutionError", "(", "'Could not find service directory.'", ")", "return", "'{0}/{1}'", ".", "format", "(", "SERVICE_DIR", ",", "name", ")" ]
return service_dir+name if possible . name : the service's name to work on .
train
false
6,718
@skip_without('xxlimited') def test_pprint_heap_allocated_type(): import xxlimited output = pretty.pretty(xxlimited.Null) nt.assert_equal(output, 'xxlimited.Null')
[ "@", "skip_without", "(", "'xxlimited'", ")", "def", "test_pprint_heap_allocated_type", "(", ")", ":", "import", "xxlimited", "output", "=", "pretty", ".", "pretty", "(", "xxlimited", ".", "Null", ")", "nt", ".", "assert_equal", "(", "output", ",", "'xxlimited.Null'", ")" ]
test that pprint works for heap allocated types .
train
false
6,719
def check_config_sanity(config): if (config.http01_port == config.tls_sni_01_port): raise errors.ConfigurationError('Trying to run http-01 and tls-sni-01 on the same port ({0})'.format(config.tls_sni_01_port)) if (config.namespace.domains is not None): for domain in config.namespace.domains: util.enforce_domain_sanity(domain)
[ "def", "check_config_sanity", "(", "config", ")", ":", "if", "(", "config", ".", "http01_port", "==", "config", ".", "tls_sni_01_port", ")", ":", "raise", "errors", ".", "ConfigurationError", "(", "'Trying to run http-01 and tls-sni-01 on the same port ({0})'", ".", "format", "(", "config", ".", "tls_sni_01_port", ")", ")", "if", "(", "config", ".", "namespace", ".", "domains", "is", "not", "None", ")", ":", "for", "domain", "in", "config", ".", "namespace", ".", "domains", ":", "util", ".", "enforce_domain_sanity", "(", "domain", ")" ]
validate command line options and display error message if requirements are not met .
train
false
6,720
def help_message(): help_msg = '\n' help_msg += (('Usage: ' + sickbeard.MY_FULLNAME) + ' <option> <another option>\n') help_msg += '\n' help_msg += 'Options:\n' help_msg += '\n' help_msg += ' -h --help Prints this message\n' help_msg += ' -f --forceupdate Force update all shows in the DB (from tvdb) on startup\n' help_msg += ' -q --quiet Disables logging to console\n' help_msg += ' --nolaunch Suppress launching web browser on startup\n' if (sys.platform == 'win32'): help_msg += ' -d --daemon Running as real daemon is not supported on Windows\n' help_msg += ' On Windows, --daemon is substituted with: --quiet --nolaunch\n' else: help_msg += ' -d --daemon Run as double forked daemon (includes options --quiet --nolaunch)\n' help_msg += ' --pidfile=<path> Combined with --daemon creates a pidfile (full path including filename)\n' help_msg += ' -p <port> --port=<port> Override default/configured port to listen on\n' help_msg += ' --datadir=<path> Override folder (full path) as location for\n' help_msg += ' storing database, configfile, cache, logfiles \n' help_msg += ((' Default: ' + sickbeard.PROG_DIR) + '\n') help_msg += ' --config=<path> Override config filename (full path including filename)\n' help_msg += ' to load configuration from \n' help_msg += ((' Default: config.ini in ' + sickbeard.PROG_DIR) + ' or --datadir location\n') help_msg += ' --noresize Prevent resizing of the banner/posters even if PIL is installed\n' return help_msg
[ "def", "help_message", "(", ")", ":", "help_msg", "=", "'\\n'", "help_msg", "+=", "(", "(", "'Usage: '", "+", "sickbeard", ".", "MY_FULLNAME", ")", "+", "' <option> <another option>\\n'", ")", "help_msg", "+=", "'\\n'", "help_msg", "+=", "'Options:\\n'", "help_msg", "+=", "'\\n'", "help_msg", "+=", "' -h --help Prints this message\\n'", "help_msg", "+=", "' -f --forceupdate Force update all shows in the DB (from tvdb) on startup\\n'", "help_msg", "+=", "' -q --quiet Disables logging to console\\n'", "help_msg", "+=", "' --nolaunch Suppress launching web browser on startup\\n'", "if", "(", "sys", ".", "platform", "==", "'win32'", ")", ":", "help_msg", "+=", "' -d --daemon Running as real daemon is not supported on Windows\\n'", "help_msg", "+=", "' On Windows, --daemon is substituted with: --quiet --nolaunch\\n'", "else", ":", "help_msg", "+=", "' -d --daemon Run as double forked daemon (includes options --quiet --nolaunch)\\n'", "help_msg", "+=", "' --pidfile=<path> Combined with --daemon creates a pidfile (full path including filename)\\n'", "help_msg", "+=", "' -p <port> --port=<port> Override default/configured port to listen on\\n'", "help_msg", "+=", "' --datadir=<path> Override folder (full path) as location for\\n'", "help_msg", "+=", "' storing database, configfile, cache, logfiles \\n'", "help_msg", "+=", "(", "(", "' Default: '", "+", "sickbeard", ".", "PROG_DIR", ")", "+", "'\\n'", ")", "help_msg", "+=", "' --config=<path> Override config filename (full path including filename)\\n'", "help_msg", "+=", "' to load configuration from \\n'", "help_msg", "+=", "(", "(", "' Default: config.ini in '", "+", "sickbeard", ".", "PROG_DIR", ")", "+", "' or --datadir location\\n'", ")", "help_msg", "+=", "' --noresize Prevent resizing of the banner/posters even if PIL is installed\\n'", "return", "help_msg" ]
print help message for commandline options .
train
false
6,721
def dump_data(app_labels, format='json', indent=None): from django.db.models import get_app, get_apps, get_models from django.core import serializers if (len(app_labels) == 0): app_list = get_apps() else: app_list = [get_app(app_label) for app_label in app_labels] try: serializers.get_serializer(format) except KeyError: sys.stderr.write(style.ERROR(('Unknown serialization format: %s\n' % format))) objects = [] for app in app_list: for model in get_models(app): objects.extend(model.objects.all()) try: return serializers.serialize(format, objects, indent=indent) except Exception as e: sys.stderr.write(style.ERROR(('Unable to serialize database: %s\n' % e)))
[ "def", "dump_data", "(", "app_labels", ",", "format", "=", "'json'", ",", "indent", "=", "None", ")", ":", "from", "django", ".", "db", ".", "models", "import", "get_app", ",", "get_apps", ",", "get_models", "from", "django", ".", "core", "import", "serializers", "if", "(", "len", "(", "app_labels", ")", "==", "0", ")", ":", "app_list", "=", "get_apps", "(", ")", "else", ":", "app_list", "=", "[", "get_app", "(", "app_label", ")", "for", "app_label", "in", "app_labels", "]", "try", ":", "serializers", ".", "get_serializer", "(", "format", ")", "except", "KeyError", ":", "sys", ".", "stderr", ".", "write", "(", "style", ".", "ERROR", "(", "(", "'Unknown serialization format: %s\\n'", "%", "format", ")", ")", ")", "objects", "=", "[", "]", "for", "app", "in", "app_list", ":", "for", "model", "in", "get_models", "(", "app", ")", ":", "objects", ".", "extend", "(", "model", ".", "objects", ".", "all", "(", ")", ")", "try", ":", "return", "serializers", ".", "serialize", "(", "format", ",", "objects", ",", "indent", "=", "indent", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "style", ".", "ERROR", "(", "(", "'Unable to serialize database: %s\\n'", "%", "e", ")", ")", ")" ]
output the current contents of the database as a fixture of the given format .
train
false
6,723
def cache_size(mb=True): numtotal = [0] classdict = {} def get_recurse(submodels): for submodel in submodels: subclasses = submodel.__subclasses__() if (not subclasses): num = len(submodel.get_all_cached_instances()) numtotal[0] += num classdict[submodel.__name__] = num else: get_recurse(subclasses) get_recurse(SharedMemoryModel.__subclasses__()) return (numtotal[0], classdict)
[ "def", "cache_size", "(", "mb", "=", "True", ")", ":", "numtotal", "=", "[", "0", "]", "classdict", "=", "{", "}", "def", "get_recurse", "(", "submodels", ")", ":", "for", "submodel", "in", "submodels", ":", "subclasses", "=", "submodel", ".", "__subclasses__", "(", ")", "if", "(", "not", "subclasses", ")", ":", "num", "=", "len", "(", "submodel", ".", "get_all_cached_instances", "(", ")", ")", "numtotal", "[", "0", "]", "+=", "num", "classdict", "[", "submodel", ".", "__name__", "]", "=", "num", "else", ":", "get_recurse", "(", "subclasses", ")", "get_recurse", "(", "SharedMemoryModel", ".", "__subclasses__", "(", ")", ")", "return", "(", "numtotal", "[", "0", "]", ",", "classdict", ")" ]
calculate statistics about the cache .
train
false
6,724
def clientFromString(reactor, description): (args, kwargs) = _parse(description) aname = args.pop(0) name = aname.upper() if (name not in _clientParsers): plugin = _matchPluginToPrefix(getPlugins(IStreamClientEndpointStringParserWithReactor), name) return plugin.parseStreamClient(reactor, *args, **kwargs) kwargs = _clientParsers[name](*args, **kwargs) return _endpointClientFactories[name](reactor, **kwargs)
[ "def", "clientFromString", "(", "reactor", ",", "description", ")", ":", "(", "args", ",", "kwargs", ")", "=", "_parse", "(", "description", ")", "aname", "=", "args", ".", "pop", "(", "0", ")", "name", "=", "aname", ".", "upper", "(", ")", "if", "(", "name", "not", "in", "_clientParsers", ")", ":", "plugin", "=", "_matchPluginToPrefix", "(", "getPlugins", "(", "IStreamClientEndpointStringParserWithReactor", ")", ",", "name", ")", "return", "plugin", ".", "parseStreamClient", "(", "reactor", ",", "*", "args", ",", "**", "kwargs", ")", "kwargs", "=", "_clientParsers", "[", "name", "]", "(", "*", "args", ",", "**", "kwargs", ")", "return", "_endpointClientFactories", "[", "name", "]", "(", "reactor", ",", "**", "kwargs", ")" ]
construct a client endpoint from a description string .
train
false
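a hedged usage sketch for the record above, using twisted's standard endpoint description grammar:

    from twisted.internet import reactor

    endpoint = clientFromString(reactor, 'tcp:host=example.com:port=80')
    tls_endpoint = clientFromString(reactor, 'ssl:host=example.com:port=443')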
6,725
@treeio_login_required @handle_response_format def source_delete(request, source_id, response_format='html'): source = get_object_or_404(SaleSource, pk=source_id) if ((not request.user.profile.has_permission(source, mode='w')) and (not request.user.profile.is_admin('treeio.sales'))): return user_denied(request, "You don't have access to this Sale Status", response_format) if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): source.trash = True source.save() else: source.delete() return HttpResponseRedirect(reverse('sales_settings_view')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('sales_source_view', args=[source.id])) all_products = Object.filter_by_request(request, Product.objects.filter(parent__isnull=True)) all_sources = Object.filter_by_request(request, SaleSource.objects) return render_to_response('sales/source_delete', {'source': source, 'sources': all_sources, 'products': all_products}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "source_delete", "(", "request", ",", "source_id", ",", "response_format", "=", "'html'", ")", ":", "source", "=", "get_object_or_404", "(", "SaleSource", ",", "pk", "=", "source_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "source", ",", "mode", "=", "'w'", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Sale Status\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "source", ".", "trash", "=", "True", "source", ".", "save", "(", ")", "else", ":", "source", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_settings_view'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_source_view'", ",", "args", "=", "[", "source", ".", "id", "]", ")", ")", "all_products", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Product", ".", "objects", ".", "filter", "(", "parent__isnull", "=", "True", ")", ")", "all_sources", "=", "Object", ".", "filter_by_request", "(", "request", ",", "SaleSource", ".", "objects", ")", "return", "render_to_response", "(", "'sales/source_delete'", ",", "{", "'source'", ":", "source", ",", "'sources'", ":", "all_sources", ",", "'products'", ":", "all_products", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
salesource delete .
train
false
6,727
def _salt_cipher_secret(secret): salt = _get_new_csrf_string() chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in salt)) cipher = ''.join((chars[((x + y) % len(chars))] for (x, y) in pairs)) return (salt + cipher)
[ "def", "_salt_cipher_secret", "(", "secret", ")", ":", "salt", "=", "_get_new_csrf_string", "(", ")", "chars", "=", "CSRF_ALLOWED_CHARS", "pairs", "=", "zip", "(", "(", "chars", ".", "index", "(", "x", ")", "for", "x", "in", "secret", ")", ",", "(", "chars", ".", "index", "(", "x", ")", "for", "x", "in", "salt", ")", ")", "cipher", "=", "''", ".", "join", "(", "(", "chars", "[", "(", "(", "x", "+", "y", ")", "%", "len", "(", "chars", ")", ")", "]", "for", "(", "x", ",", "y", ")", "in", "pairs", ")", ")", "return", "(", "salt", "+", "cipher", ")" ]
given a secret , generate a token by adding a salt and using it to encrypt the secret .
train
false
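a behavioral sketch for the record above (this matches django's csrf module, whose _get_new_csrf_string and CSRF_ALLOWED_CHARS helpers are assumed in scope):

    secret = _get_new_csrf_string()              # 32 allowed characters, per django's constants
    token = _salt_cipher_secret(secret)
    print(len(token) == 2 * len(secret))         # True: salt + same-length cipher text
    print(token != _salt_cipher_secret(secret))  # True (almost surely): fresh salt each call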
6,728
def test_sd_started_state(name, path=None): qstate = systemd_running_state(name, path=path) if (qstate in ('initializing', 'starting')): return False elif (qstate == ''): return None else: return True
[ "def", "test_sd_started_state", "(", "name", ",", "path", "=", "None", ")", ":", "qstate", "=", "systemd_running_state", "(", "name", ",", "path", "=", "path", ")", "if", "(", "qstate", "in", "(", "'initializing'", ",", "'starting'", ")", ")", ":", "return", "False", "elif", "(", "qstate", "==", "''", ")", ":", "return", "None", "else", ":", "return", "True" ]
test if a systemd container is fully started . path : path to the container parent directory , default : /var/lib/lxc .
train
true
6,729
def rss_mail(feed, jobs): parm = {'amount': len(jobs), 'feed': feed, 'jobs': jobs} return send_with_template('rss', parm)
[ "def", "rss_mail", "(", "feed", ",", "jobs", ")", ":", "parm", "=", "{", "'amount'", ":", "len", "(", "jobs", ")", ",", "'feed'", ":", "feed", ",", "'jobs'", ":", "jobs", "}", "return", "send_with_template", "(", "'rss'", ",", "parm", ")" ]
send notification email containing list of files .
train
false
6,730
@sync_performer def perform_sudo(dispatcher, intent): return Effect(Run(command=('sudo ' + intent.command), log_command_filter=identity))
[ "@", "sync_performer", "def", "perform_sudo", "(", "dispatcher", ",", "intent", ")", ":", "return", "Effect", "(", "Run", "(", "command", "=", "(", "'sudo '", "+", "intent", ".", "command", ")", ",", "log_command_filter", "=", "identity", ")", ")" ]
default implementation of sudo .
train
false
6,731
def substitute_indices(t, *index_tuples): if (not isinstance(t, TensExpr)): return t free = t.free free1 = [] for (j, ipos, cpos) in free: for (i, v) in index_tuples: if ((i._name == j._name) and (i._tensortype == j._tensortype)): if (i._is_up == j._is_up): free1.append((v, ipos, cpos)) else: free1.append(((- v), ipos, cpos)) break else: free1.append((j, ipos, cpos)) t = TensMul.from_data(t.coeff, t.components, free1, t.dum) return t
[ "def", "substitute_indices", "(", "t", ",", "*", "index_tuples", ")", ":", "if", "(", "not", "isinstance", "(", "t", ",", "TensExpr", ")", ")", ":", "return", "t", "free", "=", "t", ".", "free", "free1", "=", "[", "]", "for", "(", "j", ",", "ipos", ",", "cpos", ")", "in", "free", ":", "for", "(", "i", ",", "v", ")", "in", "index_tuples", ":", "if", "(", "(", "i", ".", "_name", "==", "j", ".", "_name", ")", "and", "(", "i", ".", "_tensortype", "==", "j", ".", "_tensortype", ")", ")", ":", "if", "(", "i", ".", "_is_up", "==", "j", ".", "_is_up", ")", ":", "free1", ".", "append", "(", "(", "v", ",", "ipos", ",", "cpos", ")", ")", "else", ":", "free1", ".", "append", "(", "(", "(", "-", "v", ")", ",", "ipos", ",", "cpos", ")", ")", "break", "else", ":", "free1", ".", "append", "(", "(", "j", ",", "ipos", ",", "cpos", ")", ")", "t", "=", "TensMul", ".", "from_data", "(", "t", ".", "coeff", ",", "t", ".", "components", ",", "free1", ",", "t", ".", "dum", ")", "return", "t" ]
return a tensor with free indices substituted according to index_tuples . index_types : list of tuples . note : this method will neither raise nor lower the indices .
train
false
6,732
def parsePEMKey(s, private=False, public=False, passwordCallback=None, implementations=['openssl', 'python']): for implementation in implementations: if ((implementation == 'openssl') and cryptomath.m2cryptoLoaded): key = OpenSSL_RSAKey.parse(s, passwordCallback) break elif (implementation == 'python'): key = Python_RSAKey.parsePEM(s) break else: raise ValueError('No acceptable implementations') return _parseKeyHelper(key, private, public)
[ "def", "parsePEMKey", "(", "s", ",", "private", "=", "False", ",", "public", "=", "False", ",", "passwordCallback", "=", "None", ",", "implementations", "=", "[", "'openssl'", ",", "'python'", "]", ")", ":", "for", "implementation", "in", "implementations", ":", "if", "(", "(", "implementation", "==", "'openssl'", ")", "and", "cryptomath", ".", "m2cryptoLoaded", ")", ":", "key", "=", "OpenSSL_RSAKey", ".", "parse", "(", "s", ",", "passwordCallback", ")", "break", "elif", "(", "implementation", "==", "'python'", ")", ":", "key", "=", "Python_RSAKey", ".", "parsePEM", "(", "s", ")", "break", "else", ":", "raise", "ValueError", "(", "'No acceptable implementations'", ")", "return", "_parseKeyHelper", "(", "key", ",", "private", ",", "public", ")" ]
parse a pem-format key .
train
false
6,734
def IndexToKey(index): props = [] if (index.properties is not None): for prop in index.properties: if (prop.direction == 'asc'): direction = ASCENDING else: direction = DESCENDING props.append((prop.name, direction)) return (index.kind, index.ancestor, tuple(props))
[ "def", "IndexToKey", "(", "index", ")", ":", "props", "=", "[", "]", "if", "(", "index", ".", "properties", "is", "not", "None", ")", ":", "for", "prop", "in", "index", ".", "properties", ":", "if", "(", "prop", ".", "direction", "==", "'asc'", ")", ":", "direction", "=", "ASCENDING", "else", ":", "direction", "=", "DESCENDING", "props", ".", "append", "(", "(", "prop", ".", "name", ",", "direction", ")", ")", "return", "(", "index", ".", "kind", ",", "index", ".", "ancestor", ",", "tuple", "(", "props", ")", ")" ]
convert index to key .
train
false
6,739
def make_comparison_tests(klass, kwargs1, kwargs2): def instance(kwargs): if isinstance(kwargs, dict): return klass(**kwargs) return klass(**kwargs()) class Tests(TestCase, ): def test_equality(self): '\n Instances with the same arguments are equal.\n ' self.assertTrue((instance(kwargs1) == instance(kwargs1))) self.assertFalse((instance(kwargs1) == instance(kwargs2))) def test_notequality(self): '\n Instance with different arguments are not equal.\n ' self.assertTrue((instance(kwargs1) != instance(kwargs2))) self.assertFalse((instance(kwargs1) != instance(kwargs1))) Tests.__name__ = (klass.__name__ + 'ComparisonTests') return Tests
[ "def", "make_comparison_tests", "(", "klass", ",", "kwargs1", ",", "kwargs2", ")", ":", "def", "instance", "(", "kwargs", ")", ":", "if", "isinstance", "(", "kwargs", ",", "dict", ")", ":", "return", "klass", "(", "**", "kwargs", ")", "return", "klass", "(", "**", "kwargs", "(", ")", ")", "class", "Tests", "(", "TestCase", ",", ")", ":", "def", "test_equality", "(", "self", ")", ":", "self", ".", "assertTrue", "(", "(", "instance", "(", "kwargs1", ")", "==", "instance", "(", "kwargs1", ")", ")", ")", "self", ".", "assertFalse", "(", "(", "instance", "(", "kwargs1", ")", "==", "instance", "(", "kwargs2", ")", ")", ")", "def", "test_notequality", "(", "self", ")", ":", "self", ".", "assertTrue", "(", "(", "instance", "(", "kwargs1", ")", "!=", "instance", "(", "kwargs2", ")", ")", ")", "self", ".", "assertFalse", "(", "(", "instance", "(", "kwargs1", ")", "!=", "instance", "(", "kwargs1", ")", ")", ")", "Tests", ".", "__name__", "=", "(", "klass", ".", "__name__", "+", "'ComparisonTests'", ")", "return", "Tests" ]
create tests to verify a class provides standard == and != behavior .
train
false
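a usage sketch for the record above; the Point class is invented for illustration, and TestCase is assumed imported where the factory is defined:

    class Point(object):
        def __init__(self, x):
            self.x = x
        def __eq__(self, other):
            return self.x == other.x
        def __ne__(self, other):
            return not (self == other)

    PointComparisonTests = make_comparison_tests(Point, {'x': 1}, {'x': 2})
    print(PointComparisonTests.__name__)   # PointComparisonTests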
6,740
def _all_block_types(descriptors, aside_types): block_types = set() for descriptor in descriptors: block_types.add(BlockTypeKeyV1(descriptor.entry_point, descriptor.scope_ids.block_type)) for aside_type in aside_types: block_types.add(BlockTypeKeyV1(XBlockAside.entry_point, aside_type)) return block_types
[ "def", "_all_block_types", "(", "descriptors", ",", "aside_types", ")", ":", "block_types", "=", "set", "(", ")", "for", "descriptor", "in", "descriptors", ":", "block_types", ".", "add", "(", "BlockTypeKeyV1", "(", "descriptor", ".", "entry_point", ",", "descriptor", ".", "scope_ids", ".", "block_type", ")", ")", "for", "aside_type", "in", "aside_types", ":", "block_types", ".", "add", "(", "BlockTypeKeyV1", "(", "XBlockAside", ".", "entry_point", ",", "aside_type", ")", ")", "return", "block_types" ]
return a set of all block_types for the supplied descriptors and for the asides types in aside_types associated with those descriptors .
train
false
6,741
def check_dns_name(bucket_name): if ('.' in bucket_name): return False n = len(bucket_name) if ((n < 3) or (n > 63)): return False if (n == 1): if (not bucket_name.isalnum()): return False match = LABEL_RE.match(bucket_name) if ((match is None) or (match.end() != len(bucket_name))): return False return True
[ "def", "check_dns_name", "(", "bucket_name", ")", ":", "if", "(", "'.'", "in", "bucket_name", ")", ":", "return", "False", "n", "=", "len", "(", "bucket_name", ")", "if", "(", "(", "n", "<", "3", ")", "or", "(", "n", ">", "63", ")", ")", ":", "return", "False", "if", "(", "n", "==", "1", ")", ":", "if", "(", "not", "bucket_name", ".", "isalnum", "(", ")", ")", ":", "return", "False", "match", "=", "LABEL_RE", ".", "match", "(", "bucket_name", ")", "if", "(", "(", "match", "is", "None", ")", "or", "(", "match", ".", "end", "(", ")", "!=", "len", "(", "bucket_name", ")", ")", ")", ":", "return", "False", "return", "True" ]
check to see if the bucket_name complies with the restricted dns naming conventions necessary to allow access via virtual-hosting style .
train
false
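worked examples for the record above; LABEL_RE is defined elsewhere, and in boto it admits only lowercase alphanumerics and hyphens, which is assumed here:

    print(check_dns_name('my-bucket'))   # True
    print(check_dns_name('my.bucket'))   # False -- dots are rejected outright
    print(check_dns_name('ab'))          # False -- fewer than 3 characters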
6,742
def caller_name(skip=2): stack = inspect.stack() start = (0 + skip) if (len(stack) < (start + 1)): return '' parentframe = stack[start][0] name = [] module = inspect.getmodule(parentframe) if module: name.append(module.__name__) if ('self' in parentframe.f_locals): name.append(parentframe.f_locals['self'].__class__.__name__) codename = parentframe.f_code.co_name if (codename != '<module>'): name.append(codename) del parentframe return '.'.join(name)
[ "def", "caller_name", "(", "skip", "=", "2", ")", ":", "stack", "=", "inspect", ".", "stack", "(", ")", "start", "=", "(", "0", "+", "skip", ")", "if", "(", "len", "(", "stack", ")", "<", "(", "start", "+", "1", ")", ")", ":", "return", "''", "parentframe", "=", "stack", "[", "start", "]", "[", "0", "]", "name", "=", "[", "]", "module", "=", "inspect", ".", "getmodule", "(", "parentframe", ")", "if", "module", ":", "name", ".", "append", "(", "module", ".", "__name__", ")", "if", "(", "'self'", "in", "parentframe", ".", "f_locals", ")", ":", "name", ".", "append", "(", "parentframe", ".", "f_locals", "[", "'self'", "]", ".", "__class__", ".", "__name__", ")", "codename", "=", "parentframe", ".", "f_code", ".", "co_name", "if", "(", "codename", "!=", "'<module>'", ")", ":", "name", ".", "append", "(", "codename", ")", "del", "parentframe", "return", "'.'", ".", "join", "(", "name", ")" ]
get the name of a caller in the format module.class.method .
train
true
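a usage sketch for the record above; with the default skip=2 the helper names the caller of the function that invokes it:

    def inner():
        return caller_name(skip=2)   # skip caller_name's own frame and inner's

    def outer():
        return inner()

    print(outer())   # e.g. __main__.outer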
6,743
def init_modules(machinery=True): log.debug('Importing modules...') import modules.auxiliary import_package(modules.auxiliary) import modules.processing import_package(modules.processing) import modules.signatures import_package(modules.signatures) delete_file('modules', 'reporting', 'maec40.pyc') delete_file('modules', 'reporting', 'maec41.pyc') delete_file('modules', 'reporting', 'mmdef.pyc') import modules.reporting import_package(modules.reporting) if machinery: import_plugin(('modules.machinery.' + Config().cuckoo.machinery)) for (category, entries) in list_plugins().items(): log.debug('Imported "%s" modules:', category) for entry in entries: if (entry == entries[(-1)]): log.debug(' DCTB `-- %s', entry.__name__) else: log.debug(' DCTB |-- %s', entry.__name__)
[ "def", "init_modules", "(", "machinery", "=", "True", ")", ":", "log", ".", "debug", "(", "'Importing modules...'", ")", "import", "modules", ".", "auxiliary", "import_package", "(", "modules", ".", "auxiliary", ")", "import", "modules", ".", "processing", "import_package", "(", "modules", ".", "processing", ")", "import", "modules", ".", "signatures", "import_package", "(", "modules", ".", "signatures", ")", "delete_file", "(", "'modules'", ",", "'reporting'", ",", "'maec40.pyc'", ")", "delete_file", "(", "'modules'", ",", "'reporting'", ",", "'maec41.pyc'", ")", "delete_file", "(", "'modules'", ",", "'reporting'", ",", "'mmdef.pyc'", ")", "import", "modules", ".", "reporting", "import_package", "(", "modules", ".", "reporting", ")", "if", "machinery", ":", "import_plugin", "(", "(", "'modules.machinery.'", "+", "Config", "(", ")", ".", "cuckoo", ".", "machinery", ")", ")", "for", "(", "category", ",", "entries", ")", "in", "list_plugins", "(", ")", ".", "items", "(", ")", ":", "log", ".", "debug", "(", "'Imported \"%s\" modules:'", ",", "category", ")", "for", "entry", "in", "entries", ":", "if", "(", "entry", "==", "entries", "[", "(", "-", "1", ")", "]", ")", ":", "log", ".", "debug", "(", "' DCTB `-- %s'", ",", "entry", ".", "__name__", ")", "else", ":", "log", ".", "debug", "(", "' DCTB |-- %s'", ",", "entry", ".", "__name__", ")" ]
initializes plugins .
train
false