id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
14,202
def qt_menu_nib_dir(namespace): if (namespace not in ['PyQt4', 'PyQt5', 'PySide']): raise Exception('Invalid namespace: {0}'.format(namespace)) menu_dir = None path = exec_statement("\n from {0}.QtCore import QLibraryInfo\n path = QLibraryInfo.location(QLibraryInfo.LibrariesPath)\n str = getattr(__builtins__, 'unicode', str) # for Python 2\n print(str(path))\n ".format(namespace)) anaconda_path = os.path.join(sys.exec_prefix, 'python.app', 'Contents', 'Resources') paths = [os.path.join(path, 'Resources'), os.path.join(path, 'QtGui.framework', 'Resources'), anaconda_path] for location in paths: path = os.path.join(location, 'qt_menu.nib') if os.path.exists(path): menu_dir = path logger.debug('Found qt_menu.nib for {0} at {1}'.format(namespace, path)) break if (not menu_dir): raise Exception('\n Cannot find qt_menu.nib for {0}\n Path checked: {1}\n '.format(namespace, ', '.join(paths))) return menu_dir
[ "def", "qt_menu_nib_dir", "(", "namespace", ")", ":", "if", "(", "namespace", "not", "in", "[", "'PyQt4'", ",", "'PyQt5'", ",", "'PySide'", "]", ")", ":", "raise", "Exception", "(", "'Invalid namespace: {0}'", ".", "format", "(", "namespace", ")", ")", "menu_dir", "=", "None", "path", "=", "exec_statement", "(", "\"\\n from {0}.QtCore import QLibraryInfo\\n path = QLibraryInfo.location(QLibraryInfo.LibrariesPath)\\n str = getattr(__builtins__, 'unicode', str) # for Python 2\\n print(str(path))\\n \"", ".", "format", "(", "namespace", ")", ")", "anaconda_path", "=", "os", ".", "path", ".", "join", "(", "sys", ".", "exec_prefix", ",", "'python.app'", ",", "'Contents'", ",", "'Resources'", ")", "paths", "=", "[", "os", ".", "path", ".", "join", "(", "path", ",", "'Resources'", ")", ",", "os", ".", "path", ".", "join", "(", "path", ",", "'QtGui.framework'", ",", "'Resources'", ")", ",", "anaconda_path", "]", "for", "location", "in", "paths", ":", "path", "=", "os", ".", "path", ".", "join", "(", "location", ",", "'qt_menu.nib'", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "menu_dir", "=", "path", "logger", ".", "debug", "(", "'Found qt_menu.nib for {0} at {1}'", ".", "format", "(", "namespace", ",", "path", ")", ")", "break", "if", "(", "not", "menu_dir", ")", ":", "raise", "Exception", "(", "'\\n Cannot find qt_menu.nib for {0}\\n Path checked: {1}\\n '", ".", "format", "(", "namespace", ",", "', '", ".", "join", "(", "paths", ")", ")", ")", "return", "menu_dir" ]
return path to qt resource dir qt_menu .
train
false
14,204
def _transcripts_logic(transcripts_presence, videos): command = None subs = '' if (transcripts_presence['youtube_diff'] and transcripts_presence['youtube_local'] and transcripts_presence['youtube_server']): command = 'replace' subs = videos['youtube'] elif transcripts_presence['youtube_local']: command = 'found' subs = videos['youtube'] elif transcripts_presence['youtube_server']: command = 'import' elif transcripts_presence['html5_local']: if ((len(transcripts_presence['html5_local']) == 1) or transcripts_presence['html5_equal']): command = 'found' subs = transcripts_presence['html5_local'][0] else: command = 'choose' subs = transcripts_presence['html5_local'][0] elif (transcripts_presence['current_item_subs'] and (not transcripts_presence['is_youtube_mode'])): log.debug('Command is use existing %s subs', transcripts_presence['current_item_subs']) command = 'use_existing' else: command = 'not_found' log.debug('Resulted command: %s, current transcripts: %s, youtube mode: %s', command, transcripts_presence['current_item_subs'], transcripts_presence['is_youtube_mode']) return (command, subs)
[ "def", "_transcripts_logic", "(", "transcripts_presence", ",", "videos", ")", ":", "command", "=", "None", "subs", "=", "''", "if", "(", "transcripts_presence", "[", "'youtube_diff'", "]", "and", "transcripts_presence", "[", "'youtube_local'", "]", "and", "transcripts_presence", "[", "'youtube_server'", "]", ")", ":", "command", "=", "'replace'", "subs", "=", "videos", "[", "'youtube'", "]", "elif", "transcripts_presence", "[", "'youtube_local'", "]", ":", "command", "=", "'found'", "subs", "=", "videos", "[", "'youtube'", "]", "elif", "transcripts_presence", "[", "'youtube_server'", "]", ":", "command", "=", "'import'", "elif", "transcripts_presence", "[", "'html5_local'", "]", ":", "if", "(", "(", "len", "(", "transcripts_presence", "[", "'html5_local'", "]", ")", "==", "1", ")", "or", "transcripts_presence", "[", "'html5_equal'", "]", ")", ":", "command", "=", "'found'", "subs", "=", "transcripts_presence", "[", "'html5_local'", "]", "[", "0", "]", "else", ":", "command", "=", "'choose'", "subs", "=", "transcripts_presence", "[", "'html5_local'", "]", "[", "0", "]", "elif", "(", "transcripts_presence", "[", "'current_item_subs'", "]", "and", "(", "not", "transcripts_presence", "[", "'is_youtube_mode'", "]", ")", ")", ":", "log", ".", "debug", "(", "'Command is use existing %s subs'", ",", "transcripts_presence", "[", "'current_item_subs'", "]", ")", "command", "=", "'use_existing'", "else", ":", "command", "=", "'not_found'", "log", ".", "debug", "(", "'Resulted command: %s, current transcripts: %s, youtube mode: %s'", ",", "command", ",", "transcripts_presence", "[", "'current_item_subs'", "]", ",", "transcripts_presence", "[", "'is_youtube_mode'", "]", ")", "return", "(", "command", ",", "subs", ")" ]
by transcripts_presence content .
train
false
14,208
def plugin_schemas(**kwargs): return {u'type': u'object', u'properties': dict(((p.name, {u'$ref': p.schema[u'id']}) for p in get_plugins(**kwargs))), u'additionalProperties': False, u'error_additionalProperties': u'{{message}} Only known plugin names are valid keys.', u'patternProperties': {u'^_': {u'title': u'Disabled Plugin'}}}
[ "def", "plugin_schemas", "(", "**", "kwargs", ")", ":", "return", "{", "u'type'", ":", "u'object'", ",", "u'properties'", ":", "dict", "(", "(", "(", "p", ".", "name", ",", "{", "u'$ref'", ":", "p", ".", "schema", "[", "u'id'", "]", "}", ")", "for", "p", "in", "get_plugins", "(", "**", "kwargs", ")", ")", ")", ",", "u'additionalProperties'", ":", "False", ",", "u'error_additionalProperties'", ":", "u'{{message}} Only known plugin names are valid keys.'", ",", "u'patternProperties'", ":", "{", "u'^_'", ":", "{", "u'title'", ":", "u'Disabled Plugin'", "}", "}", "}" ]
create a dict schema that matches plugins specified by kwargs .
train
false
14,209
def _explain_decl_node(decl_node): storage = ((' '.join(decl_node.storage) + ' ') if decl_node.storage else '') return (((decl_node.name + ' is a ') + storage) + _explain_type(decl_node.type))
[ "def", "_explain_decl_node", "(", "decl_node", ")", ":", "storage", "=", "(", "(", "' '", ".", "join", "(", "decl_node", ".", "storage", ")", "+", "' '", ")", "if", "decl_node", ".", "storage", "else", "''", ")", "return", "(", "(", "(", "decl_node", ".", "name", "+", "' is a '", ")", "+", "storage", ")", "+", "_explain_type", "(", "decl_node", ".", "type", ")", ")" ]
receives a c_ast .
train
false
14,211
def get_unique_id(): unique_id = ''.join((random.choice((string.ascii_lowercase + string.digits)) for _ in range(CACHE_SLUG_PROD_LENGTH))) return unique_id
[ "def", "get_unique_id", "(", ")", ":", "unique_id", "=", "''", ".", "join", "(", "(", "random", ".", "choice", "(", "(", "string", ".", "ascii_lowercase", "+", "string", ".", "digits", ")", ")", "for", "_", "in", "range", "(", "CACHE_SLUG_PROD_LENGTH", ")", ")", ")", "return", "unique_id" ]
returns a unique id .
train
false
14,212
def matching_subdomains(new_value, old_value): if ((new_value is None) and (old_value is not None)): return False if (new_value.lower() == old_value.lower()): return True new_domain = naked_domain(new_value) old_domain = naked_domain(old_value) if (new_domain == old_domain): return True new_parent_domain = parent_domain(new_value) old_parent_domain = parent_domain(old_value) if (old_parent_domain is None): log.error('old_parent_domain is None', old_value=old_value, new_value=new_value) return False if (new_parent_domain is None): log.error('new_parent_domain is None', old_value=old_value, new_value=new_value) return False if (new_parent_domain != old_parent_domain): log.error("Domains aren't matching", new_value=new_value, old_value=old_value) return False new_ip = resolve_hostname(new_value) old_ip = resolve_hostname(old_value) if ((new_ip is None) or (old_ip is None) or (new_ip != old_ip)): log.error("IP addresses aren't matching", new_value=new_value, old_Value=old_value) return False return True
[ "def", "matching_subdomains", "(", "new_value", ",", "old_value", ")", ":", "if", "(", "(", "new_value", "is", "None", ")", "and", "(", "old_value", "is", "not", "None", ")", ")", ":", "return", "False", "if", "(", "new_value", ".", "lower", "(", ")", "==", "old_value", ".", "lower", "(", ")", ")", ":", "return", "True", "new_domain", "=", "naked_domain", "(", "new_value", ")", "old_domain", "=", "naked_domain", "(", "old_value", ")", "if", "(", "new_domain", "==", "old_domain", ")", ":", "return", "True", "new_parent_domain", "=", "parent_domain", "(", "new_value", ")", "old_parent_domain", "=", "parent_domain", "(", "old_value", ")", "if", "(", "old_parent_domain", "is", "None", ")", ":", "log", ".", "error", "(", "'old_parent_domain is None'", ",", "old_value", "=", "old_value", ",", "new_value", "=", "new_value", ")", "return", "False", "if", "(", "new_parent_domain", "is", "None", ")", ":", "log", ".", "error", "(", "'new_parent_domain is None'", ",", "old_value", "=", "old_value", ",", "new_value", "=", "new_value", ")", "return", "False", "if", "(", "new_parent_domain", "!=", "old_parent_domain", ")", ":", "log", ".", "error", "(", "\"Domains aren't matching\"", ",", "new_value", "=", "new_value", ",", "old_value", "=", "old_value", ")", "return", "False", "new_ip", "=", "resolve_hostname", "(", "new_value", ")", "old_ip", "=", "resolve_hostname", "(", "old_value", ")", "if", "(", "(", "new_ip", "is", "None", ")", "or", "(", "old_ip", "is", "None", ")", "or", "(", "new_ip", "!=", "old_ip", ")", ")", ":", "log", ".", "error", "(", "\"IP addresses aren't matching\"", ",", "new_value", "=", "new_value", ",", "old_Value", "=", "old_value", ")", "return", "False", "return", "True" ]
we allow our customers to update their server addresses .
train
false
14,214
def task_open_control_firewall(distribution): if is_centos_or_rhel(distribution): open_firewall = open_firewalld elif is_ubuntu(distribution): open_firewall = open_ufw else: raise DistributionNotSupported(distribution=distribution) return sequence([open_firewall(service) for service in ['flocker-control-api', 'flocker-control-agent']])
[ "def", "task_open_control_firewall", "(", "distribution", ")", ":", "if", "is_centos_or_rhel", "(", "distribution", ")", ":", "open_firewall", "=", "open_firewalld", "elif", "is_ubuntu", "(", "distribution", ")", ":", "open_firewall", "=", "open_ufw", "else", ":", "raise", "DistributionNotSupported", "(", "distribution", "=", "distribution", ")", "return", "sequence", "(", "[", "open_firewall", "(", "service", ")", "for", "service", "in", "[", "'flocker-control-api'", ",", "'flocker-control-agent'", "]", "]", ")" ]
open the firewall for flocker-control .
train
false
14,215
def ensure_directory_exists(dirname, context=None): real_dirname = dirname if context: real_dirname = realpath_with_context(dirname, context) if (not os.path.exists(real_dirname)): mas_iterations = 2 if sys.platform.startswith('win'): mas_iterations = 10 ensure_makedirs(real_dirname, mas_iterations) assert os.path.exists(real_dirname), ('ENSURE dir exists: %s' % dirname) assert os.path.isdir(real_dirname), ('ENSURE isa dir: %s' % dirname)
[ "def", "ensure_directory_exists", "(", "dirname", ",", "context", "=", "None", ")", ":", "real_dirname", "=", "dirname", "if", "context", ":", "real_dirname", "=", "realpath_with_context", "(", "dirname", ",", "context", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "real_dirname", ")", ")", ":", "mas_iterations", "=", "2", "if", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ":", "mas_iterations", "=", "10", "ensure_makedirs", "(", "real_dirname", ",", "mas_iterations", ")", "assert", "os", ".", "path", ".", "exists", "(", "real_dirname", ")", ",", "(", "'ENSURE dir exists: %s'", "%", "dirname", ")", "assert", "os", ".", "path", ".", "isdir", "(", "real_dirname", ")", ",", "(", "'ENSURE isa dir: %s'", "%", "dirname", ")" ]
creates the given directory if it does not already exist .
train
false
14,217
def dup_div(f, g, K): if K.has_Field: return dup_ff_div(f, g, K) else: return dup_rr_div(f, g, K)
[ "def", "dup_div", "(", "f", ",", "g", ",", "K", ")", ":", "if", "K", ".", "has_Field", ":", "return", "dup_ff_div", "(", "f", ",", "g", ",", "K", ")", "else", ":", "return", "dup_rr_div", "(", "f", ",", "g", ",", "K", ")" ]
polynomial division with remainder in k[x] .
train
false
14,218
def AFTER_SETUP(): admin.command('enablesharding', ShardMONGODB_DB) for (collection, keystr) in COLLECTION_KEYS.iteritems(): key = SON(((k, 1) for k in keystr.split(','))) admin.command('shardcollection', ((ShardMONGODB_DB + '.') + collection), key=key) admin.command('shardcollection', (((ShardMONGODB_DB + '.') + GridFs_Collection) + '.files'), key={'_id': 1}) admin.command('shardcollection', (((ShardMONGODB_DB + '.') + GridFs_Collection) + '.chunks'), key={'files_id': 1}) for (k, v) in INDEX.items(): for (key, kwargs) in v.items(): conn[ShardMONGODB_DB][k].ensure_index((list(key) if (type(key) == types.TupleType) else key), **kwargs)
[ "def", "AFTER_SETUP", "(", ")", ":", "admin", ".", "command", "(", "'enablesharding'", ",", "ShardMONGODB_DB", ")", "for", "(", "collection", ",", "keystr", ")", "in", "COLLECTION_KEYS", ".", "iteritems", "(", ")", ":", "key", "=", "SON", "(", "(", "(", "k", ",", "1", ")", "for", "k", "in", "keystr", ".", "split", "(", "','", ")", ")", ")", "admin", ".", "command", "(", "'shardcollection'", ",", "(", "(", "ShardMONGODB_DB", "+", "'.'", ")", "+", "collection", ")", ",", "key", "=", "key", ")", "admin", ".", "command", "(", "'shardcollection'", ",", "(", "(", "(", "ShardMONGODB_DB", "+", "'.'", ")", "+", "GridFs_Collection", ")", "+", "'.files'", ")", ",", "key", "=", "{", "'_id'", ":", "1", "}", ")", "admin", ".", "command", "(", "'shardcollection'", ",", "(", "(", "(", "ShardMONGODB_DB", "+", "'.'", ")", "+", "GridFs_Collection", ")", "+", "'.chunks'", ")", ",", "key", "=", "{", "'files_id'", ":", "1", "}", ")", "for", "(", "k", ",", "v", ")", "in", "INDEX", ".", "items", "(", ")", ":", "for", "(", "key", ",", "kwargs", ")", "in", "v", ".", "items", "(", ")", ":", "conn", "[", "ShardMONGODB_DB", "]", "[", "k", "]", ".", "ensure_index", "(", "(", "list", "(", "key", ")", "if", "(", "type", "(", "key", ")", "==", "types", ".", "TupleType", ")", "else", "key", ")", ",", "**", "kwargs", ")" ]
make index and shard keys .
train
false
14,219
def column_mapped_collection(mapping_spec): cols = [expression._only_column_elements(q, 'mapping_spec') for q in util.to_list(mapping_spec)] keyfunc = _PlainColumnGetter(cols) return (lambda : MappedCollection(keyfunc))
[ "def", "column_mapped_collection", "(", "mapping_spec", ")", ":", "cols", "=", "[", "expression", ".", "_only_column_elements", "(", "q", ",", "'mapping_spec'", ")", "for", "q", "in", "util", ".", "to_list", "(", "mapping_spec", ")", "]", "keyfunc", "=", "_PlainColumnGetter", "(", "cols", ")", "return", "(", "lambda", ":", "MappedCollection", "(", "keyfunc", ")", ")" ]
a dictionary-based collection type with column-based keying .
train
false
14,220
def upgrade_tools_all(call=None): if (call != 'function'): raise SaltCloudSystemExit('The upgrade_tools_all function must be called with -f or --function.') ret = {} vm_properties = ['name'] vm_list = salt.utils.vmware.get_mors_with_properties(_get_si(), vim.VirtualMachine, vm_properties) for vm in vm_list: ret[vm['name']] = _upg_tools_helper(vm['object']) return ret
[ "def", "upgrade_tools_all", "(", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The upgrade_tools_all function must be called with -f or --function.'", ")", "ret", "=", "{", "}", "vm_properties", "=", "[", "'name'", "]", "vm_list", "=", "salt", ".", "utils", ".", "vmware", ".", "get_mors_with_properties", "(", "_get_si", "(", ")", ",", "vim", ".", "VirtualMachine", ",", "vm_properties", ")", "for", "vm", "in", "vm_list", ":", "ret", "[", "vm", "[", "'name'", "]", "]", "=", "_upg_tools_helper", "(", "vm", "[", "'object'", "]", ")", "return", "ret" ]
to upgrade vmware tools on all virtual machines present in the specified provider .
train
true
14,221
def delete_orphaned_thumbs(): documents_path = os.path.join(settings.MEDIA_ROOT, 'thumbs') for filename in os.listdir(documents_path): fn = os.path.join(documents_path, filename) model = filename.split('-')[0] uuid = filename.replace(model, '').replace('-thumb.png', '')[1:] if (ResourceBase.objects.filter(uuid=uuid).count() == 0): print ('Removing orphan thumb %s' % fn) try: os.remove(fn) except OSError: print ('Could not delete file %s' % fn)
[ "def", "delete_orphaned_thumbs", "(", ")", ":", "documents_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "'thumbs'", ")", "for", "filename", "in", "os", ".", "listdir", "(", "documents_path", ")", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "documents_path", ",", "filename", ")", "model", "=", "filename", ".", "split", "(", "'-'", ")", "[", "0", "]", "uuid", "=", "filename", ".", "replace", "(", "model", ",", "''", ")", ".", "replace", "(", "'-thumb.png'", ",", "''", ")", "[", "1", ":", "]", "if", "(", "ResourceBase", ".", "objects", ".", "filter", "(", "uuid", "=", "uuid", ")", ".", "count", "(", ")", "==", "0", ")", ":", "print", "(", "'Removing orphan thumb %s'", "%", "fn", ")", "try", ":", "os", ".", "remove", "(", "fn", ")", "except", "OSError", ":", "print", "(", "'Could not delete file %s'", "%", "fn", ")" ]
deletes orphaned thumbnails .
train
false
14,223
def NATR(barDs, count, timeperiod=(- (2 ** 31))): return call_talib_with_hlc(barDs, count, talib.NATR, timeperiod)
[ "def", "NATR", "(", "barDs", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_hlc", "(", "barDs", ",", "count", ",", "talib", ".", "NATR", ",", "timeperiod", ")" ]
normalized average true range .
train
false
14,225
def disable_password_auth(sshd_config='/etc/ssh/sshd_config'): _update_ssh_setting(sshd_config, 'PasswordAuthentication', 'no')
[ "def", "disable_password_auth", "(", "sshd_config", "=", "'/etc/ssh/sshd_config'", ")", ":", "_update_ssh_setting", "(", "sshd_config", ",", "'PasswordAuthentication'", ",", "'no'", ")" ]
do not allow users to use passwords to login via ssh .
train
false
14,228
def export_set(dataset): return yaml.safe_dump(dataset._package(ordered=False))
[ "def", "export_set", "(", "dataset", ")", ":", "return", "yaml", ".", "safe_dump", "(", "dataset", ".", "_package", "(", "ordered", "=", "False", ")", ")" ]
html representation of a dataset .
train
false
14,229
def allow_port(port, proto='tcp', direction='both'): ports = get_ports(proto=proto, direction=direction) direction = direction.upper() _validate_direction_and_proto(direction, proto) directions = build_directions(direction) results = [] for direction in directions: _ports = ports[direction] _ports.append(port) results += allow_ports(_ports, proto=proto, direction=direction) return results
[ "def", "allow_port", "(", "port", ",", "proto", "=", "'tcp'", ",", "direction", "=", "'both'", ")", ":", "ports", "=", "get_ports", "(", "proto", "=", "proto", ",", "direction", "=", "direction", ")", "direction", "=", "direction", ".", "upper", "(", ")", "_validate_direction_and_proto", "(", "direction", ",", "proto", ")", "directions", "=", "build_directions", "(", "direction", ")", "results", "=", "[", "]", "for", "direction", "in", "directions", ":", "_ports", "=", "ports", "[", "direction", "]", "_ports", ".", "append", "(", "port", ")", "results", "+=", "allow_ports", "(", "_ports", ",", "proto", "=", "proto", ",", "direction", "=", "direction", ")", "return", "results" ]
like allow_ports .
train
true
14,231
@gen.engine def SetupAndDispatch(callback): EmailManager.SetInstance(SendGridEmailManager()) ThrottleUsage() client = db_client.DBClient.Instance() (yield gen.Task(Dispatch, client)) callback()
[ "@", "gen", ".", "engine", "def", "SetupAndDispatch", "(", "callback", ")", ":", "EmailManager", ".", "SetInstance", "(", "SendGridEmailManager", "(", ")", ")", "ThrottleUsage", "(", ")", "client", "=", "db_client", ".", "DBClient", ".", "Instance", "(", ")", "(", "yield", "gen", ".", "Task", "(", "Dispatch", ",", "client", ")", ")", "callback", "(", ")" ]
sets the environment and dispatches according to command-line options .
train
false
14,233
def _integer_basis(poly): (monoms, coeffs) = list(zip(*poly.terms())) (monoms,) = list(zip(*monoms)) coeffs = list(map(abs, coeffs)) if (coeffs[0] < coeffs[(-1)]): coeffs = list(reversed(coeffs)) n = monoms[0] monoms = [(n - i) for i in reversed(monoms)] else: return None monoms = monoms[:(-1)] coeffs = coeffs[:(-1)] divs = reversed(divisors(gcd_list(coeffs))[1:]) try: div = next(divs) except StopIteration: return None while True: for (monom, coeff) in zip(monoms, coeffs): if ((coeff % (div ** monom)) != 0): try: div = next(divs) except StopIteration: return None else: break else: return div
[ "def", "_integer_basis", "(", "poly", ")", ":", "(", "monoms", ",", "coeffs", ")", "=", "list", "(", "zip", "(", "*", "poly", ".", "terms", "(", ")", ")", ")", "(", "monoms", ",", ")", "=", "list", "(", "zip", "(", "*", "monoms", ")", ")", "coeffs", "=", "list", "(", "map", "(", "abs", ",", "coeffs", ")", ")", "if", "(", "coeffs", "[", "0", "]", "<", "coeffs", "[", "(", "-", "1", ")", "]", ")", ":", "coeffs", "=", "list", "(", "reversed", "(", "coeffs", ")", ")", "n", "=", "monoms", "[", "0", "]", "monoms", "=", "[", "(", "n", "-", "i", ")", "for", "i", "in", "reversed", "(", "monoms", ")", "]", "else", ":", "return", "None", "monoms", "=", "monoms", "[", ":", "(", "-", "1", ")", "]", "coeffs", "=", "coeffs", "[", ":", "(", "-", "1", ")", "]", "divs", "=", "reversed", "(", "divisors", "(", "gcd_list", "(", "coeffs", ")", ")", "[", "1", ":", "]", ")", "try", ":", "div", "=", "next", "(", "divs", ")", "except", "StopIteration", ":", "return", "None", "while", "True", ":", "for", "(", "monom", ",", "coeff", ")", "in", "zip", "(", "monoms", ",", "coeffs", ")", ":", "if", "(", "(", "coeff", "%", "(", "div", "**", "monom", ")", ")", "!=", "0", ")", ":", "try", ":", "div", "=", "next", "(", "divs", ")", "except", "StopIteration", ":", "return", "None", "else", ":", "break", "else", ":", "return", "div" ]
compute coefficient basis for a polynomial over integers .
train
false
14,236
def node_prereqs_installation(): cb_error_text = 'Subprocess return code: 1' sh('test `npm config get registry` = "{reg}" || (echo setting registry; npm config set registry {reg})'.format(reg=NPM_REGISTRY)) try: sh('npm install') except BuildFailure as error_text: if (cb_error_text in error_text): print 'npm install error detected. Retrying...' sh('npm install') else: raise BuildFailure(error_text)
[ "def", "node_prereqs_installation", "(", ")", ":", "cb_error_text", "=", "'Subprocess return code: 1'", "sh", "(", "'test `npm config get registry` = \"{reg}\" || (echo setting registry; npm config set registry {reg})'", ".", "format", "(", "reg", "=", "NPM_REGISTRY", ")", ")", "try", ":", "sh", "(", "'npm install'", ")", "except", "BuildFailure", "as", "error_text", ":", "if", "(", "cb_error_text", "in", "error_text", ")", ":", "print", "'npm install error detected. Retrying...'", "sh", "(", "'npm install'", ")", "else", ":", "raise", "BuildFailure", "(", "error_text", ")" ]
configures npm and installs node prerequisites .
train
false
14,237
def list_templates(kwargs=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The list_templates function must be called with -f or --function.') return {'Templates': avail_images(call='function')}
[ "def", "list_templates", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_templates function must be called with -f or --function.'", ")", "return", "{", "'Templates'", ":", "avail_images", "(", "call", "=", "'function'", ")", "}" ]
lists all templates available to the user and the users groups .
train
false
14,238
def _process_docstring(app, what, name, obj, options, lines): result_lines = lines docstring = None if app.config.napoleon_numpy_docstring: docstring = NumpyDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() if app.config.napoleon_google_docstring: docstring = GoogleDocstring(result_lines, app.config, app, what, name, obj, options) result_lines = docstring.lines() lines[:] = result_lines[:]
[ "def", "_process_docstring", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ",", "lines", ")", ":", "result_lines", "=", "lines", "docstring", "=", "None", "if", "app", ".", "config", ".", "napoleon_numpy_docstring", ":", "docstring", "=", "NumpyDocstring", "(", "result_lines", ",", "app", ".", "config", ",", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ")", "result_lines", "=", "docstring", ".", "lines", "(", ")", "if", "app", ".", "config", ".", "napoleon_google_docstring", ":", "docstring", "=", "GoogleDocstring", "(", "result_lines", ",", "app", ".", "config", ",", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ")", "result_lines", "=", "docstring", ".", "lines", "(", ")", "lines", "[", ":", "]", "=", "result_lines", "[", ":", "]" ]
process the docstring for a given python object .
train
false
14,240
def _is_exposed(minion): return salt.utils.check_whitelist_blacklist(minion, whitelist=__opts__['minionfs_whitelist'], blacklist=__opts__['minionfs_blacklist'])
[ "def", "_is_exposed", "(", "minion", ")", ":", "return", "salt", ".", "utils", ".", "check_whitelist_blacklist", "(", "minion", ",", "whitelist", "=", "__opts__", "[", "'minionfs_whitelist'", "]", ",", "blacklist", "=", "__opts__", "[", "'minionfs_blacklist'", "]", ")" ]
check if the minion is exposed .
train
true
14,241
def dmp_slice(f, m, n, u, K): return dmp_slice_in(f, m, n, 0, u, K)
[ "def", "dmp_slice", "(", "f", ",", "m", ",", "n", ",", "u", ",", "K", ")", ":", "return", "dmp_slice_in", "(", "f", ",", "m", ",", "n", ",", "0", ",", "u", ",", "K", ")" ]
take a continuous subsequence of terms of f in k[x] .
train
false
14,242
def popd(): from twill import commands where = _dirstack.pop() os.chdir(where) print >>commands.OUT, ('popped back to directory "%s"' % (where,)) commands.setglobal('__dir__', where)
[ "def", "popd", "(", ")", ":", "from", "twill", "import", "commands", "where", "=", "_dirstack", ".", "pop", "(", ")", "os", ".", "chdir", "(", "where", ")", "print", ">>", "commands", ".", "OUT", ",", "(", "'popped back to directory \"%s\"'", "%", "(", "where", ",", ")", ")", "commands", ".", "setglobal", "(", "'__dir__'", ",", "where", ")" ]
should behave like python2 .
train
false
14,243
def count_total_params(layers, layer_set=None): if (layer_set is None): layer_set = set() trainable_count = 0 non_trainable_count = 0 for layer in layers: if (layer in layer_set): continue layer_set.add(layer) if isinstance(layer, (Model, Sequential)): (t, nt) = count_total_params(layer.layers, layer_set) trainable_count += t non_trainable_count += nt else: trainable_count += sum([K.count_params(p) for p in layer.trainable_weights]) non_trainable_count += sum([K.count_params(p) for p in layer.non_trainable_weights]) return (trainable_count, non_trainable_count)
[ "def", "count_total_params", "(", "layers", ",", "layer_set", "=", "None", ")", ":", "if", "(", "layer_set", "is", "None", ")", ":", "layer_set", "=", "set", "(", ")", "trainable_count", "=", "0", "non_trainable_count", "=", "0", "for", "layer", "in", "layers", ":", "if", "(", "layer", "in", "layer_set", ")", ":", "continue", "layer_set", ".", "add", "(", "layer", ")", "if", "isinstance", "(", "layer", ",", "(", "Model", ",", "Sequential", ")", ")", ":", "(", "t", ",", "nt", ")", "=", "count_total_params", "(", "layer", ".", "layers", ",", "layer_set", ")", "trainable_count", "+=", "t", "non_trainable_count", "+=", "nt", "else", ":", "trainable_count", "+=", "sum", "(", "[", "K", ".", "count_params", "(", "p", ")", "for", "p", "in", "layer", ".", "trainable_weights", "]", ")", "non_trainable_count", "+=", "sum", "(", "[", "K", ".", "count_params", "(", "p", ")", "for", "p", "in", "layer", ".", "non_trainable_weights", "]", ")", "return", "(", "trainable_count", ",", "non_trainable_count", ")" ]
counts the number of parameters in a list of layers .
train
false
14,244
def _nbits(n, correction={'0': 4, '1': 3, '2': 2, '3': 2, '4': 1, '5': 1, '6': 1, '7': 1, '8': 0, '9': 0, 'a': 0, 'b': 0, 'c': 0, 'd': 0, 'e': 0, 'f': 0}): if (n < 0): raise ValueError('The argument to _nbits should be nonnegative.') hex_n = ('%x' % n) return ((4 * len(hex_n)) - correction[hex_n[0]])
[ "def", "_nbits", "(", "n", ",", "correction", "=", "{", "'0'", ":", "4", ",", "'1'", ":", "3", ",", "'2'", ":", "2", ",", "'3'", ":", "2", ",", "'4'", ":", "1", ",", "'5'", ":", "1", ",", "'6'", ":", "1", ",", "'7'", ":", "1", ",", "'8'", ":", "0", ",", "'9'", ":", "0", ",", "'a'", ":", "0", ",", "'b'", ":", "0", ",", "'c'", ":", "0", ",", "'d'", ":", "0", ",", "'e'", ":", "0", ",", "'f'", ":", "0", "}", ")", ":", "if", "(", "n", "<", "0", ")", ":", "raise", "ValueError", "(", "'The argument to _nbits should be nonnegative.'", ")", "hex_n", "=", "(", "'%x'", "%", "n", ")", "return", "(", "(", "4", "*", "len", "(", "hex_n", ")", ")", "-", "correction", "[", "hex_n", "[", "0", "]", "]", ")" ]
number of bits in binary representation of the positive integer n .
train
false
14,245
def _swap_settings(new): settings = django.conf.settings old = {} for (key, value) in new.iteritems(): old[key] = getattr(settings, key, None) setattr(settings, key, value) return old
[ "def", "_swap_settings", "(", "new", ")", ":", "settings", "=", "django", ".", "conf", ".", "settings", "old", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "new", ".", "iteritems", "(", ")", ":", "old", "[", "key", "]", "=", "getattr", "(", "settings", ",", "key", ",", "None", ")", "setattr", "(", "settings", ",", "key", ",", "value", ")", "return", "old" ]
swap in selected django settings .
train
false
14,246
def proc_rheader(r): if (r.representation == 'html'): plan = r.record if plan: T = current.T tabs = [(T('Edit Details'), None), (T('Items'), 'plan_item')] rheader_tabs = s3_rheader_tabs(r, tabs) table = r.table rheader = DIV(TABLE(TR(TH(('%s: ' % table.site_id.label)), table.site_id.represent(plan.site_id)), TR(TH(('%s: ' % table.order_date.label)), table.order_date.represent(plan.order_date)), TR(TH(('%s: ' % table.eta.label)), table.eta.represent(plan.eta)), TR(TH(('%s: ' % table.shipping.label)), table.shipping.represent(plan.shipping))), rheader_tabs) return rheader return None
[ "def", "proc_rheader", "(", "r", ")", ":", "if", "(", "r", ".", "representation", "==", "'html'", ")", ":", "plan", "=", "r", ".", "record", "if", "plan", ":", "T", "=", "current", ".", "T", "tabs", "=", "[", "(", "T", "(", "'Edit Details'", ")", ",", "None", ")", ",", "(", "T", "(", "'Items'", ")", ",", "'plan_item'", ")", "]", "rheader_tabs", "=", "s3_rheader_tabs", "(", "r", ",", "tabs", ")", "table", "=", "r", ".", "table", "rheader", "=", "DIV", "(", "TABLE", "(", "TR", "(", "TH", "(", "(", "'%s: '", "%", "table", ".", "site_id", ".", "label", ")", ")", ",", "table", ".", "site_id", ".", "represent", "(", "plan", ".", "site_id", ")", ")", ",", "TR", "(", "TH", "(", "(", "'%s: '", "%", "table", ".", "order_date", ".", "label", ")", ")", ",", "table", ".", "order_date", ".", "represent", "(", "plan", ".", "order_date", ")", ")", ",", "TR", "(", "TH", "(", "(", "'%s: '", "%", "table", ".", "eta", ".", "label", ")", ")", ",", "table", ".", "eta", ".", "represent", "(", "plan", ".", "eta", ")", ")", ",", "TR", "(", "TH", "(", "(", "'%s: '", "%", "table", ".", "shipping", ".", "label", ")", ")", ",", "table", ".", "shipping", ".", "represent", "(", "plan", ".", "shipping", ")", ")", ")", ",", "rheader_tabs", ")", "return", "rheader", "return", "None" ]
resource header for procurements .
train
false
14,247
def test_json(): test_data = BytesIO('{"a": "b"}') assert (hug.input_format.json(test_data) == {'a': 'b'})
[ "def", "test_json", "(", ")", ":", "test_data", "=", "BytesIO", "(", "'{\"a\": \"b\"}'", ")", "assert", "(", "hug", ".", "input_format", ".", "json", "(", "test_data", ")", "==", "{", "'a'", ":", "'b'", "}", ")" ]
test to ensure that the json type correctly handles url encoded json .
train
false
14,248
def parse_dir_entries(decoded_str): doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) entries = list() for (event, node) in doc: if ((event == 'START_ELEMENT') and (node.nodeName == 'entry')): doc.expandNode(node) if (not _get_entry_schedule(node).startswith('delete')): entries.append((node.getAttribute('path'), node.getAttribute('kind'))) return entries[1:]
[ "def", "parse_dir_entries", "(", "decoded_str", ")", ":", "doc", "=", "xml", ".", "dom", ".", "pulldom", ".", "parseString", "(", "_get_xml_data", "(", "decoded_str", ")", ")", "entries", "=", "list", "(", ")", "for", "(", "event", ",", "node", ")", "in", "doc", ":", "if", "(", "(", "event", "==", "'START_ELEMENT'", ")", "and", "(", "node", ".", "nodeName", "==", "'entry'", ")", ")", ":", "doc", ".", "expandNode", "(", "node", ")", "if", "(", "not", "_get_entry_schedule", "(", "node", ")", ".", "startswith", "(", "'delete'", ")", ")", ":", "entries", ".", "append", "(", "(", "node", ".", "getAttribute", "(", "'path'", ")", ",", "node", ".", "getAttribute", "(", "'kind'", ")", ")", ")", "return", "entries", "[", "1", ":", "]" ]
parse the entries from a recursive info xml .
train
false
14,250
def write_weighted_edgelist(G, path, comments='#', delimiter=' ', encoding='utf-8'): write_edgelist(G, path, comments=comments, delimiter=delimiter, data=('weight',), encoding=encoding)
[ "def", "write_weighted_edgelist", "(", "G", ",", "path", ",", "comments", "=", "'#'", ",", "delimiter", "=", "' '", ",", "encoding", "=", "'utf-8'", ")", ":", "write_edgelist", "(", "G", ",", "path", ",", "comments", "=", "comments", ",", "delimiter", "=", "delimiter", ",", "data", "=", "(", "'weight'", ",", ")", ",", "encoding", "=", "encoding", ")" ]
write graph g as a list of edges with numeric weights .
train
false
14,252
def _flip_codons(codon_seq, target_seq): (a, b) = ('', '') for (char1, char2) in zip(codon_seq, target_seq): if (char1 == ' '): a += char1 b += char2 else: a += char2 b += char1 return (a, b)
[ "def", "_flip_codons", "(", "codon_seq", ",", "target_seq", ")", ":", "(", "a", ",", "b", ")", "=", "(", "''", ",", "''", ")", "for", "(", "char1", ",", "char2", ")", "in", "zip", "(", "codon_seq", ",", "target_seq", ")", ":", "if", "(", "char1", "==", "' '", ")", ":", "a", "+=", "char1", "b", "+=", "char2", "else", ":", "a", "+=", "char2", "b", "+=", "char1", "return", "(", "a", ",", "b", ")" ]
flips the codon characters from one seq to another .
train
false
14,253
def try_parse_time(value): if (value is None): return None try: return parse_time(value) except ValueError: return None
[ "def", "try_parse_time", "(", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "None", "try", ":", "return", "parse_time", "(", "value", ")", "except", "ValueError", ":", "return", "None" ]
tries to make a time out of the value .
train
false
14,254
def returns_arg(function): def call_and_assert(arg, context=None): if (context is None): context = {} result = function(arg, context=context) assert (result == arg), 'Should return the argument that was passed to it, unchanged ({arg})'.format(arg=repr(arg)) return result return call_and_assert
[ "def", "returns_arg", "(", "function", ")", ":", "def", "call_and_assert", "(", "arg", ",", "context", "=", "None", ")", ":", "if", "(", "context", "is", "None", ")", ":", "context", "=", "{", "}", "result", "=", "function", "(", "arg", ",", "context", "=", "context", ")", "assert", "(", "result", "==", "arg", ")", ",", "'Should return the argument that was passed to it, unchanged ({arg})'", ".", "format", "(", "arg", "=", "repr", "(", "arg", ")", ")", "return", "result", "return", "call_and_assert" ]
a decorator that tests that the decorated function returns the argument that it is called with .
train
false
14,256
def touched_ae(dst): pardir = os.path.split(dst)[0] if (not pardir): pardir = os.curdir import Finder f = Finder.Finder() f.update(File.FSRef(pardir))
[ "def", "touched_ae", "(", "dst", ")", ":", "pardir", "=", "os", ".", "path", ".", "split", "(", "dst", ")", "[", "0", "]", "if", "(", "not", "pardir", ")", ":", "pardir", "=", "os", ".", "curdir", "import", "Finder", "f", "=", "Finder", ".", "Finder", "(", ")", "f", ".", "update", "(", "File", ".", "FSRef", "(", "pardir", ")", ")" ]
tell the finder a file has changed .
train
false
14,258
def get_category_by_name(app, name): sa_session = app.model.context.current try: return sa_session.query(app.model.Category).filter_by(name=name).one() except sqlalchemy.orm.exc.NoResultFound: return None
[ "def", "get_category_by_name", "(", "app", ",", "name", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "try", ":", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "Category", ")", ".", "filter_by", "(", "name", "=", "name", ")", ".", "one", "(", ")", "except", "sqlalchemy", ".", "orm", ".", "exc", ".", "NoResultFound", ":", "return", "None" ]
get a category from the database via name .
train
false
14,259
def non_translated_locals(): global _non_translated_locals if (not _non_translated_locals): locales = config.get('ckan.locale_order', '').split() _non_translated_locals = [x for x in locales if (x not in get_locales())] return _non_translated_locals
[ "def", "non_translated_locals", "(", ")", ":", "global", "_non_translated_locals", "if", "(", "not", "_non_translated_locals", ")", ":", "locales", "=", "config", ".", "get", "(", "'ckan.locale_order'", ",", "''", ")", ".", "split", "(", ")", "_non_translated_locals", "=", "[", "x", "for", "x", "in", "locales", "if", "(", "x", "not", "in", "get_locales", "(", ")", ")", "]", "return", "_non_translated_locals" ]
these are the locales that are available but for which there are no translations .
train
false
14,260
def shell_env(**kw): return _setenv({'shell_env': kw})
[ "def", "shell_env", "(", "**", "kw", ")", ":", "return", "_setenv", "(", "{", "'shell_env'", ":", "kw", "}", ")" ]
set shell environment variables for wrapped commands .
train
false
14,261
@scope.define def call(fn, args=(), kwargs={}): return fn(*args, **kwargs)
[ "@", "scope", ".", "define", "def", "call", "(", "fn", ",", "args", "=", "(", ")", ",", "kwargs", "=", "{", "}", ")", ":", "return", "fn", "(", "*", "args", ",", "**", "kwargs", ")" ]
invoke a remote method that returns something .
train
false
14,262
def complete(y): return linkage(y, method='complete', metric='euclidean')
[ "def", "complete", "(", "y", ")", ":", "return", "linkage", "(", "y", ",", "method", "=", "'complete'", ",", "metric", "=", "'euclidean'", ")" ]
authentication complete process .
train
false
14,263
def record_user_started_state_editor_tutorial(user_id): user_settings = get_user_settings(user_id, strict=True) user_settings.last_started_state_editor_tutorial = datetime.datetime.utcnow() _save_user_settings(user_settings)
[ "def", "record_user_started_state_editor_tutorial", "(", "user_id", ")", ":", "user_settings", "=", "get_user_settings", "(", "user_id", ",", "strict", "=", "True", ")", "user_settings", ".", "last_started_state_editor_tutorial", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "_save_user_settings", "(", "user_settings", ")" ]
updates last_started_state_editor_tutorial to the current datetime for the user with given user_id .
train
false
14,264
@pytest.mark.network def test_run_method_should_return_success_when_find_packages(): command = SearchCommand() cmdline = '--index=https://pypi.python.org/pypi pip' (options, args) = command.parse_args(cmdline.split()) status = command.run(options, args) assert (status == SUCCESS)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_run_method_should_return_success_when_find_packages", "(", ")", ":", "command", "=", "SearchCommand", "(", ")", "cmdline", "=", "'--index=https://pypi.python.org/pypi pip'", "(", "options", ",", "args", ")", "=", "command", ".", "parse_args", "(", "cmdline", ".", "split", "(", ")", ")", "status", "=", "command", ".", "run", "(", "options", ",", "args", ")", "assert", "(", "status", "==", "SUCCESS", ")" ]
test searchcommand .
train
false
14,265
def clear_override_for_user(user, block, name): try: StudentFieldOverride.objects.get(course_id=block.runtime.course_id, student_id=user.id, location=block.location, field=name).delete() except StudentFieldOverride.DoesNotExist: pass
[ "def", "clear_override_for_user", "(", "user", ",", "block", ",", "name", ")", ":", "try", ":", "StudentFieldOverride", ".", "objects", ".", "get", "(", "course_id", "=", "block", ".", "runtime", ".", "course_id", ",", "student_id", "=", "user", ".", "id", ",", "location", "=", "block", ".", "location", ",", "field", "=", "name", ")", ".", "delete", "(", ")", "except", "StudentFieldOverride", ".", "DoesNotExist", ":", "pass" ]
clears a previously set field override for the user .
train
false
14,266
def word_count_old(documents): return Counter((word for document in documents for word in tokenize(document)))
[ "def", "word_count_old", "(", "documents", ")", ":", "return", "Counter", "(", "(", "word", "for", "document", "in", "documents", "for", "word", "in", "tokenize", "(", "document", ")", ")", ")" ]
word count not using mapreduce .
train
false
14,267
def _parse_master(path=MASTER_CF): with salt.utils.fopen(path, 'r') as fh_: full_conf = fh_.read() conf_list = [] conf_dict = {} for line in full_conf.splitlines(): if ((not line.strip()) or line.strip().startswith('#')): conf_list.append(line) continue comps = line.strip().split() conf_line = {'service': comps[0], 'conn_type': comps[1], 'private': comps[2], 'unpriv': comps[3], 'chroot': comps[4], 'wakeup': comps[5], 'maxproc': comps[6], 'command': ' '.join(comps[7:])} dict_key = '{0} {1}'.format(comps[0], comps[1]) conf_list.append(conf_line) conf_dict[dict_key] = conf_line return (conf_dict, conf_list)
[ "def", "_parse_master", "(", "path", "=", "MASTER_CF", ")", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "path", ",", "'r'", ")", "as", "fh_", ":", "full_conf", "=", "fh_", ".", "read", "(", ")", "conf_list", "=", "[", "]", "conf_dict", "=", "{", "}", "for", "line", "in", "full_conf", ".", "splitlines", "(", ")", ":", "if", "(", "(", "not", "line", ".", "strip", "(", ")", ")", "or", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ")", ":", "conf_list", ".", "append", "(", "line", ")", "continue", "comps", "=", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "conf_line", "=", "{", "'service'", ":", "comps", "[", "0", "]", ",", "'conn_type'", ":", "comps", "[", "1", "]", ",", "'private'", ":", "comps", "[", "2", "]", ",", "'unpriv'", ":", "comps", "[", "3", "]", ",", "'chroot'", ":", "comps", "[", "4", "]", ",", "'wakeup'", ":", "comps", "[", "5", "]", ",", "'maxproc'", ":", "comps", "[", "6", "]", ",", "'command'", ":", "' '", ".", "join", "(", "comps", "[", "7", ":", "]", ")", "}", "dict_key", "=", "'{0} {1}'", ".", "format", "(", "comps", "[", "0", "]", ",", "comps", "[", "1", "]", ")", "conf_list", ".", "append", "(", "conf_line", ")", "conf_dict", "[", "dict_key", "]", "=", "conf_line", "return", "(", "conf_dict", ",", "conf_list", ")" ]
parse the master .
train
true
14,268
@pick_context_manager_writer def instance_fault_create(context, values): fault_ref = models.InstanceFault() fault_ref.update(values) fault_ref.save(context.session) return dict(fault_ref)
[ "@", "pick_context_manager_writer", "def", "instance_fault_create", "(", "context", ",", "values", ")", ":", "fault_ref", "=", "models", ".", "InstanceFault", "(", ")", "fault_ref", ".", "update", "(", "values", ")", "fault_ref", ".", "save", "(", "context", ".", "session", ")", "return", "dict", "(", "fault_ref", ")" ]
create a new instancefault .
train
false
14,270
def _normalize_spec(spec, non_group_columns): if (not isinstance(spec, dict)): spec = collections.OrderedDict(zip(non_group_columns, it.repeat(spec))) res = [] if isinstance(spec, dict): for (input_column, subspec) in spec.items(): if isinstance(subspec, dict): res.extend((((input_column, result_column), func, input_column) for (result_column, func) in subspec.items())) else: if (not isinstance(subspec, list)): subspec = [subspec] res.extend((((input_column, funcname(func)), func, input_column) for func in subspec)) else: raise ValueError('unsupported agg spec of type {}'.format(type(spec))) compounds = (list, tuple, dict) use_flat_columns = (not any((isinstance(subspec, compounds) for subspec in spec.values()))) if use_flat_columns: res = [(input_col, func, input_col) for (_, func, input_col) in res] return res
[ "def", "_normalize_spec", "(", "spec", ",", "non_group_columns", ")", ":", "if", "(", "not", "isinstance", "(", "spec", ",", "dict", ")", ")", ":", "spec", "=", "collections", ".", "OrderedDict", "(", "zip", "(", "non_group_columns", ",", "it", ".", "repeat", "(", "spec", ")", ")", ")", "res", "=", "[", "]", "if", "isinstance", "(", "spec", ",", "dict", ")", ":", "for", "(", "input_column", ",", "subspec", ")", "in", "spec", ".", "items", "(", ")", ":", "if", "isinstance", "(", "subspec", ",", "dict", ")", ":", "res", ".", "extend", "(", "(", "(", "(", "input_column", ",", "result_column", ")", ",", "func", ",", "input_column", ")", "for", "(", "result_column", ",", "func", ")", "in", "subspec", ".", "items", "(", ")", ")", ")", "else", ":", "if", "(", "not", "isinstance", "(", "subspec", ",", "list", ")", ")", ":", "subspec", "=", "[", "subspec", "]", "res", ".", "extend", "(", "(", "(", "(", "input_column", ",", "funcname", "(", "func", ")", ")", ",", "func", ",", "input_column", ")", "for", "func", "in", "subspec", ")", ")", "else", ":", "raise", "ValueError", "(", "'unsupported agg spec of type {}'", ".", "format", "(", "type", "(", "spec", ")", ")", ")", "compounds", "=", "(", "list", ",", "tuple", ",", "dict", ")", "use_flat_columns", "=", "(", "not", "any", "(", "(", "isinstance", "(", "subspec", ",", "compounds", ")", "for", "subspec", "in", "spec", ".", "values", "(", ")", ")", ")", ")", "if", "use_flat_columns", ":", "res", "=", "[", "(", "input_col", ",", "func", ",", "input_col", ")", "for", "(", "_", ",", "func", ",", "input_col", ")", "in", "res", "]", "return", "res" ]
return a list of tuples .
train
false
14,271
def xontribs_main(args=None, stdin=None): if ((not args) or ((args[0] not in _MAIN_XONTRIB_ACTIONS) and (args[0] not in {'-h', '--help'}))): args.insert(0, 'load') parser = _create_xontrib_parser() ns = parser.parse_args(args) if (ns.action is None): ns = parser.parse_args((['load'] + args)) return _MAIN_XONTRIB_ACTIONS[ns.action](ns)
[ "def", "xontribs_main", "(", "args", "=", "None", ",", "stdin", "=", "None", ")", ":", "if", "(", "(", "not", "args", ")", "or", "(", "(", "args", "[", "0", "]", "not", "in", "_MAIN_XONTRIB_ACTIONS", ")", "and", "(", "args", "[", "0", "]", "not", "in", "{", "'-h'", ",", "'--help'", "}", ")", ")", ")", ":", "args", ".", "insert", "(", "0", ",", "'load'", ")", "parser", "=", "_create_xontrib_parser", "(", ")", "ns", "=", "parser", ".", "parse_args", "(", "args", ")", "if", "(", "ns", ".", "action", "is", "None", ")", ":", "ns", "=", "parser", ".", "parse_args", "(", "(", "[", "'load'", "]", "+", "args", ")", ")", "return", "_MAIN_XONTRIB_ACTIONS", "[", "ns", ".", "action", "]", "(", "ns", ")" ]
alias that loads xontribs .
train
false
14,273
def check_allclose(actual, desired, rtol=1e-07, atol=0, err_msg='', verbose=True): if (type(actual) != type(desired)): raise AssertionError(('%s != %s' % (type(actual), type(desired)))) return assert_allclose(actual, desired, atol=atol, rtol=rtol, err_msg=err_msg, verbose=verbose)
[ "def", "check_allclose", "(", "actual", ",", "desired", ",", "rtol", "=", "1e-07", ",", "atol", "=", "0", ",", "err_msg", "=", "''", ",", "verbose", "=", "True", ")", ":", "if", "(", "type", "(", "actual", ")", "!=", "type", "(", "desired", ")", ")", ":", "raise", "AssertionError", "(", "(", "'%s != %s'", "%", "(", "type", "(", "actual", ")", ",", "type", "(", "desired", ")", ")", ")", ")", "return", "assert_allclose", "(", "actual", ",", "desired", ",", "atol", "=", "atol", ",", "rtol", "=", "rtol", ",", "err_msg", "=", "err_msg", ",", "verbose", "=", "verbose", ")" ]
wrapper around np .
train
false
14,274
def register_repository(name, location, installation_policy=None): flags = [(u'Name', name)] flags.append((u'SourceLocation', location)) if (installation_policy is not None): flags.append((u'InstallationPolicy', installation_policy)) params = u'' for (flag, value) in flags: params += u'-{0} {1} '.format(flag, value) cmd = u'Register-PSRepository {0}'.format(params) no_ret = _pshell(cmd) return (name not in list_modules())
[ "def", "register_repository", "(", "name", ",", "location", ",", "installation_policy", "=", "None", ")", ":", "flags", "=", "[", "(", "u'Name'", ",", "name", ")", "]", "flags", ".", "append", "(", "(", "u'SourceLocation'", ",", "location", ")", ")", "if", "(", "installation_policy", "is", "not", "None", ")", ":", "flags", ".", "append", "(", "(", "u'InstallationPolicy'", ",", "installation_policy", ")", ")", "params", "=", "u''", "for", "(", "flag", ",", "value", ")", "in", "flags", ":", "params", "+=", "u'-{0} {1} '", ".", "format", "(", "flag", ",", "value", ")", "cmd", "=", "u'Register-PSRepository {0}'", ".", "format", "(", "params", ")", "no_ret", "=", "_pshell", "(", "cmd", ")", "return", "(", "name", "not", "in", "list_modules", "(", ")", ")" ]
register a psget repository on the local machine .
train
false
14,275
def concatenate_raws(raws, preload=None, events_list=None): if (events_list is not None): if (len(events_list) != len(raws)): raise ValueError('`raws` and `event_list` are required to be of the same length') (first, last) = zip(*[(r.first_samp, r.last_samp) for r in raws]) events = concatenate_events(events_list, first, last) raws[0].append(raws[1:], preload) if (events_list is None): return raws[0] else: return (raws[0], events)
[ "def", "concatenate_raws", "(", "raws", ",", "preload", "=", "None", ",", "events_list", "=", "None", ")", ":", "if", "(", "events_list", "is", "not", "None", ")", ":", "if", "(", "len", "(", "events_list", ")", "!=", "len", "(", "raws", ")", ")", ":", "raise", "ValueError", "(", "'`raws` and `event_list` are required to be of the same length'", ")", "(", "first", ",", "last", ")", "=", "zip", "(", "*", "[", "(", "r", ".", "first_samp", ",", "r", ".", "last_samp", ")", "for", "r", "in", "raws", "]", ")", "events", "=", "concatenate_events", "(", "events_list", ",", "first", ",", "last", ")", "raws", "[", "0", "]", ".", "append", "(", "raws", "[", "1", ":", "]", ",", "preload", ")", "if", "(", "events_list", "is", "None", ")", ":", "return", "raws", "[", "0", "]", "else", ":", "return", "(", "raws", "[", "0", "]", ",", "events", ")" ]
concatenate raw instances as if they were continuous .
train
false
14,276
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
connect mappings to the database .
train
false
14,277
def _rand_cpu_str(cpu): cpu = int(cpu) avail = __salt__['status.nproc']() if (cpu < avail): return '0-{0}'.format(avail) to_set = set() while (len(to_set) < cpu): choice = random.randint(0, (avail - 1)) if (choice not in to_set): to_set.add(str(choice)) return ','.join(sorted(to_set))
[ "def", "_rand_cpu_str", "(", "cpu", ")", ":", "cpu", "=", "int", "(", "cpu", ")", "avail", "=", "__salt__", "[", "'status.nproc'", "]", "(", ")", "if", "(", "cpu", "<", "avail", ")", ":", "return", "'0-{0}'", ".", "format", "(", "avail", ")", "to_set", "=", "set", "(", ")", "while", "(", "len", "(", "to_set", ")", "<", "cpu", ")", ":", "choice", "=", "random", ".", "randint", "(", "0", ",", "(", "avail", "-", "1", ")", ")", "if", "(", "choice", "not", "in", "to_set", ")", ":", "to_set", ".", "add", "(", "str", "(", "choice", ")", ")", "return", "','", ".", "join", "(", "sorted", "(", "to_set", ")", ")" ]
return a random subset of cpus for the cpuset config .
train
true
14,278
def start_plugin_services(portal): pass
[ "def", "start_plugin_services", "(", "portal", ")", ":", "pass" ]
this hook is called by evennia .
train
false
14,279
def jsonFileLogObserver(outFile, recordSeparator=u'\x1e'): return FileLogObserver(outFile, (lambda event: u'{0}{1}\n'.format(recordSeparator, eventAsJSON(event))))
[ "def", "jsonFileLogObserver", "(", "outFile", ",", "recordSeparator", "=", "u'\\x1e'", ")", ":", "return", "FileLogObserver", "(", "outFile", ",", "(", "lambda", "event", ":", "u'{0}{1}\\n'", ".", "format", "(", "recordSeparator", ",", "eventAsJSON", "(", "event", ")", ")", ")", ")" ]
create a l{filelogobserver} that emits json-serialized events to a specified file-like object .
train
false
14,281
def find_guest(name, quiet=False, path=None): if quiet: log.warning("'quiet' argument is being deprecated. Please migrate to --quiet") for data in _list_iter(path=path): (host, l) = next(six.iteritems(data)) for x in ('running', 'frozen', 'stopped'): if (name in l[x]): if (not quiet): __jid_event__.fire_event({'data': host, 'outputter': 'lxc_find_host'}, 'progress') return host return None
[ "def", "find_guest", "(", "name", ",", "quiet", "=", "False", ",", "path", "=", "None", ")", ":", "if", "quiet", ":", "log", ".", "warning", "(", "\"'quiet' argument is being deprecated. Please migrate to --quiet\"", ")", "for", "data", "in", "_list_iter", "(", "path", "=", "path", ")", ":", "(", "host", ",", "l", ")", "=", "next", "(", "six", ".", "iteritems", "(", "data", ")", ")", "for", "x", "in", "(", "'running'", ",", "'frozen'", ",", "'stopped'", ")", ":", "if", "(", "name", "in", "l", "[", "x", "]", ")", ":", "if", "(", "not", "quiet", ")", ":", "__jid_event__", ".", "fire_event", "(", "{", "'data'", ":", "host", ",", "'outputter'", ":", "'lxc_find_host'", "}", ",", "'progress'", ")", "return", "host", "return", "None" ]
returns the host for a container .
train
true
14,282
def get_service_account_info(http, service_account='default'): return get(http, 'instance/service-accounts/{0}/'.format(service_account), recursive=True)
[ "def", "get_service_account_info", "(", "http", ",", "service_account", "=", "'default'", ")", ":", "return", "get", "(", "http", ",", "'instance/service-accounts/{0}/'", ".", "format", "(", "service_account", ")", ",", "recursive", "=", "True", ")" ]
get information about a service account from the metadata server .
train
false
14,283
def prep_course_for_grading(course, request): course._field_data_cache = {} course.set_grading_policy(course.grading_policy)
[ "def", "prep_course_for_grading", "(", "course", ",", "request", ")", ":", "course", ".", "_field_data_cache", "=", "{", "}", "course", ".", "set_grading_policy", "(", "course", ".", "grading_policy", ")" ]
set up course module for overrides to function properly .
train
false
14,284
def _get_full_spec_without_validation(name, email): if name: encoded_name = smart_quote(encode_string(None, name, maxlinelen=MAX_ADDRESS_LENGTH)) return '{0} <{1}>'.format(encoded_name, email) return u'{0}'.format(email)
[ "def", "_get_full_spec_without_validation", "(", "name", ",", "email", ")", ":", "if", "name", ":", "encoded_name", "=", "smart_quote", "(", "encode_string", "(", "None", ",", "name", ",", "maxlinelen", "=", "MAX_ADDRESS_LENGTH", ")", ")", "return", "'{0} <{1}>'", ".", "format", "(", "encoded_name", ",", "email", ")", "return", "u'{0}'", ".", "format", "(", "email", ")" ]
this function is the same as calling full_spec() on a flanker address .
train
false
14,285
def _plot_onscroll(event, params): if (event.key == 'control'): if (event.step < 0): event.key = '-' else: event.key = '+' _plot_onkey(event, params) return if params['butterfly']: return _plot_raw_onscroll(event, params, len(params['ch_names']))
[ "def", "_plot_onscroll", "(", "event", ",", "params", ")", ":", "if", "(", "event", ".", "key", "==", "'control'", ")", ":", "if", "(", "event", ".", "step", "<", "0", ")", ":", "event", ".", "key", "=", "'-'", "else", ":", "event", ".", "key", "=", "'+'", "_plot_onkey", "(", "event", ",", "params", ")", "return", "if", "params", "[", "'butterfly'", "]", ":", "return", "_plot_raw_onscroll", "(", "event", ",", "params", ",", "len", "(", "params", "[", "'ch_names'", "]", ")", ")" ]
handle scroll events .
train
false
14,286
def repair_central_directory(zipFile, is_file_instance): f = (zipFile if is_file_instance else open(zipFile, 'rb+')) data = f.read() pos = data.find(CENTRAL_DIRECTORY_SIGNATURE) if (pos > 0): sio = BytesIO(data) sio.seek((pos + 22)) sio.truncate() sio.seek(0) return sio f.seek(0) return f
[ "def", "repair_central_directory", "(", "zipFile", ",", "is_file_instance", ")", ":", "f", "=", "(", "zipFile", "if", "is_file_instance", "else", "open", "(", "zipFile", ",", "'rb+'", ")", ")", "data", "=", "f", ".", "read", "(", ")", "pos", "=", "data", ".", "find", "(", "CENTRAL_DIRECTORY_SIGNATURE", ")", "if", "(", "pos", ">", "0", ")", ":", "sio", "=", "BytesIO", "(", "data", ")", "sio", ".", "seek", "(", "(", "pos", "+", "22", ")", ")", "sio", ".", "truncate", "(", ")", "sio", ".", "seek", "(", "0", ")", "return", "sio", "f", ".", "seek", "(", "0", ")", "return", "f" ]
trims trailing data from the central directory code taken from URL courtesy of uri cohen .
train
true
14,287
@pytest.mark.network def test_env_vars_override_config_file(script, virtualenv): (fd, config_file) = tempfile.mkstemp('-pip.cfg', 'test-') try: _test_env_vars_override_config_file(script, virtualenv, config_file) finally: os.close(fd) os.remove(config_file)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_env_vars_override_config_file", "(", "script", ",", "virtualenv", ")", ":", "(", "fd", ",", "config_file", ")", "=", "tempfile", ".", "mkstemp", "(", "'-pip.cfg'", ",", "'test-'", ")", "try", ":", "_test_env_vars_override_config_file", "(", "script", ",", "virtualenv", ",", "config_file", ")", "finally", ":", "os", ".", "close", "(", "fd", ")", "os", ".", "remove", "(", "config_file", ")" ]
test that environmental variables override settings in config files .
train
false
14,288
def optional_is_none(context, builder, sig, args): [lty, rty] = sig.args [lval, rval] = args if (lty == types.none): (lty, rty) = (rty, lty) (lval, rval) = (rval, lval) opt_type = lty opt_val = lval opt = context.make_helper(builder, opt_type, opt_val) res = builder.not_(cgutils.as_bool_bit(builder, opt.valid)) return impl_ret_untracked(context, builder, sig.return_type, res)
[ "def", "optional_is_none", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "[", "lty", ",", "rty", "]", "=", "sig", ".", "args", "[", "lval", ",", "rval", "]", "=", "args", "if", "(", "lty", "==", "types", ".", "none", ")", ":", "(", "lty", ",", "rty", ")", "=", "(", "rty", ",", "lty", ")", "(", "lval", ",", "rval", ")", "=", "(", "rval", ",", "lval", ")", "opt_type", "=", "lty", "opt_val", "=", "lval", "opt", "=", "context", ".", "make_helper", "(", "builder", ",", "opt_type", ",", "opt_val", ")", "res", "=", "builder", ".", "not_", "(", "cgutils", ".", "as_bool_bit", "(", "builder", ",", "opt", ".", "valid", ")", ")", "return", "impl_ret_untracked", "(", "context", ",", "builder", ",", "sig", ".", "return_type", ",", "res", ")" ]
check if an optional value is invalid .
train
false
14,290
def vectorsFromSeqList(seqList, patternMatrix): totalLen = 0 for seq in seqList: totalLen += len(seq) vectors = numpy.zeros((totalLen, patternMatrix.shape[1]), dtype='bool') vecOffset = 0 for seq in seqList: seq = numpy.array(seq, dtype='uint32') for (idx, coinc) in enumerate(seq): vectors[vecOffset] = patternMatrix.getRow(int(coinc)) vecOffset += 1 return vectors
[ "def", "vectorsFromSeqList", "(", "seqList", ",", "patternMatrix", ")", ":", "totalLen", "=", "0", "for", "seq", "in", "seqList", ":", "totalLen", "+=", "len", "(", "seq", ")", "vectors", "=", "numpy", ".", "zeros", "(", "(", "totalLen", ",", "patternMatrix", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "'bool'", ")", "vecOffset", "=", "0", "for", "seq", "in", "seqList", ":", "seq", "=", "numpy", ".", "array", "(", "seq", ",", "dtype", "=", "'uint32'", ")", "for", "(", "idx", ",", "coinc", ")", "in", "enumerate", "(", "seq", ")", ":", "vectors", "[", "vecOffset", "]", "=", "patternMatrix", ".", "getRow", "(", "int", "(", "coinc", ")", ")", "vecOffset", "+=", "1", "return", "vectors" ]
convert a list of sequences of pattern indices .
train
true
14,292
def ocr_buttons(r): if (not current.deployment_settings.has_module('ocr')): return '' if r.component: urlargs = [r.id, r.component_name] else: urlargs = [] f = r.function c = r.controller a = r.application T = current.T UPLOAD = T('Upload Scanned OCR Form') DOWNLOAD = T('Download OCR-able PDF Form') _style = 'height:10px;float:right;padding:3px;' output = DIV(A(IMG(_src=('/%s/static/img/upload-ocr.png' % a), _alt=UPLOAD), _id='upload-pdf-btn', _href=URL(c=c, f=f, args=(urlargs + ['import.pdf'])), _title=UPLOAD, _style=_style), A(IMG(_src=('/%s/static/img/download-ocr.png' % a), _alt=DOWNLOAD), _id='download-pdf-btn', _href=URL(c=c, f=f, args=(urlargs + ['create.pdf'])), _title=DOWNLOAD, _style=_style)) return output
[ "def", "ocr_buttons", "(", "r", ")", ":", "if", "(", "not", "current", ".", "deployment_settings", ".", "has_module", "(", "'ocr'", ")", ")", ":", "return", "''", "if", "r", ".", "component", ":", "urlargs", "=", "[", "r", ".", "id", ",", "r", ".", "component_name", "]", "else", ":", "urlargs", "=", "[", "]", "f", "=", "r", ".", "function", "c", "=", "r", ".", "controller", "a", "=", "r", ".", "application", "T", "=", "current", ".", "T", "UPLOAD", "=", "T", "(", "'Upload Scanned OCR Form'", ")", "DOWNLOAD", "=", "T", "(", "'Download OCR-able PDF Form'", ")", "_style", "=", "'height:10px;float:right;padding:3px;'", "output", "=", "DIV", "(", "A", "(", "IMG", "(", "_src", "=", "(", "'/%s/static/img/upload-ocr.png'", "%", "a", ")", ",", "_alt", "=", "UPLOAD", ")", ",", "_id", "=", "'upload-pdf-btn'", ",", "_href", "=", "URL", "(", "c", "=", "c", ",", "f", "=", "f", ",", "args", "=", "(", "urlargs", "+", "[", "'import.pdf'", "]", ")", ")", ",", "_title", "=", "UPLOAD", ",", "_style", "=", "_style", ")", ",", "A", "(", "IMG", "(", "_src", "=", "(", "'/%s/static/img/download-ocr.png'", "%", "a", ")", ",", "_alt", "=", "DOWNLOAD", ")", ",", "_id", "=", "'download-pdf-btn'", ",", "_href", "=", "URL", "(", "c", "=", "c", ",", "f", "=", "f", ",", "args", "=", "(", "urlargs", "+", "[", "'create.pdf'", "]", ")", ")", ",", "_title", "=", "DOWNLOAD", ",", "_style", "=", "_style", ")", ")", "return", "output" ]
generate print pdf button in the view .
train
false
14,293
def run_shell_cmd(exe, stdout=subprocess.PIPE, stderr=subprocess.PIPE): p = subprocess.Popen(exe, stdout=stdout, stderr=stderr) (last_stdout_str, last_stderr_str) = p.communicate() last_stdout = last_stdout_str.split('\n') if (LOG_LINE_PREFIX in last_stdout_str): log('') for line in last_stdout: if line.startswith(LOG_LINE_PREFIX): log(('INFO: %s' % line[len(LOG_LINE_PREFIX):])) log('') result = ('%s%s' % (last_stdout_str, last_stderr_str)) if (p.returncode != 0): raise Exception(('Error %s\n%s' % (p.returncode, result))) return result
[ "def", "run_shell_cmd", "(", "exe", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "exe", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ")", "(", "last_stdout_str", ",", "last_stderr_str", ")", "=", "p", ".", "communicate", "(", ")", "last_stdout", "=", "last_stdout_str", ".", "split", "(", "'\\n'", ")", "if", "(", "LOG_LINE_PREFIX", "in", "last_stdout_str", ")", ":", "log", "(", "''", ")", "for", "line", "in", "last_stdout", ":", "if", "line", ".", "startswith", "(", "LOG_LINE_PREFIX", ")", ":", "log", "(", "(", "'INFO: %s'", "%", "line", "[", "len", "(", "LOG_LINE_PREFIX", ")", ":", "]", ")", ")", "log", "(", "''", ")", "result", "=", "(", "'%s%s'", "%", "(", "last_stdout_str", ",", "last_stderr_str", ")", ")", "if", "(", "p", ".", "returncode", "!=", "0", ")", ":", "raise", "Exception", "(", "(", "'Error %s\\n%s'", "%", "(", "p", ".", "returncode", ",", "result", ")", ")", ")", "return", "result" ]
runs a shell command and captures the stdout and stderr output .
train
false
14,294
def is_available(): try: NVVM() except NvvmSupportError: return False else: return True
[ "def", "is_available", "(", ")", ":", "try", ":", "NVVM", "(", ")", "except", "NvvmSupportError", ":", "return", "False", "else", ":", "return", "True" ]
return true if we can successfully connect to solr .
train
false
14,296
@constructor def smallest(*args): if (len(args) == 2): (a, b) = args return switch((a < b), a, b) else: return min(stack(args), axis=0)
[ "@", "constructor", "def", "smallest", "(", "*", "args", ")", ":", "if", "(", "len", "(", "args", ")", "==", "2", ")", ":", "(", "a", ",", "b", ")", "=", "args", "return", "switch", "(", "(", "a", "<", "b", ")", ",", "a", ",", "b", ")", "else", ":", "return", "min", "(", "stack", "(", "args", ")", ",", "axis", "=", "0", ")" ]
create a proxy for an instance which makes it sort smaller than anything it is compared to .
train
false
14,297
def stubout_attach_disks(stubs): def f(*args): raise fake.Failure('Test Exception raised by fake _attach_disks') stubs.Set(vmops.VMOps, '_attach_disks', f)
[ "def", "stubout_attach_disks", "(", "stubs", ")", ":", "def", "f", "(", "*", "args", ")", ":", "raise", "fake", ".", "Failure", "(", "'Test Exception raised by fake _attach_disks'", ")", "stubs", ".", "Set", "(", "vmops", ".", "VMOps", ",", "'_attach_disks'", ",", "f", ")" ]
simulates a failure in _attach_disks .
train
false
14,298
def find_sock_file(conf_file): try: fd = open(conf_file) except IOError as e: utils.err(('Error: %s. Config file path is relative: %s' % (e, conf_file))) return None try: for line in fd: if line.lstrip(' DCTB ').startswith('stats socket'): sock_file = line.split()[2] if utils.is_sockfile(sock_file): return sock_file finally: fd.close()
[ "def", "find_sock_file", "(", "conf_file", ")", ":", "try", ":", "fd", "=", "open", "(", "conf_file", ")", "except", "IOError", "as", "e", ":", "utils", ".", "err", "(", "(", "'Error: %s. Config file path is relative: %s'", "%", "(", "e", ",", "conf_file", ")", ")", ")", "return", "None", "try", ":", "for", "line", "in", "fd", ":", "if", "line", ".", "lstrip", "(", "' DCTB '", ")", ".", "startswith", "(", "'stats socket'", ")", ":", "sock_file", "=", "line", ".", "split", "(", ")", "[", "2", "]", "if", "utils", ".", "is_sockfile", "(", "sock_file", ")", ":", "return", "sock_file", "finally", ":", "fd", ".", "close", "(", ")" ]
returns the unix socket file of haproxy .
train
false
14,299
def daemonize_if(opts): if ('salt-call' in sys.argv[0]): return if (not opts.get('multiprocessing', True)): return if sys.platform.startswith('win'): return daemonize(False)
[ "def", "daemonize_if", "(", "opts", ")", ":", "if", "(", "'salt-call'", "in", "sys", ".", "argv", "[", "0", "]", ")", ":", "return", "if", "(", "not", "opts", ".", "get", "(", "'multiprocessing'", ",", "True", ")", ")", ":", "return", "if", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ":", "return", "daemonize", "(", "False", ")" ]
daemonize a module function process if multiprocessing is true and the process is not being called by salt-call .
train
true
14,300
def int_output(func, argtypes): func.argtypes = argtypes func.restype = c_int return func
[ "def", "int_output", "(", "func", ",", "argtypes", ")", ":", "func", ".", "argtypes", "=", "argtypes", "func", ".", "restype", "=", "c_int", "return", "func" ]
generates a ctypes function that returns an integer value .
train
false
14,301
def quote_header_value(value, extra_chars='', allow_token=True): value = str(value) if allow_token: token_chars = (_token_chars | set(extra_chars)) if set(value).issubset(token_chars): return value return ('"%s"' % value.replace('\\', '\\\\').replace('"', '\\"'))
[ "def", "quote_header_value", "(", "value", ",", "extra_chars", "=", "''", ",", "allow_token", "=", "True", ")", ":", "value", "=", "str", "(", "value", ")", "if", "allow_token", ":", "token_chars", "=", "(", "_token_chars", "|", "set", "(", "extra_chars", ")", ")", "if", "set", "(", "value", ")", ".", "issubset", "(", "token_chars", ")", ":", "return", "value", "return", "(", "'\"%s\"'", "%", "value", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ".", "replace", "(", "'\"'", ",", "'\\\\\"'", ")", ")" ]
quote a header value if necessary .
train
false
14,302
def get_user_icons(user): user_perms = UserPermissions(user) user_perms.build_permissions() from frappe.boot import get_allowed_pages allowed_pages = get_allowed_pages() icons = [] for icon in get_desktop_icons(user): add = True if icon.hidden_in_standard: add = False if (not icon.custom): if (icon.module_name == u'Learn'): pass elif ((icon.type == u'page') and (icon.link not in allowed_pages)): add = False elif ((icon.type == u'module') and (icon.module_name not in user_perms.allow_modules)): add = False if add: icons.append(icon) return icons
[ "def", "get_user_icons", "(", "user", ")", ":", "user_perms", "=", "UserPermissions", "(", "user", ")", "user_perms", ".", "build_permissions", "(", ")", "from", "frappe", ".", "boot", "import", "get_allowed_pages", "allowed_pages", "=", "get_allowed_pages", "(", ")", "icons", "=", "[", "]", "for", "icon", "in", "get_desktop_icons", "(", "user", ")", ":", "add", "=", "True", "if", "icon", ".", "hidden_in_standard", ":", "add", "=", "False", "if", "(", "not", "icon", ".", "custom", ")", ":", "if", "(", "icon", ".", "module_name", "==", "u'Learn'", ")", ":", "pass", "elif", "(", "(", "icon", ".", "type", "==", "u'page'", ")", "and", "(", "icon", ".", "link", "not", "in", "allowed_pages", ")", ")", ":", "add", "=", "False", "elif", "(", "(", "icon", ".", "type", "==", "u'module'", ")", "and", "(", "icon", ".", "module_name", "not", "in", "user_perms", ".", "allow_modules", ")", ")", ":", "add", "=", "False", "if", "add", ":", "icons", ".", "append", "(", "icon", ")", "return", "icons" ]
get user icons for module setup page .
train
false
14,303
def test_valid_css(): styles = ['color', 'float'] eq_('<p style="float: left;">foo</p>', clean('<p style="float: left; color: ">foo</p>', styles=styles)) eq_('<p style="">foo</p>', clean('<p style="color: float: left;">foo</p>', styles=styles))
[ "def", "test_valid_css", "(", ")", ":", "styles", "=", "[", "'color'", ",", "'float'", "]", "eq_", "(", "'<p style=\"float: left;\">foo</p>'", ",", "clean", "(", "'<p style=\"float: left; color: \">foo</p>'", ",", "styles", "=", "styles", ")", ")", "eq_", "(", "'<p style=\"\">foo</p>'", ",", "clean", "(", "'<p style=\"color: float: left;\">foo</p>'", ",", "styles", "=", "styles", ")", ")" ]
the sanitizer should fix missing css values .
train
false
14,304
def format_user_agent(name=None): parts = [(u'Mopidy/%s' % mopidy.__version__), (u'%s/%s' % (platform.python_implementation(), platform.python_version()))] if name: parts.insert(0, name) return u' '.join(parts)
[ "def", "format_user_agent", "(", "name", "=", "None", ")", ":", "parts", "=", "[", "(", "u'Mopidy/%s'", "%", "mopidy", ".", "__version__", ")", ",", "(", "u'%s/%s'", "%", "(", "platform", ".", "python_implementation", "(", ")", ",", "platform", ".", "python_version", "(", ")", ")", ")", "]", "if", "name", ":", "parts", ".", "insert", "(", "0", ",", "name", ")", "return", "u' '", ".", "join", "(", "parts", ")" ]
construct a user-agent suitable for use in client code .
train
false
14,305
def dot_vals(value): ret = {} for (key, val) in six.iteritems(__pillar__.get('master', {})): if key.startswith('{0}.'.format(value)): ret[key] = val for (key, val) in six.iteritems(__opts__): if key.startswith('{0}.'.format(value)): ret[key] = val return ret
[ "def", "dot_vals", "(", "value", ")", ":", "ret", "=", "{", "}", "for", "(", "key", ",", "val", ")", "in", "six", ".", "iteritems", "(", "__pillar__", ".", "get", "(", "'master'", ",", "{", "}", ")", ")", ":", "if", "key", ".", "startswith", "(", "'{0}.'", ".", "format", "(", "value", ")", ")", ":", "ret", "[", "key", "]", "=", "val", "for", "(", "key", ",", "val", ")", "in", "six", ".", "iteritems", "(", "__opts__", ")", ":", "if", "key", ".", "startswith", "(", "'{0}.'", ".", "format", "(", "value", ")", ")", ":", "ret", "[", "key", "]", "=", "val", "return", "ret" ]
pass in a configuration value that should be preceded by the module name and a dot .
train
true
14,308
def UrnStringToHuntId(urn): if urn.startswith(AFF4_PREFIX): urn = urn[len(AFF4_PREFIX):] components = urn.split('/') if ((len(components) != 2) or (components[0] != 'hunts')): raise ValueError('Invalid hunt URN: %s', urn) return components[(-1)]
[ "def", "UrnStringToHuntId", "(", "urn", ")", ":", "if", "urn", ".", "startswith", "(", "AFF4_PREFIX", ")", ":", "urn", "=", "urn", "[", "len", "(", "AFF4_PREFIX", ")", ":", "]", "components", "=", "urn", ".", "split", "(", "'/'", ")", "if", "(", "(", "len", "(", "components", ")", "!=", "2", ")", "or", "(", "components", "[", "0", "]", "!=", "'hunts'", ")", ")", ":", "raise", "ValueError", "(", "'Invalid hunt URN: %s'", ",", "urn", ")", "return", "components", "[", "(", "-", "1", ")", "]" ]
converts given urn string to a flow id string .
train
true
14,309
def par_sort(a, b): aext = a.lower().split('.')[(-1)] bext = b.lower().split('.')[(-1)] if (aext == bext): return cmp(a, b) elif (aext == 'par2'): return (-1) elif (bext == 'par2'): return 1
[ "def", "par_sort", "(", "a", ",", "b", ")", ":", "aext", "=", "a", ".", "lower", "(", ")", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", "bext", "=", "b", ".", "lower", "(", ")", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", "if", "(", "aext", "==", "bext", ")", ":", "return", "cmp", "(", "a", ",", "b", ")", "elif", "(", "aext", "==", "'par2'", ")", ":", "return", "(", "-", "1", ")", "elif", "(", "bext", "==", "'par2'", ")", ":", "return", "1" ]
define sort method for par2 file names .
train
false
14,310
def _trim_doc_string(text): lines = text.replace('\r\n', '\n').split('\n') nlines = [lines.pop(0)] if lines: min_indent = min([(len(line) - len(line.lstrip())) for line in lines]) for line in lines: nlines.append(line[min_indent:]) return '\n'.join(nlines)
[ "def", "_trim_doc_string", "(", "text", ")", ":", "lines", "=", "text", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", ".", "split", "(", "'\\n'", ")", "nlines", "=", "[", "lines", ".", "pop", "(", "0", ")", "]", "if", "lines", ":", "min_indent", "=", "min", "(", "[", "(", "len", "(", "line", ")", "-", "len", "(", "line", ".", "lstrip", "(", ")", ")", ")", "for", "line", "in", "lines", "]", ")", "for", "line", "in", "lines", ":", "nlines", ".", "append", "(", "line", "[", "min_indent", ":", "]", ")", "return", "'\\n'", ".", "join", "(", "nlines", ")" ]
trims a doc string to make it format correctly with structured text .
train
false
14,311
def dnsdomain_register_for_project(context, fqdomain, project): return IMPL.dnsdomain_register_for_project(context, fqdomain, project)
[ "def", "dnsdomain_register_for_project", "(", "context", ",", "fqdomain", ",", "project", ")", ":", "return", "IMPL", ".", "dnsdomain_register_for_project", "(", "context", ",", "fqdomain", ",", "project", ")" ]
associated a dns domain with a project id .
train
false
14,313
def gm_constrs(t, x_list, p): assert is_weight(p) w = dyad_completion(p) tree = decompose(w) d = defaultdict((lambda : lu.create_var(t.size))) d[w] = t if (len(x_list) < len(w)): x_list += [t] assert (len(x_list) == len(w)) for (i, (p, v)) in enumerate(zip(w, x_list)): if (p > 0): tmp = ([0] * len(w)) tmp[i] = 1 d[tuple(tmp)] = v constraints = [] for (elem, children) in tree.items(): if (1 not in elem): constraints += [gm(d[elem], d[children[0]], d[children[1]])] return constraints
[ "def", "gm_constrs", "(", "t", ",", "x_list", ",", "p", ")", ":", "assert", "is_weight", "(", "p", ")", "w", "=", "dyad_completion", "(", "p", ")", "tree", "=", "decompose", "(", "w", ")", "d", "=", "defaultdict", "(", "(", "lambda", ":", "lu", ".", "create_var", "(", "t", ".", "size", ")", ")", ")", "d", "[", "w", "]", "=", "t", "if", "(", "len", "(", "x_list", ")", "<", "len", "(", "w", ")", ")", ":", "x_list", "+=", "[", "t", "]", "assert", "(", "len", "(", "x_list", ")", "==", "len", "(", "w", ")", ")", "for", "(", "i", ",", "(", "p", ",", "v", ")", ")", "in", "enumerate", "(", "zip", "(", "w", ",", "x_list", ")", ")", ":", "if", "(", "p", ">", "0", ")", ":", "tmp", "=", "(", "[", "0", "]", "*", "len", "(", "w", ")", ")", "tmp", "[", "i", "]", "=", "1", "d", "[", "tuple", "(", "tmp", ")", "]", "=", "v", "constraints", "=", "[", "]", "for", "(", "elem", ",", "children", ")", "in", "tree", ".", "items", "(", ")", ":", "if", "(", "1", "not", "in", "elem", ")", ":", "constraints", "+=", "[", "gm", "(", "d", "[", "elem", "]", ",", "d", "[", "children", "[", "0", "]", "]", ",", "d", "[", "children", "[", "1", "]", "]", ")", "]", "return", "constraints" ]
form the internal cxvpy constraints to form the weighted geometric mean t <= x^p .
train
false
14,314
def load_comparable_csr(*names): return jose.ComparableX509(load_csr(*names))
[ "def", "load_comparable_csr", "(", "*", "names", ")", ":", "return", "jose", ".", "ComparableX509", "(", "load_csr", "(", "*", "names", ")", ")" ]
load comparablex509 certificate request .
train
false
14,315
def pmd(registry, xml_parent, data): xml_element = XML.SubElement(xml_parent, 'hudson.plugins.pmd.PmdPublisher') helpers.build_trends_publisher('[PMD] ', xml_element, data)
[ "def", "pmd", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "xml_element", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.pmd.PmdPublisher'", ")", "helpers", ".", "build_trends_publisher", "(", "'[PMD] '", ",", "xml_element", ",", "data", ")" ]
yaml: pmd publish trend reports with pmd .
train
false
14,316
def getAllTransformedVertexes(transformedVertexes, xmlObject): for archivableObject in xmlObject.archivableObjects: transformedVertexes += archivableObject.getTransformedVertexes() return transformedVertexes
[ "def", "getAllTransformedVertexes", "(", "transformedVertexes", ",", "xmlObject", ")", ":", "for", "archivableObject", "in", "xmlObject", ".", "archivableObjects", ":", "transformedVertexes", "+=", "archivableObject", ".", "getTransformedVertexes", "(", ")", "return", "transformedVertexes" ]
get all transformed vertexes .
train
false
14,317
def _format_state_result(name, result, changes=None, comment=''): if (changes is None): changes = {'old': '', 'new': ''} return {'name': name, 'result': result, 'changes': changes, 'comment': comment}
[ "def", "_format_state_result", "(", "name", ",", "result", ",", "changes", "=", "None", ",", "comment", "=", "''", ")", ":", "if", "(", "changes", "is", "None", ")", ":", "changes", "=", "{", "'old'", ":", "''", ",", "'new'", ":", "''", "}", "return", "{", "'name'", ":", "name", ",", "'result'", ":", "result", ",", "'changes'", ":", "changes", ",", "'comment'", ":", "comment", "}" ]
creates the state result dictionary .
train
true
14,318
def get_action_by_id(action_id): action = None try: action = Action.get_by_id(action_id) except (ValueError, ValidationError) as e: LOG.warning('Database lookup for action with id="%s" resulted in exception: %s', action_id, e) raise StackStormDBObjectNotFoundError(('Unable to find action with id="%s"' % action_id)) return action
[ "def", "get_action_by_id", "(", "action_id", ")", ":", "action", "=", "None", "try", ":", "action", "=", "Action", ".", "get_by_id", "(", "action_id", ")", "except", "(", "ValueError", ",", "ValidationError", ")", "as", "e", ":", "LOG", ".", "warning", "(", "'Database lookup for action with id=\"%s\" resulted in exception: %s'", ",", "action_id", ",", "e", ")", "raise", "StackStormDBObjectNotFoundError", "(", "(", "'Unable to find action with id=\"%s\"'", "%", "action_id", ")", ")", "return", "action" ]
get action by id .
train
false
14,321
def generate_initial_profile_picture(user_id): user_email = get_email_from_user_id(user_id) user_gravatar = fetch_gravatar(user_email) update_profile_picture_data_url(user_id, user_gravatar)
[ "def", "generate_initial_profile_picture", "(", "user_id", ")", ":", "user_email", "=", "get_email_from_user_id", "(", "user_id", ")", "user_gravatar", "=", "fetch_gravatar", "(", "user_email", ")", "update_profile_picture_data_url", "(", "user_id", ",", "user_gravatar", ")" ]
generates a profile picture for a new user and updates the users settings in the datastore .
train
false
14,322
def wait_time(): return uniform(0.3, 0.5)
[ "def", "wait_time", "(", ")", ":", "return", "uniform", "(", "0.3", ",", "0.5", ")" ]
sleep for the given delay .
train
false
14,324
@app.route('/auth/info/googleidtoken', methods=['GET']) def auth_info_google_id_token(): return auth_info()
[ "@", "app", ".", "route", "(", "'/auth/info/googleidtoken'", ",", "methods", "=", "[", "'GET'", "]", ")", "def", "auth_info_google_id_token", "(", ")", ":", "return", "auth_info", "(", ")" ]
auth info with google id token .
train
false
14,325
def set_attached_console_visible(state): flag = {True: SW_SHOW, False: SW_HIDE} return bool(ShowWindow(console_window_handle, flag[state]))
[ "def", "set_attached_console_visible", "(", "state", ")", ":", "flag", "=", "{", "True", ":", "SW_SHOW", ",", "False", ":", "SW_HIDE", "}", "return", "bool", "(", "ShowWindow", "(", "console_window_handle", ",", "flag", "[", "state", "]", ")", ")" ]
show/hide system console window attached to current process .
train
true
14,326
def _get_series_result_type(result): if isinstance(result, dict): if all((is_sparse(c) for c in compat.itervalues(result))): from pandas.sparse.api import SparseDataFrame return SparseDataFrame else: from pandas.core.frame import DataFrame return DataFrame elif is_sparse(result): from pandas.sparse.api import SparseSeries return SparseSeries else: from pandas.core.series import Series return Series
[ "def", "_get_series_result_type", "(", "result", ")", ":", "if", "isinstance", "(", "result", ",", "dict", ")", ":", "if", "all", "(", "(", "is_sparse", "(", "c", ")", "for", "c", "in", "compat", ".", "itervalues", "(", "result", ")", ")", ")", ":", "from", "pandas", ".", "sparse", ".", "api", "import", "SparseDataFrame", "return", "SparseDataFrame", "else", ":", "from", "pandas", ".", "core", ".", "frame", "import", "DataFrame", "return", "DataFrame", "elif", "is_sparse", "(", "result", ")", ":", "from", "pandas", ".", "sparse", ".", "api", "import", "SparseSeries", "return", "SparseSeries", "else", ":", "from", "pandas", ".", "core", ".", "series", "import", "Series", "return", "Series" ]
return appropriate class of series concat input is either dict or array-like .
train
false
14,327
def administrator(method): @functools.wraps(method) def wrapper(self, *args, **kwargs): if (not self.current_user): if (self.request.method == 'GET'): self.redirect(self.get_login_url()) return raise tornado.web.HTTPError(403) elif (not self.current_user.administrator): if (self.request.method == 'GET'): self.redirect('/') return raise tornado.web.HTTPError(403) else: return method(self, *args, **kwargs) return wrapper
[ "def", "administrator", "(", "method", ")", ":", "@", "functools", ".", "wraps", "(", "method", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "self", ".", "current_user", ")", ":", "if", "(", "self", ".", "request", ".", "method", "==", "'GET'", ")", ":", "self", ".", "redirect", "(", "self", ".", "get_login_url", "(", ")", ")", "return", "raise", "tornado", ".", "web", ".", "HTTPError", "(", "403", ")", "elif", "(", "not", "self", ".", "current_user", ".", "administrator", ")", ":", "if", "(", "self", ".", "request", ".", "method", "==", "'GET'", ")", ":", "self", ".", "redirect", "(", "'/'", ")", "return", "raise", "tornado", ".", "web", ".", "HTTPError", "(", "403", ")", "else", ":", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
decorate with this method to restrict to site admins .
train
false
14,329
def assert_arping(src_namespace, dst_ip, source=None, timeout=1, count=1): ns_ip_wrapper = ip_lib.IPWrapper(src_namespace) arping_cmd = ['arping', '-c', count, '-w', timeout] if source: arping_cmd.extend(['-s', source]) arping_cmd.append(dst_ip) ns_ip_wrapper.netns.execute(arping_cmd)
[ "def", "assert_arping", "(", "src_namespace", ",", "dst_ip", ",", "source", "=", "None", ",", "timeout", "=", "1", ",", "count", "=", "1", ")", ":", "ns_ip_wrapper", "=", "ip_lib", ".", "IPWrapper", "(", "src_namespace", ")", "arping_cmd", "=", "[", "'arping'", ",", "'-c'", ",", "count", ",", "'-w'", ",", "timeout", "]", "if", "source", ":", "arping_cmd", ".", "extend", "(", "[", "'-s'", ",", "source", "]", ")", "arping_cmd", ".", "append", "(", "dst_ip", ")", "ns_ip_wrapper", ".", "netns", ".", "execute", "(", "arping_cmd", ")" ]
send arp request using arping executable .
train
false
14,332
@pytest.fixture def _pytest(request): return PytestArg(request)
[ "@", "pytest", ".", "fixture", "def", "_pytest", "(", "request", ")", ":", "return", "PytestArg", "(", "request", ")" ]
return a helper which offers a gethookrecorder method which returns a hookrecorder instance which helps to make assertions about called hooks .
train
false
14,334
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
14,336
def format_secret(session, secret, content_type): if (not isinstance(secret, bytes)): secret = secret.encode('utf-8') if (not session.encrypted): return dbus.Struct((session.object_path, '', dbus.ByteArray(secret), content_type)) padding = (16 - (len(secret) & 15)) secret += bytes((bytearray((padding,)) * padding)) aes_iv = os.urandom(16) aes = Cipher.AES.new(session.aes_key, Cipher.MODE_CBC, Cipher.AES.aes_iv) encrypted_secret = aes.encrypt(secret) return dbus.Struct((session.object_path, dbus.Array(aes_iv), dbus.Array(bytearray(encrypted_secret)), content_type))
[ "def", "format_secret", "(", "session", ",", "secret", ",", "content_type", ")", ":", "if", "(", "not", "isinstance", "(", "secret", ",", "bytes", ")", ")", ":", "secret", "=", "secret", ".", "encode", "(", "'utf-8'", ")", "if", "(", "not", "session", ".", "encrypted", ")", ":", "return", "dbus", ".", "Struct", "(", "(", "session", ".", "object_path", ",", "''", ",", "dbus", ".", "ByteArray", "(", "secret", ")", ",", "content_type", ")", ")", "padding", "=", "(", "16", "-", "(", "len", "(", "secret", ")", "&", "15", ")", ")", "secret", "+=", "bytes", "(", "(", "bytearray", "(", "(", "padding", ",", ")", ")", "*", "padding", ")", ")", "aes_iv", "=", "os", ".", "urandom", "(", "16", ")", "aes", "=", "Cipher", ".", "AES", ".", "new", "(", "session", ".", "aes_key", ",", "Cipher", ".", "MODE_CBC", ",", "Cipher", ".", "AES", ".", "aes_iv", ")", "encrypted_secret", "=", "aes", ".", "encrypt", "(", "secret", ")", "return", "dbus", ".", "Struct", "(", "(", "session", ".", "object_path", ",", "dbus", ".", "Array", "(", "aes_iv", ")", ",", "dbus", ".", "Array", "(", "bytearray", "(", "encrypted_secret", ")", ")", ",", "content_type", ")", ")" ]
formats secret to make possible to pass it to the secret service api .
train
false
14,337
def test_thread_delete(db, gmail_account): generic_thread = add_fake_thread(db.session, gmail_account.namespace.id) generic_message = add_fake_message(db.session, gmail_account.namespace.id, generic_thread) assert (db.session.query(Thread).filter((Thread.id == generic_thread.id)).all() == [generic_thread]) assert (db.session.query(Message).filter((Message.id == generic_message.id)).all() == [generic_message]) db.session.delete(generic_thread) db.session.commit() assert (db.session.query(Thread).filter((Thread.id == generic_thread.id)).all() == []) assert (db.session.query(Message).filter((Message.id == generic_message.id)).all() == [])
[ "def", "test_thread_delete", "(", "db", ",", "gmail_account", ")", ":", "generic_thread", "=", "add_fake_thread", "(", "db", ".", "session", ",", "gmail_account", ".", "namespace", ".", "id", ")", "generic_message", "=", "add_fake_message", "(", "db", ".", "session", ",", "gmail_account", ".", "namespace", ".", "id", ",", "generic_thread", ")", "assert", "(", "db", ".", "session", ".", "query", "(", "Thread", ")", ".", "filter", "(", "(", "Thread", ".", "id", "==", "generic_thread", ".", "id", ")", ")", ".", "all", "(", ")", "==", "[", "generic_thread", "]", ")", "assert", "(", "db", ".", "session", ".", "query", "(", "Message", ")", ".", "filter", "(", "(", "Message", ".", "id", "==", "generic_message", ".", "id", ")", ")", ".", "all", "(", ")", "==", "[", "generic_message", "]", ")", "db", ".", "session", ".", "delete", "(", "generic_thread", ")", "db", ".", "session", ".", "commit", "(", ")", "assert", "(", "db", ".", "session", ".", "query", "(", "Thread", ")", ".", "filter", "(", "(", "Thread", ".", "id", "==", "generic_thread", ".", "id", ")", ")", ".", "all", "(", ")", "==", "[", "]", ")", "assert", "(", "db", ".", "session", ".", "query", "(", "Message", ")", ".", "filter", "(", "(", "Message", ".", "id", "==", "generic_message", ".", "id", ")", ")", ".", "all", "(", ")", "==", "[", "]", ")" ]
ensure that all associated messages are deleted when a thread is deleted .
train
false
14,338
def _require_auth(fnc): @wraps(fnc) @_assure_identity def _wrapped(*args, **kwargs): if (not identity.authenticated): msg = ("Authentication required before calling '%s'." % fnc.__name__) raise exc.NotAuthenticated(msg) return fnc(*args, **kwargs) return _wrapped
[ "def", "_require_auth", "(", "fnc", ")", ":", "@", "wraps", "(", "fnc", ")", "@", "_assure_identity", "def", "_wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "identity", ".", "authenticated", ")", ":", "msg", "=", "(", "\"Authentication required before calling '%s'.\"", "%", "fnc", ".", "__name__", ")", "raise", "exc", ".", "NotAuthenticated", "(", "msg", ")", "return", "fnc", "(", "*", "args", ",", "**", "kwargs", ")", "return", "_wrapped" ]
authentication decorator .
train
true