Dataset columns:
id_within_dataset: int64 (values 1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: list (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 distinct value)
is_duplicated: bool (2 classes)

Each record below lists these fields in order: id_within_dataset, snippet, tokens, nl, split_within_dataset, is_duplicated.
44,449
def test_oss_fit_sample(): oss = OneSidedSelection(random_state=RND_SEED) (X_resampled, y_resampled) = oss.fit_sample(X, Y) X_gt = np.array([[(-0.3879569), 0.6894251], [0.91542919, (-0.65453327)], [(-0.65571327), 0.42412021], [1.06446472, (-1.09279772)], [0.30543283, (-0.02589502)], [(-0.00717161), 0.00318087], [(-0.09322739), 1.28177189], [(-0.77740357), 0.74097941], [(-0.43877303), 1.07366684], [(-0.85795321), 0.82980738], [(-0.30126957), (-0.66268378)], [0.20246714, (-0.34727125)]]) y_gt = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]) assert_array_equal(X_resampled, X_gt) assert_array_equal(y_resampled, y_gt)
[ "def", "test_oss_fit_sample", "(", ")", ":", "oss", "=", "OneSidedSelection", "(", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ")", "=", "oss", ".", "fit_sample", "(", "X", ",", "Y", ")", "X_gt", "=", "np", ".", "array", "(", "[", "[", "(", "-", "0.3879569", ")", ",", "0.6894251", "]", ",", "[", "0.91542919", ",", "(", "-", "0.65453327", ")", "]", ",", "[", "(", "-", "0.65571327", ")", ",", "0.42412021", "]", ",", "[", "1.06446472", ",", "(", "-", "1.09279772", ")", "]", ",", "[", "0.30543283", ",", "(", "-", "0.02589502", ")", "]", ",", "[", "(", "-", "0.00717161", ")", ",", "0.00318087", "]", ",", "[", "(", "-", "0.09322739", ")", ",", "1.28177189", "]", ",", "[", "(", "-", "0.77740357", ")", ",", "0.74097941", "]", ",", "[", "(", "-", "0.43877303", ")", ",", "1.07366684", "]", ",", "[", "(", "-", "0.85795321", ")", ",", "0.82980738", "]", ",", "[", "(", "-", "0.30126957", ")", ",", "(", "-", "0.66268378", ")", "]", ",", "[", "0.20246714", ",", "(", "-", "0.34727125", ")", "]", "]", ")", "y_gt", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "1", ",", "1", ",", "1", ",", "1", ",", "1", "]", ")", "assert_array_equal", "(", "X_resampled", ",", "X_gt", ")", "assert_array_equal", "(", "y_resampled", ",", "y_gt", ")" ]
test the fit sample routine .
train
false
44,452
def mock_input(testfunc): def test_method(self): testgen = testfunc(self) with mock_input_helper(testgen) as mih: mih.ip.interact() if (mih.exception is not None): (etype, value, tb) = mih.exception import traceback traceback.print_tb(tb, file=sys.stdout) del tb raise value return test_method
[ "def", "mock_input", "(", "testfunc", ")", ":", "def", "test_method", "(", "self", ")", ":", "testgen", "=", "testfunc", "(", "self", ")", "with", "mock_input_helper", "(", "testgen", ")", "as", "mih", ":", "mih", ".", "ip", ".", "interact", "(", ")", "if", "(", "mih", ".", "exception", "is", "not", "None", ")", ":", "(", "etype", ",", "value", ",", "tb", ")", "=", "mih", ".", "exception", "import", "traceback", "traceback", ".", "print_tb", "(", "tb", ",", "file", "=", "sys", ".", "stdout", ")", "del", "tb", "raise", "value", "return", "test_method" ]
decorator for tests of the main interact loop .
train
false
44,453
def unregister_serializer(format): if (not _serializers): _load_serializers() if (format not in _serializers): raise SerializerDoesNotExist(format) del _serializers[format]
[ "def", "unregister_serializer", "(", "format", ")", ":", "if", "(", "not", "_serializers", ")", ":", "_load_serializers", "(", ")", "if", "(", "format", "not", "in", "_serializers", ")", ":", "raise", "SerializerDoesNotExist", "(", "format", ")", "del", "_serializers", "[", "format", "]" ]
unregister a given serializer .
train
false
44,454
def create_option_group(name, engine_name, major_engine_version, option_group_description, tags=None, region=None, key=None, keyid=None, profile=None): res = __salt__['boto_rds.option_group_exists'](name, tags, region, key, keyid, profile) if res.get('exists'): return {'exists': bool(res)} try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if (not conn): return {'results': bool(conn)} taglist = _tag_doc(tags) rds = conn.create_option_group(OptionGroupName=name, EngineName=engine_name, MajorEngineVersion=major_engine_version, OptionGroupDescription=option_group_description, Tags=taglist) return {'exists': bool(rds)} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "create_option_group", "(", "name", ",", "engine_name", ",", "major_engine_version", ",", "option_group_description", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "res", "=", "__salt__", "[", "'boto_rds.option_group_exists'", "]", "(", "name", ",", "tags", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "res", ".", "get", "(", "'exists'", ")", ":", "return", "{", "'exists'", ":", "bool", "(", "res", ")", "}", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "conn", ")", ":", "return", "{", "'results'", ":", "bool", "(", "conn", ")", "}", "taglist", "=", "_tag_doc", "(", "tags", ")", "rds", "=", "conn", ".", "create_option_group", "(", "OptionGroupName", "=", "name", ",", "EngineName", "=", "engine_name", ",", "MajorEngineVersion", "=", "major_engine_version", ",", "OptionGroupDescription", "=", "option_group_description", ",", "Tags", "=", "taglist", ")", "return", "{", "'exists'", ":", "bool", "(", "rds", ")", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
create an rds option group cli example to create an rds option group:: salt myminion boto_rds .
train
true
44,455
def set_hibernate_timeout(timeout, power='ac', scheme=None): return _set_powercfg_value(scheme, 'SUB_SLEEP', 'HIBERNATEIDLE', power, timeout)
[ "def", "set_hibernate_timeout", "(", "timeout", ",", "power", "=", "'ac'", ",", "scheme", "=", "None", ")", ":", "return", "_set_powercfg_value", "(", "scheme", ",", "'SUB_SLEEP'", ",", "'HIBERNATEIDLE'", ",", "power", ",", "timeout", ")" ]
set the hibernate timeout in minutes for the given power scheme cli example: .
train
false
44,457
def check_argument_range(low, high, parameter_name): def _in_range(value): value = int(value) if (not (low <= value < high)): print_error((u'\n** ERROR: khmer only supports %i <= %s < %i.\n' % (low, parameter_name, high))) sys.exit(1) else: return value return _in_range
[ "def", "check_argument_range", "(", "low", ",", "high", ",", "parameter_name", ")", ":", "def", "_in_range", "(", "value", ")", ":", "value", "=", "int", "(", "value", ")", "if", "(", "not", "(", "low", "<=", "value", "<", "high", ")", ")", ":", "print_error", "(", "(", "u'\\n** ERROR: khmer only supports %i <= %s < %i.\\n'", "%", "(", "low", ",", "parameter_name", ",", "high", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "return", "value", "return", "_in_range" ]
check if parameter value is in the range low to high .
train
false
44,459
def ipParse(ip): args = [int(arg) for arg in ip.split('.')] while (len(args) < 4): args.insert((len(args) - 1), 0) return ipNum(*args)
[ "def", "ipParse", "(", "ip", ")", ":", "args", "=", "[", "int", "(", "arg", ")", "for", "arg", "in", "ip", ".", "split", "(", "'.'", ")", "]", "while", "(", "len", "(", "args", ")", "<", "4", ")", ":", "args", ".", "insert", "(", "(", "len", "(", "args", ")", "-", "1", ")", ",", "0", ")", "return", "ipNum", "(", "*", "args", ")" ]
parse an ip address and return an unsigned int .
train
false
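The ipParse record above pads a short dotted address with zeros before the last octet and then delegates to ipNum, which is not included in the record. A minimal runnable sketch, with ipNum written here as an assumed helper that packs four octets into an unsigned 32-bit integer:

def ipNum(a, b, c, d):
    # assumed helper: pack four octets into one unsigned int
    return (a << 24) | (b << 16) | (c << 8) | d

def ipParse(ip):
    args = [int(arg) for arg in ip.split('.')]
    while len(args) < 4:
        # pad with zeros immediately before the last octet
        args.insert(len(args) - 1, 0)
    return ipNum(*args)

print(ipParse('10.1'))      # treated as 10.0.0.1 -> 167772161
print(ipParse('10.0.0.1'))  # 167772161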
44,460
def logs_buffer(): return _global_buffer
[ "def", "logs_buffer", "(", ")", ":", "return", "_global_buffer" ]
returns the logsbuffer used by the current request .
train
false
44,463
def render_frac(numerator, denominator): if (len(numerator) == 1): num_latex = numerator[0].sans_parens else: num_latex = '\\cdot '.join((k.latex for k in numerator)) if (len(denominator) == 1): den_latex = denominator[0].sans_parens else: den_latex = '\\cdot '.join((k.latex for k in denominator)) latex = u'\\frac{{{num}}}{{{den}}}'.format(num=num_latex, den=den_latex) return latex
[ "def", "render_frac", "(", "numerator", ",", "denominator", ")", ":", "if", "(", "len", "(", "numerator", ")", "==", "1", ")", ":", "num_latex", "=", "numerator", "[", "0", "]", ".", "sans_parens", "else", ":", "num_latex", "=", "'\\\\cdot '", ".", "join", "(", "(", "k", ".", "latex", "for", "k", "in", "numerator", ")", ")", "if", "(", "len", "(", "denominator", ")", "==", "1", ")", ":", "den_latex", "=", "denominator", "[", "0", "]", ".", "sans_parens", "else", ":", "den_latex", "=", "'\\\\cdot '", ".", "join", "(", "(", "k", ".", "latex", "for", "k", "in", "denominator", ")", ")", "latex", "=", "u'\\\\frac{{{num}}}{{{den}}}'", ".", "format", "(", "num", "=", "num_latex", ",", "den", "=", "den_latex", ")", "return", "latex" ]
given a list of elements in the numerator and denominator .
train
false
44,464
@raises(wcs.InvalidTransformError) def test_find_all_wcs_crash(): with open(get_pkg_data_filename(u'data/too_many_pv.hdr')) as fd: header = fd.read() wcses = wcs.find_all_wcs(header, fix=False)
[ "@", "raises", "(", "wcs", ".", "InvalidTransformError", ")", "def", "test_find_all_wcs_crash", "(", ")", ":", "with", "open", "(", "get_pkg_data_filename", "(", "u'data/too_many_pv.hdr'", ")", ")", "as", "fd", ":", "header", "=", "fd", ".", "read", "(", ")", "wcses", "=", "wcs", ".", "find_all_wcs", "(", "header", ",", "fix", "=", "False", ")" ]
causes a double free without a recent fix in wcslib_wrap .
train
false
44,465
def decorate_welcome_icon(icon, background_color): welcome_icon = QIcon() sizes = [32, 48, 64, 80] background_color = NAMED_COLORS.get(background_color, background_color) background_color = QColor(background_color) grad = radial_gradient(background_color) for size in sizes: icon_pixmap = icon.pixmap(((5 * size) / 8), ((5 * size) / 8)) icon_size = icon_pixmap.size() icon_rect = QRect(QPoint(0, 0), icon_size) pixmap = QPixmap(size, size) pixmap.fill(QColor(0, 0, 0, 0)) p = QPainter(pixmap) p.setRenderHint(QPainter.Antialiasing, True) p.setBrush(QBrush(grad)) p.setPen(Qt.NoPen) ellipse_rect = QRect(0, 0, size, size) p.drawEllipse(ellipse_rect) icon_rect.moveCenter(ellipse_rect.center()) p.drawPixmap(icon_rect.topLeft(), icon_pixmap) p.end() welcome_icon.addPixmap(pixmap) return welcome_icon
[ "def", "decorate_welcome_icon", "(", "icon", ",", "background_color", ")", ":", "welcome_icon", "=", "QIcon", "(", ")", "sizes", "=", "[", "32", ",", "48", ",", "64", ",", "80", "]", "background_color", "=", "NAMED_COLORS", ".", "get", "(", "background_color", ",", "background_color", ")", "background_color", "=", "QColor", "(", "background_color", ")", "grad", "=", "radial_gradient", "(", "background_color", ")", "for", "size", "in", "sizes", ":", "icon_pixmap", "=", "icon", ".", "pixmap", "(", "(", "(", "5", "*", "size", ")", "/", "8", ")", ",", "(", "(", "5", "*", "size", ")", "/", "8", ")", ")", "icon_size", "=", "icon_pixmap", ".", "size", "(", ")", "icon_rect", "=", "QRect", "(", "QPoint", "(", "0", ",", "0", ")", ",", "icon_size", ")", "pixmap", "=", "QPixmap", "(", "size", ",", "size", ")", "pixmap", ".", "fill", "(", "QColor", "(", "0", ",", "0", ",", "0", ",", "0", ")", ")", "p", "=", "QPainter", "(", "pixmap", ")", "p", ".", "setRenderHint", "(", "QPainter", ".", "Antialiasing", ",", "True", ")", "p", ".", "setBrush", "(", "QBrush", "(", "grad", ")", ")", "p", ".", "setPen", "(", "Qt", ".", "NoPen", ")", "ellipse_rect", "=", "QRect", "(", "0", ",", "0", ",", "size", ",", "size", ")", "p", ".", "drawEllipse", "(", "ellipse_rect", ")", "icon_rect", ".", "moveCenter", "(", "ellipse_rect", ".", "center", "(", ")", ")", "p", ".", "drawPixmap", "(", "icon_rect", ".", "topLeft", "(", ")", ",", "icon_pixmap", ")", "p", ".", "end", "(", ")", "welcome_icon", ".", "addPixmap", "(", "pixmap", ")", "return", "welcome_icon" ]
return a qicon with a circle shaped background .
train
false
44,466
def _cleanup_destdir(name): try: os.rmdir(name) except OSError: pass
[ "def", "_cleanup_destdir", "(", "name", ")", ":", "try", ":", "os", ".", "rmdir", "(", "name", ")", "except", "OSError", ":", "pass" ]
attempt to remove the specified directory .
train
false
44,468
def get_disk_type(vm_): return config.get_cloud_config_value('disk_type', vm_, __opts__, default='HDD', search_global=False)
[ "def", "get_disk_type", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'disk_type'", ",", "vm_", ",", "__opts__", ",", "default", "=", "'HDD'", ",", "search_global", "=", "False", ")" ]
return the type of disk to use .
train
false
44,470
def libvlc_media_list_player_play_item(p_mlp, p_md): f = (_Cfunctions.get('libvlc_media_list_player_play_item', None) or _Cfunction('libvlc_media_list_player_play_item', ((1,), (1,)), None, ctypes.c_int, MediaListPlayer, Media)) return f(p_mlp, p_md)
[ "def", "libvlc_media_list_player_play_item", "(", "p_mlp", ",", "p_md", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_play_item'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_play_item'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaListPlayer", ",", "Media", ")", ")", "return", "f", "(", "p_mlp", ",", "p_md", ")" ]
play the given media item .
train
true
44,471
def get_env_var(key): if (not hasattr(os, 'environ')): raise Exception('os.environ not implemented!') l = [os.environ[x] for x in os.environ.keys() if (x.lower() == key.lower())] if (len(l) > 0): return l[0] else: return None
[ "def", "get_env_var", "(", "key", ")", ":", "if", "(", "not", "hasattr", "(", "os", ",", "'environ'", ")", ")", ":", "raise", "Exception", "(", "'os.environ not implemented!'", ")", "l", "=", "[", "os", ".", "environ", "[", "x", "]", "for", "x", "in", "os", ".", "environ", ".", "keys", "(", ")", "if", "(", "x", ".", "lower", "(", ")", "==", "key", ".", "lower", "(", ")", ")", "]", "if", "(", "len", "(", "l", ")", ">", "0", ")", ":", "return", "l", "[", "0", "]", "else", ":", "return", "None" ]
returns the environment variable denoted by key .
train
false
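A small sketch of the case-insensitive environment lookup in the get_env_var record above; the variable name and value below are made up for illustration:

import os

def get_env_var(key):
    if not hasattr(os, 'environ'):
        raise Exception('os.environ not implemented!')
    # case-insensitive match against environment variable names
    matches = [os.environ[x] for x in os.environ.keys() if x.lower() == key.lower()]
    return matches[0] if matches else None

os.environ['MY_SETTING'] = '42'
print(get_env_var('my_setting'))  # 42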
44,472
def add_pids(pids): if (not isinstance(pids, (tuple, list, set))): pids = [pids] for pid in pids: log.info('Added new process to list with pid: %s', pid) pid = int(pid) if (not (pid in SEEN_LIST)): PROCESS_LIST.add(pid) SEEN_LIST.add(pid)
[ "def", "add_pids", "(", "pids", ")", ":", "if", "(", "not", "isinstance", "(", "pids", ",", "(", "tuple", ",", "list", ",", "set", ")", ")", ")", ":", "pids", "=", "[", "pids", "]", "for", "pid", "in", "pids", ":", "log", ".", "info", "(", "'Added new process to list with pid: %s'", ",", "pid", ")", "pid", "=", "int", "(", "pid", ")", "if", "(", "not", "(", "pid", "in", "SEEN_LIST", ")", ")", ":", "PROCESS_LIST", ".", "add", "(", "pid", ")", "SEEN_LIST", ".", "add", "(", "pid", ")" ]
add pid .
train
false
44,473
def is_socket(fd): result = False file_socket = socket.fromfd(fd, socket.AF_INET, socket.SOCK_RAW) try: socket_type = file_socket.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE) except socket.error as exc: exc_errno = exc.args[0] if (exc_errno == errno.ENOTSOCK): pass else: result = True else: result = True return result
[ "def", "is_socket", "(", "fd", ")", ":", "result", "=", "False", "file_socket", "=", "socket", ".", "fromfd", "(", "fd", ",", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_RAW", ")", "try", ":", "socket_type", "=", "file_socket", ".", "getsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_TYPE", ")", "except", "socket", ".", "error", "as", "exc", ":", "exc_errno", "=", "exc", ".", "args", "[", "0", "]", "if", "(", "exc_errno", "==", "errno", ".", "ENOTSOCK", ")", ":", "pass", "else", ":", "result", "=", "True", "else", ":", "result", "=", "True", "return", "result" ]
determine if the file descriptor is a socket .
train
false
44,476
def full_port_name(portname): m = re.match('^COM(\\d+)$', portname) if (m and (int(m.group(1)) < 10)): return portname return ('\\\\.\\' + portname)
[ "def", "full_port_name", "(", "portname", ")", ":", "m", "=", "re", ".", "match", "(", "'^COM(\\\\d+)$'", ",", "portname", ")", "if", "(", "m", "and", "(", "int", "(", "m", ".", "group", "(", "1", ")", ")", "<", "10", ")", ")", ":", "return", "portname", "return", "(", "'\\\\\\\\.\\\\'", "+", "portname", ")" ]
given a port-name returns a full name suitable for opening with the serial class .
train
false
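A self-contained sketch of the full_port_name record above: COM ports numbered below 10 are returned unchanged, while higher-numbered ports get the Windows device-namespace prefix. The regex and prefix come straight from the snippet:

import re

def full_port_name(portname):
    # COM1..COM9 can be opened by plain name; COM10 and above need the \\.\ prefix
    m = re.match(r'^COM(\d+)$', portname)
    if m and int(m.group(1)) < 10:
        return portname
    return '\\\\.\\' + portname

print(full_port_name('COM3'))   # COM3
print(full_port_name('COM12'))  # \\.\COM12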
44,477
def _loopbackTLSConnection(serverOpts, clientOpts): class GreetingServer(protocol.Protocol, ): greeting = 'greetings!' def connectionMade(self): self.transport.write(self.greeting) class ListeningClient(protocol.Protocol, ): data = '' lostReason = None def dataReceived(self, data): self.data += data def connectionLost(self, reason): self.lostReason = reason clientFactory = TLSMemoryBIOFactory(clientOpts, isClient=True, wrappedFactory=protocol.Factory.forProtocol(GreetingServer)) serverFactory = TLSMemoryBIOFactory(serverOpts, isClient=False, wrappedFactory=protocol.Factory.forProtocol(ListeningClient)) (sProto, cProto, pump) = connectedServerAndClient((lambda : serverFactory.buildProtocol(None)), (lambda : clientFactory.buildProtocol(None))) return (sProto, cProto, pump)
[ "def", "_loopbackTLSConnection", "(", "serverOpts", ",", "clientOpts", ")", ":", "class", "GreetingServer", "(", "protocol", ".", "Protocol", ",", ")", ":", "greeting", "=", "'greetings!'", "def", "connectionMade", "(", "self", ")", ":", "self", ".", "transport", ".", "write", "(", "self", ".", "greeting", ")", "class", "ListeningClient", "(", "protocol", ".", "Protocol", ",", ")", ":", "data", "=", "''", "lostReason", "=", "None", "def", "dataReceived", "(", "self", ",", "data", ")", ":", "self", ".", "data", "+=", "data", "def", "connectionLost", "(", "self", ",", "reason", ")", ":", "self", ".", "lostReason", "=", "reason", "clientFactory", "=", "TLSMemoryBIOFactory", "(", "clientOpts", ",", "isClient", "=", "True", ",", "wrappedFactory", "=", "protocol", ".", "Factory", ".", "forProtocol", "(", "GreetingServer", ")", ")", "serverFactory", "=", "TLSMemoryBIOFactory", "(", "serverOpts", ",", "isClient", "=", "False", ",", "wrappedFactory", "=", "protocol", ".", "Factory", ".", "forProtocol", "(", "ListeningClient", ")", ")", "(", "sProto", ",", "cProto", ",", "pump", ")", "=", "connectedServerAndClient", "(", "(", "lambda", ":", "serverFactory", ".", "buildProtocol", "(", "None", ")", ")", ",", "(", "lambda", ":", "clientFactory", ".", "buildProtocol", "(", "None", ")", ")", ")", "return", "(", "sProto", ",", "cProto", ",", "pump", ")" ]
common implementation code for both l{loopbacktlsconnection} and l{loopbacktlsconnectioninmemory} .
train
false
44,478
def add_origin(examples, filename): if (not filename): return vars = (examples.domain.variables + examples.domain.metas) strings = [var for var in vars if var.is_string] (dir_name, basename) = os.path.split(filename) for var in strings: if (('type' in var.attributes) and ('origin' not in var.attributes)): var.attributes['origin'] = dir_name
[ "def", "add_origin", "(", "examples", ",", "filename", ")", ":", "if", "(", "not", "filename", ")", ":", "return", "vars", "=", "(", "examples", ".", "domain", ".", "variables", "+", "examples", ".", "domain", ".", "metas", ")", "strings", "=", "[", "var", "for", "var", "in", "vars", "if", "var", ".", "is_string", "]", "(", "dir_name", ",", "basename", ")", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "for", "var", "in", "strings", ":", "if", "(", "(", "'type'", "in", "var", ".", "attributes", ")", "and", "(", "'origin'", "not", "in", "var", ".", "attributes", ")", ")", ":", "var", ".", "attributes", "[", "'origin'", "]", "=", "dir_name" ]
adds attribute with file location to each string variable used for relative filenames stored in string variables todo: we should consider a cleaner solution .
train
false
44,479
def keyring_auth(username=None, region=None, authenticate=True): if (not keyring): raise exc.KeyringModuleNotInstalled("The 'keyring' Python module is not installed on this system.") if (username is None): username = settings.get('keyring_username') if (not username): raise exc.KeyringUsernameMissing('No username specified for keyring authentication.') password = keyring.get_password('pyrax', username) if (password is None): raise exc.KeyringPasswordNotFound(("No password was found for the username '%s'." % username)) set_credentials(username, password, region=region, authenticate=authenticate)
[ "def", "keyring_auth", "(", "username", "=", "None", ",", "region", "=", "None", ",", "authenticate", "=", "True", ")", ":", "if", "(", "not", "keyring", ")", ":", "raise", "exc", ".", "KeyringModuleNotInstalled", "(", "\"The 'keyring' Python module is not installed on this system.\"", ")", "if", "(", "username", "is", "None", ")", ":", "username", "=", "settings", ".", "get", "(", "'keyring_username'", ")", "if", "(", "not", "username", ")", ":", "raise", "exc", ".", "KeyringUsernameMissing", "(", "'No username specified for keyring authentication.'", ")", "password", "=", "keyring", ".", "get_password", "(", "'pyrax'", ",", "username", ")", "if", "(", "password", "is", "None", ")", ":", "raise", "exc", ".", "KeyringPasswordNotFound", "(", "(", "\"No password was found for the username '%s'.\"", "%", "username", ")", ")", "set_credentials", "(", "username", ",", "password", ",", "region", "=", "region", ",", "authenticate", "=", "authenticate", ")" ]
use the password stored within the keyring to authenticate .
train
true
44,480
def migrate_guid_wiki(wiki): data = wiki.to_storage() uid = data.get('user') if uid: record = models.User.load(uid.lower()) if record: wiki.user = record pid = data.get('node') if pid: record = models.Node.load(pid.lower()) if record: wiki.node = record wiki.save()
[ "def", "migrate_guid_wiki", "(", "wiki", ")", ":", "data", "=", "wiki", ".", "to_storage", "(", ")", "uid", "=", "data", ".", "get", "(", "'user'", ")", "if", "uid", ":", "record", "=", "models", ".", "User", ".", "load", "(", "uid", ".", "lower", "(", ")", ")", "if", "record", ":", "wiki", ".", "user", "=", "record", "pid", "=", "data", ".", "get", "(", "'node'", ")", "if", "pid", ":", "record", "=", "models", ".", "Node", ".", "load", "(", "pid", ".", "lower", "(", ")", ")", "if", "record", ":", "wiki", ".", "node", "=", "record", "wiki", ".", "save", "(", ")" ]
migrate non-reference fields containing primary keys on wiki pages .
train
false
44,482
def analyse_action(func): _deprecated() description = (inspect.getdoc(func) or 'undocumented action') arguments = [] (args, varargs, kwargs, defaults) = inspect.getargspec(func) if (varargs or kwargs): raise TypeError('variable length arguments for action not allowed.') if (len(args) != len((defaults or ()))): raise TypeError('not all arguments have proper definitions') for (idx, (arg, definition)) in enumerate(zip(args, (defaults or ()))): if arg.startswith('_'): raise TypeError('arguments may not start with an underscore') if (not isinstance(definition, tuple)): shortcut = None default = definition else: (shortcut, default) = definition argument_type = argument_types[type(default)] if (isinstance(default, bool) and (default is True)): arg = ('no-' + arg) arguments.append((arg.replace('_', '-'), shortcut, default, argument_type)) return (func, description, arguments)
[ "def", "analyse_action", "(", "func", ")", ":", "_deprecated", "(", ")", "description", "=", "(", "inspect", ".", "getdoc", "(", "func", ")", "or", "'undocumented action'", ")", "arguments", "=", "[", "]", "(", "args", ",", "varargs", ",", "kwargs", ",", "defaults", ")", "=", "inspect", ".", "getargspec", "(", "func", ")", "if", "(", "varargs", "or", "kwargs", ")", ":", "raise", "TypeError", "(", "'variable length arguments for action not allowed.'", ")", "if", "(", "len", "(", "args", ")", "!=", "len", "(", "(", "defaults", "or", "(", ")", ")", ")", ")", ":", "raise", "TypeError", "(", "'not all arguments have proper definitions'", ")", "for", "(", "idx", ",", "(", "arg", ",", "definition", ")", ")", "in", "enumerate", "(", "zip", "(", "args", ",", "(", "defaults", "or", "(", ")", ")", ")", ")", ":", "if", "arg", ".", "startswith", "(", "'_'", ")", ":", "raise", "TypeError", "(", "'arguments may not start with an underscore'", ")", "if", "(", "not", "isinstance", "(", "definition", ",", "tuple", ")", ")", ":", "shortcut", "=", "None", "default", "=", "definition", "else", ":", "(", "shortcut", ",", "default", ")", "=", "definition", "argument_type", "=", "argument_types", "[", "type", "(", "default", ")", "]", "if", "(", "isinstance", "(", "default", ",", "bool", ")", "and", "(", "default", "is", "True", ")", ")", ":", "arg", "=", "(", "'no-'", "+", "arg", ")", "arguments", ".", "append", "(", "(", "arg", ".", "replace", "(", "'_'", ",", "'-'", ")", ",", "shortcut", ",", "default", ",", "argument_type", ")", ")", "return", "(", "func", ",", "description", ",", "arguments", ")" ]
analyse a function .
train
true
44,483
def checkstyle(registry, xml_parent, data): def convert_settings(lookup, data): 'Helper to convert settings from one key to another\n ' for old_key in list(data.keys()): if (old_key in lookup): data.setdefault(lookup[old_key], data[old_key]) del data[old_key] checkstyle = XML.SubElement(xml_parent, 'hudson.plugins.checkstyle.CheckStylePublisher') checkstyle.set('plugin', 'checkstyle') convert_settings({'unHealthy': 'unhealthy', 'healthThreshold': 'health-threshold', 'defaultEncoding': 'default-encoding', 'canRunOnFailed': 'can-run-on-failed', 'shouldDetectModules': 'should-detect-modules'}, data) threshold_data = data.get('thresholds', {}) for threshold in ['unstable', 'failed']: convert_settings({'totalAll': 'total-all', 'totalHigh': 'total-high', 'totalNormal': 'total-normal', 'totalLow': 'total-low'}, threshold_data.get(threshold, {})) helpers.build_trends_publisher('[CHECKSTYLE] ', checkstyle, data)
[ "def", "checkstyle", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "def", "convert_settings", "(", "lookup", ",", "data", ")", ":", "for", "old_key", "in", "list", "(", "data", ".", "keys", "(", ")", ")", ":", "if", "(", "old_key", "in", "lookup", ")", ":", "data", ".", "setdefault", "(", "lookup", "[", "old_key", "]", ",", "data", "[", "old_key", "]", ")", "del", "data", "[", "old_key", "]", "checkstyle", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.checkstyle.CheckStylePublisher'", ")", "checkstyle", ".", "set", "(", "'plugin'", ",", "'checkstyle'", ")", "convert_settings", "(", "{", "'unHealthy'", ":", "'unhealthy'", ",", "'healthThreshold'", ":", "'health-threshold'", ",", "'defaultEncoding'", ":", "'default-encoding'", ",", "'canRunOnFailed'", ":", "'can-run-on-failed'", ",", "'shouldDetectModules'", ":", "'should-detect-modules'", "}", ",", "data", ")", "threshold_data", "=", "data", ".", "get", "(", "'thresholds'", ",", "{", "}", ")", "for", "threshold", "in", "[", "'unstable'", ",", "'failed'", "]", ":", "convert_settings", "(", "{", "'totalAll'", ":", "'total-all'", ",", "'totalHigh'", ":", "'total-high'", ",", "'totalNormal'", ":", "'total-normal'", ",", "'totalLow'", ":", "'total-low'", "}", ",", "threshold_data", ".", "get", "(", "threshold", ",", "{", "}", ")", ")", "helpers", ".", "build_trends_publisher", "(", "'[CHECKSTYLE] '", ",", "checkstyle", ",", "data", ")" ]
yaml: checkstyle publish trend reports with checkstyle .
train
false
44,484
def is_stable_version(version): if (not isinstance(version, tuple)): version = version.split('.') last_part = version[(-1)] if (not re.search('[a-zA-Z]', last_part)): return True else: return False
[ "def", "is_stable_version", "(", "version", ")", ":", "if", "(", "not", "isinstance", "(", "version", ",", "tuple", ")", ")", ":", "version", "=", "version", ".", "split", "(", "'.'", ")", "last_part", "=", "version", "[", "(", "-", "1", ")", "]", "if", "(", "not", "re", ".", "search", "(", "'[a-zA-Z]'", ",", "last_part", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
a stable version has no letters in the final component .
train
true
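A brief usage sketch for the is_stable_version record above; the rule is that a stable version has no letters in its final dot-separated component, so a dev or rc suffix makes it unstable:

import re

def is_stable_version(version):
    if not isinstance(version, tuple):
        version = version.split('.')
    last_part = version[-1]
    # stable iff the last component contains no letters
    return not re.search('[a-zA-Z]', last_part)

print(is_stable_version('3.2.1'))       # True
print(is_stable_version('3.2.1.dev0'))  # False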
44,485
def blob_doh(image, min_sigma=1, max_sigma=30, num_sigma=10, threshold=0.01, overlap=0.5, log_scale=False): assert_nD(image, 2) image = img_as_float(image) image = integral_image(image) if log_scale: (start, stop) = (log(min_sigma, 10), log(max_sigma, 10)) sigma_list = np.logspace(start, stop, num_sigma) else: sigma_list = np.linspace(min_sigma, max_sigma, num_sigma) hessian_images = [_hessian_matrix_det(image, s) for s in sigma_list] image_cube = np.dstack(hessian_images) local_maxima = peak_local_max(image_cube, threshold_abs=threshold, footprint=np.ones((3, 3, 3)), threshold_rel=0.0, exclude_border=False) if (local_maxima.size == 0): return np.empty((0, 3)) lm = local_maxima.astype(np.float64) lm[:, 2] = sigma_list[local_maxima[:, 2]] local_maxima = lm return _prune_blobs(local_maxima, overlap)
[ "def", "blob_doh", "(", "image", ",", "min_sigma", "=", "1", ",", "max_sigma", "=", "30", ",", "num_sigma", "=", "10", ",", "threshold", "=", "0.01", ",", "overlap", "=", "0.5", ",", "log_scale", "=", "False", ")", ":", "assert_nD", "(", "image", ",", "2", ")", "image", "=", "img_as_float", "(", "image", ")", "image", "=", "integral_image", "(", "image", ")", "if", "log_scale", ":", "(", "start", ",", "stop", ")", "=", "(", "log", "(", "min_sigma", ",", "10", ")", ",", "log", "(", "max_sigma", ",", "10", ")", ")", "sigma_list", "=", "np", ".", "logspace", "(", "start", ",", "stop", ",", "num_sigma", ")", "else", ":", "sigma_list", "=", "np", ".", "linspace", "(", "min_sigma", ",", "max_sigma", ",", "num_sigma", ")", "hessian_images", "=", "[", "_hessian_matrix_det", "(", "image", ",", "s", ")", "for", "s", "in", "sigma_list", "]", "image_cube", "=", "np", ".", "dstack", "(", "hessian_images", ")", "local_maxima", "=", "peak_local_max", "(", "image_cube", ",", "threshold_abs", "=", "threshold", ",", "footprint", "=", "np", ".", "ones", "(", "(", "3", ",", "3", ",", "3", ")", ")", ",", "threshold_rel", "=", "0.0", ",", "exclude_border", "=", "False", ")", "if", "(", "local_maxima", ".", "size", "==", "0", ")", ":", "return", "np", ".", "empty", "(", "(", "0", ",", "3", ")", ")", "lm", "=", "local_maxima", ".", "astype", "(", "np", ".", "float64", ")", "lm", "[", ":", ",", "2", "]", "=", "sigma_list", "[", "local_maxima", "[", ":", ",", "2", "]", "]", "local_maxima", "=", "lm", "return", "_prune_blobs", "(", "local_maxima", ",", "overlap", ")" ]
finds blobs in the given grayscale image .
train
false
44,487
def check_no_log_audit(logical_line): if no_audit_log.match(logical_line): (yield (0, 'C304: Found LOG.audit. Use LOG.info instead.'))
[ "def", "check_no_log_audit", "(", "logical_line", ")", ":", "if", "no_audit_log", ".", "match", "(", "logical_line", ")", ":", "(", "yield", "(", "0", ",", "'C304: Found LOG.audit. Use LOG.info instead.'", ")", ")" ]
ensure that we are not using log .
train
false
44,488
def remove_interface_router(router, subnet, profile=None): conn = _auth(profile) return conn.remove_interface_router(router, subnet)
[ "def", "remove_interface_router", "(", "router", ",", "subnet", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "remove_interface_router", "(", "router", ",", "subnet", ")" ]
removes an internal network interface from the specified router cli example: .
train
true
44,489
def users_role_for_group_or_org(group_id, user_name): if (not group_id): return None group_id = model.Group.get(group_id).id user_id = get_user_id_for_username(user_name, allow_none=True) if (not user_id): return None q = model.Session.query(model.Member).filter((model.Member.group_id == group_id)).filter((model.Member.table_name == 'user')).filter((model.Member.state == 'active')).filter((model.Member.table_id == user_id)) for row in q.all(): return row.capacity return None
[ "def", "users_role_for_group_or_org", "(", "group_id", ",", "user_name", ")", ":", "if", "(", "not", "group_id", ")", ":", "return", "None", "group_id", "=", "model", ".", "Group", ".", "get", "(", "group_id", ")", ".", "id", "user_id", "=", "get_user_id_for_username", "(", "user_name", ",", "allow_none", "=", "True", ")", "if", "(", "not", "user_id", ")", ":", "return", "None", "q", "=", "model", ".", "Session", ".", "query", "(", "model", ".", "Member", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "group_id", "==", "group_id", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "table_name", "==", "'user'", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "state", "==", "'active'", ")", ")", ".", "filter", "(", "(", "model", ".", "Member", ".", "table_id", "==", "user_id", ")", ")", "for", "row", "in", "q", ".", "all", "(", ")", ":", "return", "row", ".", "capacity", "return", "None" ]
returns the users role for the group .
train
false
44,490
def _maybe_numeric_slice(df, slice_, include_bool=False): if (slice_ is None): dtypes = [np.number] if include_bool: dtypes.append(bool) slice_ = IndexSlice[:, df.select_dtypes(include=dtypes).columns] return slice_
[ "def", "_maybe_numeric_slice", "(", "df", ",", "slice_", ",", "include_bool", "=", "False", ")", ":", "if", "(", "slice_", "is", "None", ")", ":", "dtypes", "=", "[", "np", ".", "number", "]", "if", "include_bool", ":", "dtypes", ".", "append", "(", "bool", ")", "slice_", "=", "IndexSlice", "[", ":", ",", "df", ".", "select_dtypes", "(", "include", "=", "dtypes", ")", ".", "columns", "]", "return", "slice_" ]
want nice defaults for background_gradient that dont break with non-numeric data .
train
true
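The _maybe_numeric_slice record above defaults a missing slice to the numeric (and optionally boolean) columns of a DataFrame. A small sketch assuming IndexSlice is pandas.IndexSlice, with a made-up frame:

import numpy as np
import pandas as pd

def _maybe_numeric_slice(df, slice_, include_bool=False):
    if slice_ is None:
        dtypes = [np.number]
        if include_bool:
            dtypes.append(bool)
        # default to all numeric (and optionally bool) columns
        slice_ = pd.IndexSlice[:, df.select_dtypes(include=dtypes).columns]
    return slice_

df = pd.DataFrame({'a': [1, 2], 'b': ['x', 'y'], 'c': [0.5, 1.5]})
print(_maybe_numeric_slice(df, None))  # (slice(None, None, None), Index(['a', 'c'], ...))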
44,491
@utils.arg('flavor', metavar='<flavor>', help=_('Flavor name or ID to remove access for the given tenant.')) @utils.arg('tenant', metavar='<tenant_id>', help=_('Tenant ID to remove flavor access for.')) def do_flavor_access_remove(cs, args): flavor = _find_flavor(cs, args.flavor) access_list = cs.flavor_access.remove_tenant_access(flavor, args.tenant) columns = ['Flavor_ID', 'Tenant_ID'] utils.print_list(access_list, columns)
[ "@", "utils", ".", "arg", "(", "'flavor'", ",", "metavar", "=", "'<flavor>'", ",", "help", "=", "_", "(", "'Flavor name or ID to remove access for the given tenant.'", ")", ")", "@", "utils", ".", "arg", "(", "'tenant'", ",", "metavar", "=", "'<tenant_id>'", ",", "help", "=", "_", "(", "'Tenant ID to remove flavor access for.'", ")", ")", "def", "do_flavor_access_remove", "(", "cs", ",", "args", ")", ":", "flavor", "=", "_find_flavor", "(", "cs", ",", "args", ".", "flavor", ")", "access_list", "=", "cs", ".", "flavor_access", ".", "remove_tenant_access", "(", "flavor", ",", "args", ".", "tenant", ")", "columns", "=", "[", "'Flavor_ID'", ",", "'Tenant_ID'", "]", "utils", ".", "print_list", "(", "access_list", ",", "columns", ")" ]
remove flavor access for the given tenant .
train
false
44,492
def snapshot_get_all_for_group_snapshot(context, group_snapshot_id): return IMPL.snapshot_get_all_for_group_snapshot(context, group_snapshot_id)
[ "def", "snapshot_get_all_for_group_snapshot", "(", "context", ",", "group_snapshot_id", ")", ":", "return", "IMPL", ".", "snapshot_get_all_for_group_snapshot", "(", "context", ",", "group_snapshot_id", ")" ]
get all snapshots belonging to a group snapshot .
train
false
44,493
def _pause_anim(event, params): params['pause'] = (not params['pause'])
[ "def", "_pause_anim", "(", "event", ",", "params", ")", ":", "params", "[", "'pause'", "]", "=", "(", "not", "params", "[", "'pause'", "]", ")" ]
pause or continue the animation on mouse click .
train
false
44,494
def prettify_jsonc(jsonc_obj, indentation=2): return simplejson.dumps(_convert_to_object(jsonc_obj), indent=indentation)
[ "def", "prettify_jsonc", "(", "jsonc_obj", ",", "indentation", "=", "2", ")", ":", "return", "simplejson", ".", "dumps", "(", "_convert_to_object", "(", "jsonc_obj", ")", ",", "indent", "=", "indentation", ")" ]
converts a jsonc object to a pretified json string .
train
false
44,496
def split_fasta_on_sample_ids(seqs): for (seq_id, seq) in seqs: (yield (seq_id.split()[0].rsplit('_', 1)[0], seq_id, seq)) return
[ "def", "split_fasta_on_sample_ids", "(", "seqs", ")", ":", "for", "(", "seq_id", ",", "seq", ")", "in", "seqs", ":", "(", "yield", "(", "seq_id", ".", "split", "(", ")", "[", "0", "]", ".", "rsplit", "(", "'_'", ",", "1", ")", "[", "0", "]", ",", "seq_id", ",", "seq", ")", ")", "return" ]
yields for each entry in seqs seqs: pairs .
train
false
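The split_fasta_on_sample_ids record above derives a sample id by taking the sequence label up to its last underscore. A short sketch with made-up sequence ids following that assumed label format:

def split_fasta_on_sample_ids(seqs):
    # label format assumed: "<sample_id>_<count> optional description"
    for seq_id, seq in seqs:
        yield seq_id.split()[0].rsplit('_', 1)[0], seq_id, seq

records = [('SampleA_0 some description', 'ACGT'), ('SampleB_12', 'GGCC')]
for sample_id, seq_id, seq in split_fasta_on_sample_ids(records):
    print(sample_id, seq_id, seq)
# SampleA SampleA_0 some description ACGT
# SampleB SampleB_12 GGCC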
44,497
def rsa_private_key(p, q, e): n = (p * q) if (isprime(p) and isprime(q)): phi = totient(n) if (gcd(e, phi) == 1): d = mod_inverse(e, phi) return (n, d) return False
[ "def", "rsa_private_key", "(", "p", ",", "q", ",", "e", ")", ":", "n", "=", "(", "p", "*", "q", ")", "if", "(", "isprime", "(", "p", ")", "and", "isprime", "(", "q", ")", ")", ":", "phi", "=", "totient", "(", "n", ")", "if", "(", "gcd", "(", "e", ",", "phi", ")", "==", "1", ")", ":", "d", "=", "mod_inverse", "(", "e", ",", "phi", ")", "return", "(", "n", ",", "d", ")", "return", "False" ]
return the rsa *private key* .
train
false
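A worked example for the rsa_private_key record above, using the textbook primes p=61, q=53 and e=17; the helpers isprime, totient, gcd and mod_inverse are assumed to come from sympy, which the snippet's names suggest:

from sympy import isprime, totient, gcd, mod_inverse

def rsa_private_key(p, q, e):
    n = p * q
    if isprime(p) and isprime(q):
        phi = totient(n)  # (p-1)*(q-1) for distinct primes p, q
        if gcd(e, phi) == 1:
            d = mod_inverse(e, phi)
            return (n, d)
    return False

print(rsa_private_key(61, 53, 17))  # (3233, 2753), since 17*2753 % 3120 == 1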
44,498
def load_optional_q(config, cidr_name): cidr = config.get(cidr_name) ip_q = None if (cidr is not None): ip_q = Queue.Queue() load_ip_q(cidr=cidr, ip_q=ip_q) return ip_q
[ "def", "load_optional_q", "(", "config", ",", "cidr_name", ")", ":", "cidr", "=", "config", ".", "get", "(", "cidr_name", ")", "ip_q", "=", "None", "if", "(", "cidr", "is", "not", "None", ")", ":", "ip_q", "=", "Queue", ".", "Queue", "(", ")", "load_ip_q", "(", "cidr", "=", "cidr", ",", "ip_q", "=", "ip_q", ")", "return", "ip_q" ]
load optional queue with ip addresses .
train
false
44,499
def run_step(*args): global DRY_RUN cmd = args print(' '.join(cmd)) if skip_step(): print('--- Skipping...') elif DRY_RUN: print('--- Pretending to run...') else: subprocess.check_output(cmd)
[ "def", "run_step", "(", "*", "args", ")", ":", "global", "DRY_RUN", "cmd", "=", "args", "print", "(", "' '", ".", "join", "(", "cmd", ")", ")", "if", "skip_step", "(", ")", ":", "print", "(", "'--- Skipping...'", ")", "elif", "DRY_RUN", ":", "print", "(", "'--- Pretending to run...'", ")", "else", ":", "subprocess", ".", "check_output", "(", "cmd", ")" ]
prints out the command and asks if it should be run .
train
true
44,500
def mean_percentile(image, selem, out=None, mask=None, shift_x=False, shift_y=False, p0=0, p1=1): return _apply(percentile_cy._mean, image, selem, out=out, mask=mask, shift_x=shift_x, shift_y=shift_y, p0=p0, p1=p1)
[ "def", "mean_percentile", "(", "image", ",", "selem", ",", "out", "=", "None", ",", "mask", "=", "None", ",", "shift_x", "=", "False", ",", "shift_y", "=", "False", ",", "p0", "=", "0", ",", "p1", "=", "1", ")", ":", "return", "_apply", "(", "percentile_cy", ".", "_mean", ",", "image", ",", "selem", ",", "out", "=", "out", ",", "mask", "=", "mask", ",", "shift_x", "=", "shift_x", ",", "shift_y", "=", "shift_y", ",", "p0", "=", "p0", ",", "p1", "=", "p1", ")" ]
return local mean of an image .
train
false
44,501
def local_var(img, size=3): structure_element = np.ones((size, size), dtype=img.dtype) l_var = signal.correlate((img ** 2), structure_element, mode='same') l_var /= (size ** 2) l_var -= (local_mean(img, size=size) ** 2) return l_var
[ "def", "local_var", "(", "img", ",", "size", "=", "3", ")", ":", "structure_element", "=", "np", ".", "ones", "(", "(", "size", ",", "size", ")", ",", "dtype", "=", "img", ".", "dtype", ")", "l_var", "=", "signal", ".", "correlate", "(", "(", "img", "**", "2", ")", ",", "structure_element", ",", "mode", "=", "'same'", ")", "l_var", "/=", "(", "size", "**", "2", ")", "l_var", "-=", "(", "local_mean", "(", "img", ",", "size", "=", "size", ")", "**", "2", ")", "return", "l_var" ]
compute a image of the local variance .
train
false
44,502
def _is_reparse_point(path): if (sys.getwindowsversion().major < 6): raise SaltInvocationError('Symlinks are only supported on Windows Vista or later.') result = win32file.GetFileAttributesW(path) if (result == (-1)): raise SaltInvocationError('The path given is not valid, symlink or not. (does it exist?)') if (result & 1024): return True else: return False
[ "def", "_is_reparse_point", "(", "path", ")", ":", "if", "(", "sys", ".", "getwindowsversion", "(", ")", ".", "major", "<", "6", ")", ":", "raise", "SaltInvocationError", "(", "'Symlinks are only supported on Windows Vista or later.'", ")", "result", "=", "win32file", ".", "GetFileAttributesW", "(", "path", ")", "if", "(", "result", "==", "(", "-", "1", ")", ")", ":", "raise", "SaltInvocationError", "(", "'The path given is not valid, symlink or not. (does it exist?)'", ")", "if", "(", "result", "&", "1024", ")", ":", "return", "True", "else", ":", "return", "False" ]
returns true if path is a reparse point; false otherwise .
train
false
44,503
def test_unit(): os.environ['SECUREDROP_ENV'] = 'test' import config _start_test_rqworker(config) test_rc = int(subprocess.call(['py.test', '--cov'])) _stop_test_rqworker() sys.exit(test_rc)
[ "def", "test_unit", "(", ")", ":", "os", ".", "environ", "[", "'SECUREDROP_ENV'", "]", "=", "'test'", "import", "config", "_start_test_rqworker", "(", "config", ")", "test_rc", "=", "int", "(", "subprocess", ".", "call", "(", "[", "'py.test'", ",", "'--cov'", "]", ")", ")", "_stop_test_rqworker", "(", ")", "sys", ".", "exit", "(", "test_rc", ")" ]
runs the unit tests .
train
false
44,504
def logistic_log_partial_ij(x_i, y_i, beta, j): return ((y_i - logistic(dot(x_i, beta))) * x_i[j])
[ "def", "logistic_log_partial_ij", "(", "x_i", ",", "y_i", ",", "beta", ",", "j", ")", ":", "return", "(", "(", "y_i", "-", "logistic", "(", "dot", "(", "x_i", ",", "beta", ")", ")", ")", "*", "x_i", "[", "j", "]", ")" ]
here i is the index of the data point .
train
false
44,505
@task @timed def install_coverage_prereqs(): if no_prereq_install(): print NO_PREREQ_MESSAGE return pip_install_req_file(COVERAGE_REQ_FILE)
[ "@", "task", "@", "timed", "def", "install_coverage_prereqs", "(", ")", ":", "if", "no_prereq_install", "(", ")", ":", "print", "NO_PREREQ_MESSAGE", "return", "pip_install_req_file", "(", "COVERAGE_REQ_FILE", ")" ]
install python prereqs for measuring coverage .
train
false
44,507
def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees, verbose=0, class_weight=None): if (verbose > 1): print ('building tree %d of %d' % ((tree_idx + 1), n_trees)) if forest.bootstrap: n_samples = X.shape[0] if (sample_weight is None): curr_sample_weight = np.ones((n_samples,), dtype=np.float64) else: curr_sample_weight = sample_weight.copy() indices = _generate_sample_indices(tree.random_state, n_samples) sample_counts = bincount(indices, minlength=n_samples) curr_sample_weight *= sample_counts if (class_weight == 'subsample'): with warnings.catch_warnings(): warnings.simplefilter('ignore', DeprecationWarning) curr_sample_weight *= compute_sample_weight('auto', y, indices) elif (class_weight == 'balanced_subsample'): curr_sample_weight *= compute_sample_weight('balanced', y, indices) tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False) else: tree.fit(X, y, sample_weight=sample_weight, check_input=False) return tree
[ "def", "_parallel_build_trees", "(", "tree", ",", "forest", ",", "X", ",", "y", ",", "sample_weight", ",", "tree_idx", ",", "n_trees", ",", "verbose", "=", "0", ",", "class_weight", "=", "None", ")", ":", "if", "(", "verbose", ">", "1", ")", ":", "print", "(", "'building tree %d of %d'", "%", "(", "(", "tree_idx", "+", "1", ")", ",", "n_trees", ")", ")", "if", "forest", ".", "bootstrap", ":", "n_samples", "=", "X", ".", "shape", "[", "0", "]", "if", "(", "sample_weight", "is", "None", ")", ":", "curr_sample_weight", "=", "np", ".", "ones", "(", "(", "n_samples", ",", ")", ",", "dtype", "=", "np", ".", "float64", ")", "else", ":", "curr_sample_weight", "=", "sample_weight", ".", "copy", "(", ")", "indices", "=", "_generate_sample_indices", "(", "tree", ".", "random_state", ",", "n_samples", ")", "sample_counts", "=", "bincount", "(", "indices", ",", "minlength", "=", "n_samples", ")", "curr_sample_weight", "*=", "sample_counts", "if", "(", "class_weight", "==", "'subsample'", ")", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "simplefilter", "(", "'ignore'", ",", "DeprecationWarning", ")", "curr_sample_weight", "*=", "compute_sample_weight", "(", "'auto'", ",", "y", ",", "indices", ")", "elif", "(", "class_weight", "==", "'balanced_subsample'", ")", ":", "curr_sample_weight", "*=", "compute_sample_weight", "(", "'balanced'", ",", "y", ",", "indices", ")", "tree", ".", "fit", "(", "X", ",", "y", ",", "sample_weight", "=", "curr_sample_weight", ",", "check_input", "=", "False", ")", "else", ":", "tree", ".", "fit", "(", "X", ",", "y", ",", "sample_weight", "=", "sample_weight", ",", "check_input", "=", "False", ")", "return", "tree" ]
private function used to fit a single tree in parallel .
train
false
44,509
def read_files(*filenames): output = [] for filename in filenames: f = codecs.open(filename, encoding=u'utf-8') try: output.append(f.read()) finally: f.close() return u'\n\n'.join(output)
[ "def", "read_files", "(", "*", "filenames", ")", ":", "output", "=", "[", "]", "for", "filename", "in", "filenames", ":", "f", "=", "codecs", ".", "open", "(", "filename", ",", "encoding", "=", "u'utf-8'", ")", "try", ":", "output", ".", "append", "(", "f", ".", "read", "(", ")", ")", "finally", ":", "f", ".", "close", "(", ")", "return", "u'\\n\\n'", ".", "join", "(", "output", ")" ]
output the contents of one or more files to a single concatenated string .
train
true
44,510
def _get_neighbor_conf(neigh_ip_address): neigh_conf = CORE_MANAGER.neighbors_conf.get_neighbor_conf(neigh_ip_address) if (not neigh_conf): raise RuntimeConfigError(desc=('No Neighbor configuration with IP address %s' % neigh_ip_address)) assert isinstance(neigh_conf, NeighborConf) return neigh_conf
[ "def", "_get_neighbor_conf", "(", "neigh_ip_address", ")", ":", "neigh_conf", "=", "CORE_MANAGER", ".", "neighbors_conf", ".", "get_neighbor_conf", "(", "neigh_ip_address", ")", "if", "(", "not", "neigh_conf", ")", ":", "raise", "RuntimeConfigError", "(", "desc", "=", "(", "'No Neighbor configuration with IP address %s'", "%", "neigh_ip_address", ")", ")", "assert", "isinstance", "(", "neigh_conf", ",", "NeighborConf", ")", "return", "neigh_conf" ]
returns neighbor configuration for given neighbor ip address .
train
true
44,511
def getInsetPointsByInsetLoops(insetLoops, inside, loops, radius): insetPointsByInsetLoops = [] for insetLoop in insetLoops: insetPointsByInsetLoops += getInsetPointsByInsetLoop(insetLoop, inside, loops, radius) return insetPointsByInsetLoops
[ "def", "getInsetPointsByInsetLoops", "(", "insetLoops", ",", "inside", ",", "loops", ",", "radius", ")", ":", "insetPointsByInsetLoops", "=", "[", "]", "for", "insetLoop", "in", "insetLoops", ":", "insetPointsByInsetLoops", "+=", "getInsetPointsByInsetLoop", "(", "insetLoop", ",", "inside", ",", "loops", ",", "radius", ")", "return", "insetPointsByInsetLoops" ]
get the inset points of the inset loops inside the loops .
train
false
44,512
def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, failure=None, ending=False, log_failure=True): with ConnectionContext(conf, connection_pool) as conn: if failure: failure = rpc_common.serialize_remote_exception(failure, log_failure) try: msg = {'result': reply, 'failure': failure} except TypeError: msg = {'result': dict(((k, repr(v)) for (k, v) in reply.__dict__.iteritems())), 'failure': failure} if ending: msg['ending'] = True _add_unique_id(msg) if reply_q: msg['_msg_id'] = msg_id conn.direct_send(reply_q, rpc_common.serialize_msg(msg)) else: conn.direct_send(msg_id, rpc_common.serialize_msg(msg))
[ "def", "msg_reply", "(", "conf", ",", "msg_id", ",", "reply_q", ",", "connection_pool", ",", "reply", "=", "None", ",", "failure", "=", "None", ",", "ending", "=", "False", ",", "log_failure", "=", "True", ")", ":", "with", "ConnectionContext", "(", "conf", ",", "connection_pool", ")", "as", "conn", ":", "if", "failure", ":", "failure", "=", "rpc_common", ".", "serialize_remote_exception", "(", "failure", ",", "log_failure", ")", "try", ":", "msg", "=", "{", "'result'", ":", "reply", ",", "'failure'", ":", "failure", "}", "except", "TypeError", ":", "msg", "=", "{", "'result'", ":", "dict", "(", "(", "(", "k", ",", "repr", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "reply", ".", "__dict__", ".", "iteritems", "(", ")", ")", ")", ",", "'failure'", ":", "failure", "}", "if", "ending", ":", "msg", "[", "'ending'", "]", "=", "True", "_add_unique_id", "(", "msg", ")", "if", "reply_q", ":", "msg", "[", "'_msg_id'", "]", "=", "msg_id", "conn", ".", "direct_send", "(", "reply_q", ",", "rpc_common", ".", "serialize_msg", "(", "msg", ")", ")", "else", ":", "conn", ".", "direct_send", "(", "msg_id", ",", "rpc_common", ".", "serialize_msg", "(", "msg", ")", ")" ]
sends a reply or an error on the channel signified by msg_id .
train
false
44,513
def is_series_episode(title): title = title.strip() if _split_series_episode(title)[0]: return 1 return 0
[ "def", "is_series_episode", "(", "title", ")", ":", "title", "=", "title", ".", "strip", "(", ")", "if", "_split_series_episode", "(", "title", ")", "[", "0", "]", ":", "return", "1", "return", "0" ]
return true if title is an series episode .
train
false
44,516
def ordered_obj(obj): if isinstance(obj, dict): return sorted(((k, ordered_obj(v)) for (k, v) in obj.items())) if isinstance(obj, list): return sorted((ordered_obj(x) for x in obj)) else: return obj
[ "def", "ordered_obj", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "sorted", "(", "(", "(", "k", ",", "ordered_obj", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "obj", ".", "items", "(", ")", ")", ")", "if", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "sorted", "(", "(", "ordered_obj", "(", "x", ")", "for", "x", "in", "obj", ")", ")", "else", ":", "return", "obj" ]
order object for comparison purposes .
train
false
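The ordered_obj record above normalizes nested dicts and lists so two JSON-like objects can be compared regardless of key or element order; a minimal sketch under that reading:

def ordered_obj(obj):
    if isinstance(obj, dict):
        return sorted((k, ordered_obj(v)) for k, v in obj.items())
    if isinstance(obj, list):
        return sorted(ordered_obj(x) for x in obj)
    return obj

a = {'x': [3, 1, 2], 'y': 'z'}
b = {'y': 'z', 'x': [2, 3, 1]}
print(ordered_obj(a) == ordered_obj(b))  # True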
44,517
def google_ajax(query): if isinstance(query, unicode): query = query.encode('utf-8') uri = 'http://ajax.googleapis.com/ajax/services/search/web' args = ('?v=1.0&safe=off&q=' + web.urllib.quote(query)) handler = web.urllib._urlopener web.urllib._urlopener = Grab() bytes = web.get((uri + args)) web.urllib._urlopener = handler return web.json(bytes)
[ "def", "google_ajax", "(", "query", ")", ":", "if", "isinstance", "(", "query", ",", "unicode", ")", ":", "query", "=", "query", ".", "encode", "(", "'utf-8'", ")", "uri", "=", "'http://ajax.googleapis.com/ajax/services/search/web'", "args", "=", "(", "'?v=1.0&safe=off&q='", "+", "web", ".", "urllib", ".", "quote", "(", "query", ")", ")", "handler", "=", "web", ".", "urllib", ".", "_urlopener", "web", ".", "urllib", ".", "_urlopener", "=", "Grab", "(", ")", "bytes", "=", "web", ".", "get", "(", "(", "uri", "+", "args", ")", ")", "web", ".", "urllib", ".", "_urlopener", "=", "handler", "return", "web", ".", "json", "(", "bytes", ")" ]
search using ajaxsearch .
train
false
44,519
def test_denoise_tv_chambolle_3d(): (x, y, z) = np.ogrid[0:40, 0:40, 0:40] mask = (((((x - 22) ** 2) + ((y - 20) ** 2)) + ((z - 17) ** 2)) < (8 ** 2)) mask = (100 * mask.astype(np.float)) mask += 60 mask += (20 * np.random.rand(*mask.shape)) mask[(mask < 0)] = 0 mask[(mask > 255)] = 255 res = restoration.denoise_tv_chambolle(mask.astype(np.uint8), weight=0.1) assert_((res.dtype == np.float)) assert_(((res.std() * 255) < mask.std()))
[ "def", "test_denoise_tv_chambolle_3d", "(", ")", ":", "(", "x", ",", "y", ",", "z", ")", "=", "np", ".", "ogrid", "[", "0", ":", "40", ",", "0", ":", "40", ",", "0", ":", "40", "]", "mask", "=", "(", "(", "(", "(", "(", "x", "-", "22", ")", "**", "2", ")", "+", "(", "(", "y", "-", "20", ")", "**", "2", ")", ")", "+", "(", "(", "z", "-", "17", ")", "**", "2", ")", ")", "<", "(", "8", "**", "2", ")", ")", "mask", "=", "(", "100", "*", "mask", ".", "astype", "(", "np", ".", "float", ")", ")", "mask", "+=", "60", "mask", "+=", "(", "20", "*", "np", ".", "random", ".", "rand", "(", "*", "mask", ".", "shape", ")", ")", "mask", "[", "(", "mask", "<", "0", ")", "]", "=", "0", "mask", "[", "(", "mask", ">", "255", ")", "]", "=", "255", "res", "=", "restoration", ".", "denoise_tv_chambolle", "(", "mask", ".", "astype", "(", "np", ".", "uint8", ")", ",", "weight", "=", "0.1", ")", "assert_", "(", "(", "res", ".", "dtype", "==", "np", ".", "float", ")", ")", "assert_", "(", "(", "(", "res", ".", "std", "(", ")", "*", "255", ")", "<", "mask", ".", "std", "(", ")", ")", ")" ]
apply the tv denoising algorithm on a 3d image representing a sphere .
train
false
44,520
def put_headers_in_environ(headers, environ): for (key, value) in headers: environ[('HTTP_%s' % key.upper().replace('-', '_'))] = value
[ "def", "put_headers_in_environ", "(", "headers", ",", "environ", ")", ":", "for", "(", "key", ",", "value", ")", "in", "headers", ":", "environ", "[", "(", "'HTTP_%s'", "%", "key", ".", "upper", "(", ")", ".", "replace", "(", "'-'", ",", "'_'", ")", ")", "]", "=", "value" ]
given a list of headers .
train
true
44,522
@contextlib.contextmanager def _wrap_text(textobj): if textobj.get_wrap(): old_text = textobj.get_text() try: textobj.set_text(textobj._get_wrapped_text()) (yield textobj) finally: textobj.set_text(old_text) else: (yield textobj)
[ "@", "contextlib", ".", "contextmanager", "def", "_wrap_text", "(", "textobj", ")", ":", "if", "textobj", ".", "get_wrap", "(", ")", ":", "old_text", "=", "textobj", ".", "get_text", "(", ")", "try", ":", "textobj", ".", "set_text", "(", "textobj", ".", "_get_wrapped_text", "(", ")", ")", "(", "yield", "textobj", ")", "finally", ":", "textobj", ".", "set_text", "(", "old_text", ")", "else", ":", "(", "yield", "textobj", ")" ]
temporarily inserts newlines to the text if the wrap option is enabled .
train
false
44,523
def _get_apparent_body_position(body, time, ephemeris): if (ephemeris is None): ephemeris = solar_system_ephemeris.get() if ((ephemeris == u'builtin') and (body.lower() == u'moon')): return get_body_barycentric(body, time, ephemeris) delta_light_travel_time = (20.0 * u.s) emitted_time = time light_travel_time = (0.0 * u.s) earth_loc = get_body_barycentric(u'earth', time, ephemeris) while np.any((np.fabs(delta_light_travel_time) > (1e-08 * u.s))): body_loc = get_body_barycentric(body, emitted_time, ephemeris) earth_distance = (body_loc - earth_loc).norm() delta_light_travel_time = (light_travel_time - (earth_distance / speed_of_light)) light_travel_time = (earth_distance / speed_of_light) emitted_time = (time - light_travel_time) return get_body_barycentric(body, emitted_time, ephemeris)
[ "def", "_get_apparent_body_position", "(", "body", ",", "time", ",", "ephemeris", ")", ":", "if", "(", "ephemeris", "is", "None", ")", ":", "ephemeris", "=", "solar_system_ephemeris", ".", "get", "(", ")", "if", "(", "(", "ephemeris", "==", "u'builtin'", ")", "and", "(", "body", ".", "lower", "(", ")", "==", "u'moon'", ")", ")", ":", "return", "get_body_barycentric", "(", "body", ",", "time", ",", "ephemeris", ")", "delta_light_travel_time", "=", "(", "20.0", "*", "u", ".", "s", ")", "emitted_time", "=", "time", "light_travel_time", "=", "(", "0.0", "*", "u", ".", "s", ")", "earth_loc", "=", "get_body_barycentric", "(", "u'earth'", ",", "time", ",", "ephemeris", ")", "while", "np", ".", "any", "(", "(", "np", ".", "fabs", "(", "delta_light_travel_time", ")", ">", "(", "1e-08", "*", "u", ".", "s", ")", ")", ")", ":", "body_loc", "=", "get_body_barycentric", "(", "body", ",", "emitted_time", ",", "ephemeris", ")", "earth_distance", "=", "(", "body_loc", "-", "earth_loc", ")", ".", "norm", "(", ")", "delta_light_travel_time", "=", "(", "light_travel_time", "-", "(", "earth_distance", "/", "speed_of_light", ")", ")", "light_travel_time", "=", "(", "earth_distance", "/", "speed_of_light", ")", "emitted_time", "=", "(", "time", "-", "light_travel_time", ")", "return", "get_body_barycentric", "(", "body", ",", "emitted_time", ",", "ephemeris", ")" ]
calculate the apparent position of body body relative to earth .
train
false
44,524
def test_legend_at_bottom(Chart): chart = Chart(legend_at_bottom=True) chart.add('1', [4, (-5), 123, 59, 38]) chart.add('2', [89, 0, 8, 0.12, 8]) lab = chart.render() chart.legend_at_bottom = False assert (lab != chart.render())
[ "def", "test_legend_at_bottom", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", "legend_at_bottom", "=", "True", ")", "chart", ".", "add", "(", "'1'", ",", "[", "4", ",", "(", "-", "5", ")", ",", "123", ",", "59", ",", "38", "]", ")", "chart", ".", "add", "(", "'2'", ",", "[", "89", ",", "0", ",", "8", ",", "0.12", ",", "8", "]", ")", "lab", "=", "chart", ".", "render", "(", ")", "chart", ".", "legend_at_bottom", "=", "False", "assert", "(", "lab", "!=", "chart", ".", "render", "(", ")", ")" ]
test legend at bottom option .
train
false
44,525
def add_summary_mapping(otu_table, mapping, level, md_as_string=False, md_identifier='taxonomy'): (counts_by_consensus, sample_map) = sum_counts_by_consensus(otu_table, level, 'Other', md_as_string, md_identifier) summary = defaultdict(list) for row in mapping: sample_id = row[0] if (sample_id not in sample_map): continue otu_idx = sample_map[sample_id] for (consensus, counts) in sorted(counts_by_consensus.items()): summary[sample_id].append(counts[otu_idx]) taxon_order = sorted(counts_by_consensus.keys()) return (summary, taxon_order)
[ "def", "add_summary_mapping", "(", "otu_table", ",", "mapping", ",", "level", ",", "md_as_string", "=", "False", ",", "md_identifier", "=", "'taxonomy'", ")", ":", "(", "counts_by_consensus", ",", "sample_map", ")", "=", "sum_counts_by_consensus", "(", "otu_table", ",", "level", ",", "'Other'", ",", "md_as_string", ",", "md_identifier", ")", "summary", "=", "defaultdict", "(", "list", ")", "for", "row", "in", "mapping", ":", "sample_id", "=", "row", "[", "0", "]", "if", "(", "sample_id", "not", "in", "sample_map", ")", ":", "continue", "otu_idx", "=", "sample_map", "[", "sample_id", "]", "for", "(", "consensus", ",", "counts", ")", "in", "sorted", "(", "counts_by_consensus", ".", "items", "(", ")", ")", ":", "summary", "[", "sample_id", "]", ".", "append", "(", "counts", "[", "otu_idx", "]", ")", "taxon_order", "=", "sorted", "(", "counts_by_consensus", ".", "keys", "(", ")", ")", "return", "(", "summary", ",", "taxon_order", ")" ]
returns a sample summary of sample counts by taxon ; the summary is keyed by sample_id .
train
false
44,526
@login_required @require_http_methods(['GET', 'POST']) def remove_leader(request, group_slug, user_id): prof = get_object_or_404(GroupProfile, slug=group_slug) user = get_object_or_404(User, id=user_id) if (not _user_can_manage_leaders(request.user, prof)): raise PermissionDenied if (request.method == 'POST'): prof.leaders.remove(user) msg = _('{user} removed from the group leaders successfully!').format(user=user.username) messages.add_message(request, messages.SUCCESS, msg) return HttpResponseRedirect(prof.get_absolute_url()) return render(request, 'groups/confirm_remove_leader.html', {'profile': prof, 'leader': user})
[ "@", "login_required", "@", "require_http_methods", "(", "[", "'GET'", ",", "'POST'", "]", ")", "def", "remove_leader", "(", "request", ",", "group_slug", ",", "user_id", ")", ":", "prof", "=", "get_object_or_404", "(", "GroupProfile", ",", "slug", "=", "group_slug", ")", "user", "=", "get_object_or_404", "(", "User", ",", "id", "=", "user_id", ")", "if", "(", "not", "_user_can_manage_leaders", "(", "request", ".", "user", ",", "prof", ")", ")", ":", "raise", "PermissionDenied", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "prof", ".", "leaders", ".", "remove", "(", "user", ")", "msg", "=", "_", "(", "'{user} removed from the group leaders successfully!'", ")", ".", "format", "(", "user", "=", "user", ".", "username", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "msg", ")", "return", "HttpResponseRedirect", "(", "prof", ".", "get_absolute_url", "(", ")", ")", "return", "render", "(", "request", ",", "'groups/confirm_remove_leader.html'", ",", "{", "'profile'", ":", "prof", ",", "'leader'", ":", "user", "}", ")" ]
remove a leader from the group .
train
false
44,527
def show_router(router, profile=None): conn = _auth(profile) return conn.show_router(router)
[ "def", "show_router", "(", "router", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_router", "(", "router", ")" ]
fetches information of a certain router .
train
false
44,528
def get_auth_traps_enabled(): reg_ret = __salt__['reg.read_value'](_HKEY, _SNMP_KEY, 'EnableAuthenticationTraps') if (reg_ret['vdata'] == '(value not set)'): return False return bool((reg_ret['vdata'] or 0))
[ "def", "get_auth_traps_enabled", "(", ")", ":", "reg_ret", "=", "__salt__", "[", "'reg.read_value'", "]", "(", "_HKEY", ",", "_SNMP_KEY", ",", "'EnableAuthenticationTraps'", ")", "if", "(", "reg_ret", "[", "'vdata'", "]", "==", "'(value not set)'", ")", ":", "return", "False", "return", "bool", "(", "(", "reg_ret", "[", "'vdata'", "]", "or", "0", ")", ")" ]
determine whether the host is configured to send authentication traps .
train
false
44,531
def single_client_noauth(h=client_context.host, p=client_context.port, **kwargs): return _mongo_client(h, p, authenticate=False, direct=True, **kwargs)
[ "def", "single_client_noauth", "(", "h", "=", "client_context", ".", "host", ",", "p", "=", "client_context", ".", "port", ",", "**", "kwargs", ")", ":", "return", "_mongo_client", "(", "h", ",", "p", ",", "authenticate", "=", "False", ",", "direct", "=", "True", ",", "**", "kwargs", ")" ]
make a direct connection .
train
false
44,532
def get_msvcr(): msc_pos = sys.version.find('MSC v.') if (msc_pos != (-1)): msc_ver = sys.version[(msc_pos + 6):(msc_pos + 10)] if (msc_ver == '1300'): return ['msvcr70'] elif (msc_ver == '1310'): return ['msvcr71'] elif (msc_ver == '1400'): return ['msvcr80'] elif (msc_ver == '1500'): return ['msvcr90'] elif (msc_ver == '1600'): return ['msvcr100'] else: raise ValueError(('Unknown MS Compiler version %s ' % msc_ver))
[ "def", "get_msvcr", "(", ")", ":", "msc_pos", "=", "sys", ".", "version", ".", "find", "(", "'MSC v.'", ")", "if", "(", "msc_pos", "!=", "(", "-", "1", ")", ")", ":", "msc_ver", "=", "sys", ".", "version", "[", "(", "msc_pos", "+", "6", ")", ":", "(", "msc_pos", "+", "10", ")", "]", "if", "(", "msc_ver", "==", "'1300'", ")", ":", "return", "[", "'msvcr70'", "]", "elif", "(", "msc_ver", "==", "'1310'", ")", ":", "return", "[", "'msvcr71'", "]", "elif", "(", "msc_ver", "==", "'1400'", ")", ":", "return", "[", "'msvcr80'", "]", "elif", "(", "msc_ver", "==", "'1500'", ")", ":", "return", "[", "'msvcr90'", "]", "elif", "(", "msc_ver", "==", "'1600'", ")", ":", "return", "[", "'msvcr100'", "]", "else", ":", "raise", "ValueError", "(", "(", "'Unknown MS Compiler version %s '", "%", "msc_ver", ")", ")" ]
include the appropriate msvc runtime library if python was built with msvc 7.0 or later .
train
false
44,533
def check_enough_semaphores(): nsems_min = 256 try: nsems = os.sysconf('SC_SEM_NSEMS_MAX') except (AttributeError, ValueError): return if ((nsems == (-1)) or (nsems >= nsems_min)): return raise unittest.SkipTest(("The OS doesn't support enough semaphores to run the test (required: %d)." % nsems_min))
[ "def", "check_enough_semaphores", "(", ")", ":", "nsems_min", "=", "256", "try", ":", "nsems", "=", "os", ".", "sysconf", "(", "'SC_SEM_NSEMS_MAX'", ")", "except", "(", "AttributeError", ",", "ValueError", ")", ":", "return", "if", "(", "(", "nsems", "==", "(", "-", "1", ")", ")", "or", "(", "nsems", ">=", "nsems_min", ")", ")", ":", "return", "raise", "unittest", ".", "SkipTest", "(", "(", "\"The OS doesn't support enough semaphores to run the test (required: %d).\"", "%", "nsems_min", ")", ")" ]
check that the system supports enough semaphores to run the test .
train
false
44,534
def get_credit_request_status(username, course_key): credit_request = CreditRequest.get_user_request_status(username, course_key) return ({'uuid': credit_request.uuid, 'timestamp': credit_request.modified, 'course_key': credit_request.course.course_key, 'provider': {'id': credit_request.provider.provider_id, 'display_name': credit_request.provider.display_name}, 'status': credit_request.status} if credit_request else {})
[ "def", "get_credit_request_status", "(", "username", ",", "course_key", ")", ":", "credit_request", "=", "CreditRequest", ".", "get_user_request_status", "(", "username", ",", "course_key", ")", "return", "(", "{", "'uuid'", ":", "credit_request", ".", "uuid", ",", "'timestamp'", ":", "credit_request", ".", "modified", ",", "'course_key'", ":", "credit_request", ".", "course", ".", "course_key", ",", "'provider'", ":", "{", "'id'", ":", "credit_request", ".", "provider", ".", "provider_id", ",", "'display_name'", ":", "credit_request", ".", "provider", ".", "display_name", "}", ",", "'status'", ":", "credit_request", ".", "status", "}", "if", "credit_request", "else", "{", "}", ")" ]
get the credit request status .
train
false
44,535
def course_specific_register(request, course_id): course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = modulestore().get_course(course_key) if (not course): return redirect_with_get('register_user', request.GET) if (settings.FEATURES.get('AUTH_USE_SHIB') and course.enrollment_domain and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)): return redirect_with_get('shib-login', request.GET) return redirect_with_get('register_user', request.GET)
[ "def", "course_specific_register", "(", "request", ",", "course_id", ")", ":", "course_key", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ")", "if", "(", "not", "course", ")", ":", "return", "redirect_with_get", "(", "'register_user'", ",", "request", ".", "GET", ")", "if", "(", "settings", ".", "FEATURES", ".", "get", "(", "'AUTH_USE_SHIB'", ")", "and", "course", ".", "enrollment_domain", "and", "course", ".", "enrollment_domain", ".", "startswith", "(", "SHIBBOLETH_DOMAIN_PREFIX", ")", ")", ":", "return", "redirect_with_get", "(", "'shib-login'", ",", "request", ".", "GET", ")", "return", "redirect_with_get", "(", "'register_user'", ",", "request", ".", "GET", ")" ]
dispatcher function for selecting the specific registration method required by the course .
train
false
44,537
@lru_cache(maxsize=128) def is_mime_represents_text(input_mime): input_mime_l = input_mime.lower() for text_word in text_like_mime_keywords: if (text_word in input_mime_l): return True return False
[ "@", "lru_cache", "(", "maxsize", "=", "128", ")", "def", "is_mime_represents_text", "(", "input_mime", ")", ":", "input_mime_l", "=", "input_mime", ".", "lower", "(", ")", "for", "text_word", "in", "text_like_mime_keywords", ":", "if", "(", "text_word", "in", "input_mime_l", ")", ":", "return", "True", "return", "False" ]
determine whether a mime type represents text .
train
false
44,539
def caching_wire_encode(obj): result = _wire_encode_cache.get(obj) if (result is None): result = wire_encode(obj) _wire_encode_cache.put(obj, result) return result
[ "def", "caching_wire_encode", "(", "obj", ")", ":", "result", "=", "_wire_encode_cache", ".", "get", "(", "obj", ")", "if", "(", "result", "is", "None", ")", ":", "result", "=", "wire_encode", "(", "obj", ")", "_wire_encode_cache", ".", "put", "(", "obj", ",", "result", ")", "return", "result" ]
encode an object to bytes using wire_encode and cache the result .
train
false
44,540
def splitter(orientation, *widgets): layout = QtWidgets.QSplitter() layout.setOrientation(orientation) layout.setHandleWidth(defs.handle_width) layout.setChildrenCollapsible(True) for (idx, widget) in enumerate(widgets): layout.addWidget(widget) layout.setStretchFactor(idx, 1) layout.handle(1).setAttribute(Qt.WA_Hover) return layout
[ "def", "splitter", "(", "orientation", ",", "*", "widgets", ")", ":", "layout", "=", "QtWidgets", ".", "QSplitter", "(", ")", "layout", ".", "setOrientation", "(", "orientation", ")", "layout", ".", "setHandleWidth", "(", "defs", ".", "handle_width", ")", "layout", ".", "setChildrenCollapsible", "(", "True", ")", "for", "(", "idx", ",", "widget", ")", "in", "enumerate", "(", "widgets", ")", ":", "layout", ".", "addWidget", "(", "widget", ")", "layout", ".", "setStretchFactor", "(", "idx", ",", "1", ")", "layout", ".", "handle", "(", "1", ")", ".", "setAttribute", "(", "Qt", ".", "WA_Hover", ")", "return", "layout" ]
create a splitter over the specified widgets .
train
false
44,541
def parse_hosts(hostsfile='/etc/hosts', hosts=None): if (not hosts): try: with salt.utils.fopen(hostsfile, 'r') as fp_: hosts = fp_.read() except Exception: return 'Error: hosts data was not found' hostsdict = {} for line in hosts.splitlines(): if (not line): continue if line.startswith('#'): continue comps = line.split() ip = comps[0] aliases = comps[1:] hostsdict.setdefault(ip, []).extend(aliases) return hostsdict
[ "def", "parse_hosts", "(", "hostsfile", "=", "'/etc/hosts'", ",", "hosts", "=", "None", ")", ":", "if", "(", "not", "hosts", ")", ":", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "hostsfile", ",", "'r'", ")", "as", "fp_", ":", "hosts", "=", "fp_", ".", "read", "(", ")", "except", "Exception", ":", "return", "'Error: hosts data was not found'", "hostsdict", "=", "{", "}", "for", "line", "in", "hosts", ".", "splitlines", "(", ")", ":", "if", "(", "not", "line", ")", ":", "continue", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "comps", "=", "line", ".", "split", "(", ")", "ip", "=", "comps", "[", "0", "]", "aliases", "=", "comps", "[", "1", ":", "]", "hostsdict", ".", "setdefault", "(", "ip", ",", "[", "]", ")", ".", "extend", "(", "aliases", ")", "return", "hostsdict" ]
parse /etc/hosts file .
train
true
44,542
def _regexSearchKeyValueCombo(policy_data, policy_regpath, policy_regkey): if policy_data: specialValueRegex = '(\\*\\*Del\\.|\\*\\*DelVals\\.){0,1}' _thisSearch = '\\[{1}{0};{3}{2}{0};'.format(chr(0), re.escape(policy_regpath), re.escape(policy_regkey), specialValueRegex) match = re.search(_thisSearch, policy_data, re.IGNORECASE) if match: return policy_data[match.start():(policy_data.index(']', match.end()) + 1)] return None
[ "def", "_regexSearchKeyValueCombo", "(", "policy_data", ",", "policy_regpath", ",", "policy_regkey", ")", ":", "if", "policy_data", ":", "specialValueRegex", "=", "'(\\\\*\\\\*Del\\\\.|\\\\*\\\\*DelVals\\\\.){0,1}'", "_thisSearch", "=", "'\\\\[{1}{0};{3}{2}{0};'", ".", "format", "(", "chr", "(", "0", ")", ",", "re", ".", "escape", "(", "policy_regpath", ")", ",", "re", ".", "escape", "(", "policy_regkey", ")", ",", "specialValueRegex", ")", "match", "=", "re", ".", "search", "(", "_thisSearch", ",", "policy_data", ",", "re", ".", "IGNORECASE", ")", "if", "match", ":", "return", "policy_data", "[", "match", ".", "start", "(", ")", ":", "(", "policy_data", ".", "index", "(", "']'", ",", "match", ".", "end", "(", ")", ")", "+", "1", ")", "]", "return", "None" ]
helper function to do a search of policy data from a registry .
train
false
44,544
def assign_role_to_user(role_db, user_db, description=None): role_assignment_db = UserRoleAssignmentDB(user=user_db.name, role=role_db.name, description=description) role_assignment_db = UserRoleAssignment.add_or_update(role_assignment_db) return role_assignment_db
[ "def", "assign_role_to_user", "(", "role_db", ",", "user_db", ",", "description", "=", "None", ")", ":", "role_assignment_db", "=", "UserRoleAssignmentDB", "(", "user", "=", "user_db", ".", "name", ",", "role", "=", "role_db", ".", "name", ",", "description", "=", "description", ")", "role_assignment_db", "=", "UserRoleAssignment", ".", "add_or_update", "(", "role_assignment_db", ")", "return", "role_assignment_db" ]
assign role to a user .
train
false
44,545
def dict_with_files_option(s): return dict_option(s)
[ "def", "dict_with_files_option", "(", "s", ")", ":", "return", "dict_option", "(", "s", ")" ]
same as the dict option .
train
false
44,546
@translations.command('compile') @click.option('is_all', '--all', '-a', default=True, is_flag=True, help='Compiles the plugin translations as well.') @click.option('--plugin', '-p', type=click.STRING, help='Compiles the translations for a given plugin.') def compile_translation(is_all, plugin): if (plugin is not None): validate_plugin(plugin) click.secho('[+] Compiling language files for plugin {}...'.format(plugin), fg='cyan') compile_plugin_translations(plugin) else: click.secho('[+] Compiling language files...', fg='cyan') compile_translations(include_plugins=is_all)
[ "@", "translations", ".", "command", "(", "'compile'", ")", "@", "click", ".", "option", "(", "'is_all'", ",", "'--all'", ",", "'-a'", ",", "default", "=", "True", ",", "is_flag", "=", "True", ",", "help", "=", "'Compiles the plugin translations as well.'", ")", "@", "click", ".", "option", "(", "'--plugin'", ",", "'-p'", ",", "type", "=", "click", ".", "STRING", ",", "help", "=", "'Compiles the translations for a given plugin.'", ")", "def", "compile_translation", "(", "is_all", ",", "plugin", ")", ":", "if", "(", "plugin", "is", "not", "None", ")", ":", "validate_plugin", "(", "plugin", ")", "click", ".", "secho", "(", "'[+] Compiling language files for plugin {}...'", ".", "format", "(", "plugin", ")", ",", "fg", "=", "'cyan'", ")", "compile_plugin_translations", "(", "plugin", ")", "else", ":", "click", ".", "secho", "(", "'[+] Compiling language files...'", ",", "fg", "=", "'cyan'", ")", "compile_translations", "(", "include_plugins", "=", "is_all", ")" ]
compiles the translations .
train
false
44,547
def backup_dir_exists(backup_dir): if (not os.path.exists(backup_dir)): logging.error('Error while accessing backup files.') logging.info('Please provide a valid backup directory.') return False return True
[ "def", "backup_dir_exists", "(", "backup_dir", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "backup_dir", ")", ")", ":", "logging", ".", "error", "(", "'Error while accessing backup files.'", ")", "logging", ".", "info", "(", "'Please provide a valid backup directory.'", ")", "return", "False", "return", "True" ]
checks if the given backup directory exists .
train
false
44,548
def uses_settings(handle, path, lazy_load=True): config = get_config(handle) if ((not lazy_load) and (not config.get('settings_loaded', False))): config.load(path) config.set('settings_loaded', 'true') def decorator(func): def wrapped(*args, **kwargs): if (lazy_load and (not config.get('settings_loaded', False))): config.load(path) config.set('settings_loaded', 'true') if ('config' in inspect.getargspec(func).args): return func(config=config, *args, **kwargs) else: return func(*args, **kwargs) return wrapped return decorator
[ "def", "uses_settings", "(", "handle", ",", "path", ",", "lazy_load", "=", "True", ")", ":", "config", "=", "get_config", "(", "handle", ")", "if", "(", "(", "not", "lazy_load", ")", "and", "(", "not", "config", ".", "get", "(", "'settings_loaded'", ",", "False", ")", ")", ")", ":", "config", ".", "load", "(", "path", ")", "config", ".", "set", "(", "'settings_loaded'", ",", "'true'", ")", "def", "decorator", "(", "func", ")", ":", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "lazy_load", "and", "(", "not", "config", ".", "get", "(", "'settings_loaded'", ",", "False", ")", ")", ")", ":", "config", ".", "load", "(", "path", ")", "config", ".", "set", "(", "'settings_loaded'", ",", "'true'", ")", "if", "(", "'config'", "in", "inspect", ".", "getargspec", "(", "func", ")", ".", "args", ")", ":", "return", "func", "(", "config", "=", "config", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped", "return", "decorator" ]
provides a function that can be used as a decorator for other functions that require settings to be loaded .
train
false
44,549
def generate_unique_codename(num_words=7): while True: codename = crypto_util.genrandomid(num_words) if (len(codename) > Source.MAX_CODENAME_LEN): app.logger.warning("Generated a source codename that was too long, skipping it. This should not happen. (Codename='{}')".format(codename)) continue sid = crypto_util.hash_codename(codename) matching_sources = Source.query.filter((Source.filesystem_id == sid)).all() if (len(matching_sources) == 0): return codename
[ "def", "generate_unique_codename", "(", "num_words", "=", "7", ")", ":", "while", "True", ":", "codename", "=", "crypto_util", ".", "genrandomid", "(", "num_words", ")", "if", "(", "len", "(", "codename", ")", ">", "Source", ".", "MAX_CODENAME_LEN", ")", ":", "app", ".", "logger", ".", "warning", "(", "\"Generated a source codename that was too long, skipping it. This should not happen. (Codename='{}')\"", ".", "format", "(", "codename", ")", ")", "continue", "sid", "=", "crypto_util", ".", "hash_codename", "(", "codename", ")", "matching_sources", "=", "Source", ".", "query", ".", "filter", "(", "(", "Source", ".", "filesystem_id", "==", "sid", ")", ")", ".", "all", "(", ")", "if", "(", "len", "(", "matching_sources", ")", "==", "0", ")", ":", "return", "codename" ]
generate random codenames until we get an unused one .
train
false
44,551
def create_sort_link(pretty_name, sort_field, get_params, sort, order): get_params.append(('sort', sort_field)) if ((sort == sort_field) and (order == 'asc')): get_params.append(('order', 'desc')) else: get_params.append(('order', 'asc')) url_class = '' if (sort == sort_field): url_class = (' class="sort-icon ed-sprite-sort-%s"' % order) return (u'<a href="?%s"%s>%s</a>' % (urllib.urlencode(get_params, True), url_class, pretty_name))
[ "def", "create_sort_link", "(", "pretty_name", ",", "sort_field", ",", "get_params", ",", "sort", ",", "order", ")", ":", "get_params", ".", "append", "(", "(", "'sort'", ",", "sort_field", ")", ")", "if", "(", "(", "sort", "==", "sort_field", ")", "and", "(", "order", "==", "'asc'", ")", ")", ":", "get_params", ".", "append", "(", "(", "'order'", ",", "'desc'", ")", ")", "else", ":", "get_params", ".", "append", "(", "(", "'order'", ",", "'asc'", ")", ")", "url_class", "=", "''", "if", "(", "sort", "==", "sort_field", ")", ":", "url_class", "=", "(", "' class=\"sort-icon ed-sprite-sort-%s\"'", "%", "order", ")", "return", "(", "u'<a href=\"?%s\"%s>%s</a>'", "%", "(", "urllib", ".", "urlencode", "(", "get_params", ",", "True", ")", ",", "url_class", ",", "pretty_name", ")", ")" ]
generate table header sort links .
train
false
44,552
def algorithm_from_text(text): value = _algorithm_by_text.get(text.upper()) if (value is None): value = int(text) return value
[ "def", "algorithm_from_text", "(", "text", ")", ":", "value", "=", "_algorithm_by_text", ".", "get", "(", "text", ".", "upper", "(", ")", ")", "if", "(", "value", "is", "None", ")", ":", "value", "=", "int", "(", "text", ")", "return", "value" ]
convert text into a dnssec algorithm value , returning an int .
train
true
44,553
def kernel_metrics(): return PAIRWISE_KERNEL_FUNCTIONS
[ "def", "kernel_metrics", "(", ")", ":", "return", "PAIRWISE_KERNEL_FUNCTIONS" ]
valid metrics for pairwise_kernels ; this function simply returns the valid pairwise kernel metrics .
train
false
44,556
def make_digest_acl(username, password, read=False, write=False, create=False, delete=False, admin=False, all=False): cred = make_digest_acl_credential(username, password) return make_acl('digest', cred, read=read, write=write, create=create, delete=delete, admin=admin, all=all)
[ "def", "make_digest_acl", "(", "username", ",", "password", ",", "read", "=", "False", ",", "write", "=", "False", ",", "create", "=", "False", ",", "delete", "=", "False", ",", "admin", "=", "False", ",", "all", "=", "False", ")", ":", "cred", "=", "make_digest_acl_credential", "(", "username", ",", "password", ")", "return", "make_acl", "(", "'digest'", ",", "cred", ",", "read", "=", "read", ",", "write", "=", "write", ",", "create", "=", "create", ",", "delete", "=", "delete", ",", "admin", "=", "admin", ",", "all", "=", "all", ")" ]
create a digest acl for zookeeper with the given permissions ; this method combines :meth:make_digest_acl_credential and :meth:make_acl to create an :class:acl object appropriate for use with kazoo's acl methods .
train
false
44,557
def store_results_dir(package_dirpath, results_dirpath): tgz_filepath = path.join(package_dirpath, RESULTS_DIR_TARBALL) tgz = tarfile.open(tgz_filepath, 'w:gz') results_dirname = path.basename(results_dirpath) tgz.add(results_dirpath, results_dirname) tgz.close()
[ "def", "store_results_dir", "(", "package_dirpath", ",", "results_dirpath", ")", ":", "tgz_filepath", "=", "path", ".", "join", "(", "package_dirpath", ",", "RESULTS_DIR_TARBALL", ")", "tgz", "=", "tarfile", ".", "open", "(", "tgz_filepath", ",", "'w:gz'", ")", "results_dirname", "=", "path", ".", "basename", "(", "results_dirpath", ")", "tgz", ".", "add", "(", "results_dirpath", ",", "results_dirname", ")", "tgz", ".", "close", "(", ")" ]
make tarball of results_dirpath in package_dirpath .
train
false
44,558
def _midrule(dataset_width): if ((not dataset_width) or (dataset_width == 1)): return '\\midrule' return ' '.join([_cmidrule(colindex, dataset_width) for colindex in range(1, (dataset_width + 1))])
[ "def", "_midrule", "(", "dataset_width", ")", ":", "if", "(", "(", "not", "dataset_width", ")", "or", "(", "dataset_width", "==", "1", ")", ")", ":", "return", "'\\\\midrule'", "return", "' '", ".", "join", "(", "[", "_cmidrule", "(", "colindex", ",", "dataset_width", ")", "for", "colindex", "in", "range", "(", "1", ",", "(", "dataset_width", "+", "1", ")", ")", "]", ")" ]
generates the table midrule .
train
false
44,559
def _parse_compound_config_option_value(option_name): name_parts = option_name.split('.') name_parts.reverse() option = config.get_config() while name_parts: option = getattr(option, name_parts.pop()) return option
[ "def", "_parse_compound_config_option_value", "(", "option_name", ")", ":", "name_parts", "=", "option_name", ".", "split", "(", "'.'", ")", "name_parts", ".", "reverse", "(", ")", "option", "=", "config", ".", "get_config", "(", ")", "while", "name_parts", ":", "option", "=", "getattr", "(", "option", ",", "name_parts", ".", "pop", "(", ")", ")", "return", "option" ]
parses the value of a given config option where the option's section name is separated from the option name by a dot .
train
false
44,560
def gamma_entropy_vec(a, scale): if isinstance(scale, float): return stats.gamma.entropy(a, scale=scale) else: return np.array([stats.gamma.entropy(a_x, scale=scale_x) for (a_x, scale_x) in zip(a, scale)])
[ "def", "gamma_entropy_vec", "(", "a", ",", "scale", ")", ":", "if", "isinstance", "(", "scale", ",", "float", ")", ":", "return", "stats", ".", "gamma", ".", "entropy", "(", "a", ",", "scale", "=", "scale", ")", "else", ":", "return", "np", ".", "array", "(", "[", "stats", ".", "gamma", ".", "entropy", "(", "a_x", ",", "scale", "=", "scale_x", ")", "for", "(", "a_x", ",", "scale_x", ")", "in", "zip", "(", "a", ",", "scale", ")", "]", ")" ]
vectorized version of stats.gamma.entropy .
train
false
44,561
def doc_path(options, allow_default=True): doc_type = getattr(options, 'type', 'default') path = DOC_PATHS.get(doc_type) if ((doc_type == 'default') and (not allow_default)): print("You must specify a documentation type using '--type'. Valid options are: {options}".format(options=valid_doc_types())) sys.exit(1) if (path is None): print("Invalid documentation type '{doc_type}'. Valid options are: {options}".format(doc_type=doc_type, options=valid_doc_types())) sys.exit(1) else: return path
[ "def", "doc_path", "(", "options", ",", "allow_default", "=", "True", ")", ":", "doc_type", "=", "getattr", "(", "options", ",", "'type'", ",", "'default'", ")", "path", "=", "DOC_PATHS", ".", "get", "(", "doc_type", ")", "if", "(", "(", "doc_type", "==", "'default'", ")", "and", "(", "not", "allow_default", ")", ")", ":", "print", "(", "\"You must specify a documentation type using '--type'. Valid options are: {options}\"", ".", "format", "(", "options", "=", "valid_doc_types", "(", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "if", "(", "path", "is", "None", ")", ":", "print", "(", "\"Invalid documentation type '{doc_type}'. Valid options are: {options}\"", ".", "format", "(", "doc_type", "=", "doc_type", ",", "options", "=", "valid_doc_types", "(", ")", ")", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "return", "path" ]
parse options to determine the path to the documentation directory .
train
false
44,562
def parse_vowarning(line): result = {} match = _warning_pat.search(line) if match: result[u'warning'] = warning = match.group(u'warning') if (warning is not None): result[u'is_warning'] = (warning[0].upper() == u'W') result[u'is_exception'] = (not result[u'is_warning']) result[u'number'] = int(match.group(u'warning')[1:]) result[u'doc_url'] = u'io/votable/api_exceptions.html#{0}'.format(warning.lower()) else: result[u'is_warning'] = False result[u'is_exception'] = False result[u'is_other'] = True result[u'number'] = None result[u'doc_url'] = None try: result[u'nline'] = int(match.group(u'nline')) except ValueError: result[u'nline'] = 0 try: result[u'nchar'] = int(match.group(u'nchar')) except ValueError: result[u'nchar'] = 0 result[u'message'] = match.group(u'rest') result[u'is_something'] = True else: result[u'warning'] = None result[u'is_warning'] = False result[u'is_exception'] = False result[u'is_other'] = False result[u'is_something'] = False if (not isinstance(line, six.text_type)): line = line.decode(u'utf-8') result[u'message'] = line return result
[ "def", "parse_vowarning", "(", "line", ")", ":", "result", "=", "{", "}", "match", "=", "_warning_pat", ".", "search", "(", "line", ")", "if", "match", ":", "result", "[", "u'warning'", "]", "=", "warning", "=", "match", ".", "group", "(", "u'warning'", ")", "if", "(", "warning", "is", "not", "None", ")", ":", "result", "[", "u'is_warning'", "]", "=", "(", "warning", "[", "0", "]", ".", "upper", "(", ")", "==", "u'W'", ")", "result", "[", "u'is_exception'", "]", "=", "(", "not", "result", "[", "u'is_warning'", "]", ")", "result", "[", "u'number'", "]", "=", "int", "(", "match", ".", "group", "(", "u'warning'", ")", "[", "1", ":", "]", ")", "result", "[", "u'doc_url'", "]", "=", "u'io/votable/api_exceptions.html#{0}'", ".", "format", "(", "warning", ".", "lower", "(", ")", ")", "else", ":", "result", "[", "u'is_warning'", "]", "=", "False", "result", "[", "u'is_exception'", "]", "=", "False", "result", "[", "u'is_other'", "]", "=", "True", "result", "[", "u'number'", "]", "=", "None", "result", "[", "u'doc_url'", "]", "=", "None", "try", ":", "result", "[", "u'nline'", "]", "=", "int", "(", "match", ".", "group", "(", "u'nline'", ")", ")", "except", "ValueError", ":", "result", "[", "u'nline'", "]", "=", "0", "try", ":", "result", "[", "u'nchar'", "]", "=", "int", "(", "match", ".", "group", "(", "u'nchar'", ")", ")", "except", "ValueError", ":", "result", "[", "u'nchar'", "]", "=", "0", "result", "[", "u'message'", "]", "=", "match", ".", "group", "(", "u'rest'", ")", "result", "[", "u'is_something'", "]", "=", "True", "else", ":", "result", "[", "u'warning'", "]", "=", "None", "result", "[", "u'is_warning'", "]", "=", "False", "result", "[", "u'is_exception'", "]", "=", "False", "result", "[", "u'is_other'", "]", "=", "False", "result", "[", "u'is_something'", "]", "=", "False", "if", "(", "not", "isinstance", "(", "line", ",", "six", ".", "text_type", ")", ")", ":", "line", "=", "line", ".", "decode", "(", "u'utf-8'", ")", "result", "[", "u'message'", "]", "=", "line", "return", "result" ]
parses the vo warning string back into its parts .
train
false
44,563
def hostinterface_delete(interfaceids, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'hostinterface.delete' if isinstance(interfaceids, list): params = interfaceids else: params = [interfaceids] ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result']['interfaceids'] else: raise KeyError except KeyError: return ret
[ "def", "hostinterface_delete", "(", "interfaceids", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'hostinterface.delete'", "if", "isinstance", "(", "interfaceids", ",", "list", ")", ":", "params", "=", "interfaceids", "else", ":", "params", "=", "[", "interfaceids", "]", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'interfaceids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
delete host interface .
train
true
44,564
def list_securitygroup(call=None): if (call == 'action'): raise SaltCloudSystemExit('The list_nodes function must be called with -f or --function.') params = {'Action': 'DescribeSecurityGroups', 'RegionId': get_location()} result = query(params) if ('Code' in result): return {} ret = {} for sg in result['SecurityGroups']['SecurityGroup']: ret[sg['SecurityGroupId']] = {} for item in sg: ret[sg['SecurityGroupId']][item] = sg[item] return ret
[ "def", "list_securitygroup", "(", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_nodes function must be called with -f or --function.'", ")", "params", "=", "{", "'Action'", ":", "'DescribeSecurityGroups'", ",", "'RegionId'", ":", "get_location", "(", ")", "}", "result", "=", "query", "(", "params", ")", "if", "(", "'Code'", "in", "result", ")", ":", "return", "{", "}", "ret", "=", "{", "}", "for", "sg", "in", "result", "[", "'SecurityGroups'", "]", "[", "'SecurityGroup'", "]", ":", "ret", "[", "sg", "[", "'SecurityGroupId'", "]", "]", "=", "{", "}", "for", "item", "in", "sg", ":", "ret", "[", "sg", "[", "'SecurityGroupId'", "]", "]", "[", "item", "]", "=", "sg", "[", "item", "]", "return", "ret" ]
return a list of security groups .
train
true
44,565
def check_logged_in(console): console.write('\r\n\r\n') time.sleep(1) prompt = read_serial(console) if (('>' in prompt) or ('#' in prompt)): return True else: return False
[ "def", "check_logged_in", "(", "console", ")", ":", "console", ".", "write", "(", "'\\r\\n\\r\\n'", ")", "time", ".", "sleep", "(", "1", ")", "prompt", "=", "read_serial", "(", "console", ")", "if", "(", "(", "'>'", "in", "prompt", ")", "or", "(", "'#'", "in", "prompt", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
check if logged in to router .
train
false
44,566
def make_url_https(url): new_url = UrlParser(url) new_url.scheme = 'https' if (not new_url.hostname): new_url.hostname = request.host.lower() return new_url.unparse()
[ "def", "make_url_https", "(", "url", ")", ":", "new_url", "=", "UrlParser", "(", "url", ")", "new_url", ".", "scheme", "=", "'https'", "if", "(", "not", "new_url", ".", "hostname", ")", ":", "new_url", ".", "hostname", "=", "request", ".", "host", ".", "lower", "(", ")", "return", "new_url", ".", "unparse", "(", ")" ]
turn a possibly relative url into a fully-qualified https url .
train
false
44,569
@treeio_login_required @handle_response_format def report_filter(request, report_id, field_name, response_format='html'): report = get_object_or_404(Report, pk=report_id) if (not request.user.profile.has_permission(report, mode='w')): return user_denied(request, message="You don't have access to this Report") if request.POST: FilterForm(request.user.profile, request.POST, report=report, field_name=field_name).save() return HttpResponseRedirect(reverse('reports_report_edit', args=[report.id])) else: form = FilterForm(request.user.profile, report=report, field_name=field_name) return render_to_response('reports/report_filter', {'form': form, 'field_name': field_name}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "report_filter", "(", "request", ",", "report_id", ",", "field_name", ",", "response_format", "=", "'html'", ")", ":", "report", "=", "get_object_or_404", "(", "Report", ",", "pk", "=", "report_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "report", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Report\"", ")", "if", "request", ".", "POST", ":", "FilterForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "report", "=", "report", ",", "field_name", "=", "field_name", ")", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'reports_report_edit'", ",", "args", "=", "[", "report", ".", "id", "]", ")", ")", "else", ":", "form", "=", "FilterForm", "(", "request", ".", "user", ".", "profile", ",", "report", "=", "report", ",", "field_name", "=", "field_name", ")", "return", "render_to_response", "(", "'reports/report_filter'", ",", "{", "'form'", ":", "form", ",", "'field_name'", ":", "field_name", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
view to filter over a given field for a report .
train
false
44,570
def activity_type_sector(): if (auth.permission.format != 's3json'): return '' def prep(r): if (r.method != 'options'): return False return True s3.prep = prep return s3_rest_controller()
[ "def", "activity_type_sector", "(", ")", ":", "if", "(", "auth", ".", "permission", ".", "format", "!=", "'s3json'", ")", ":", "return", "''", "def", "prep", "(", "r", ")", ":", "if", "(", "r", ".", "method", "!=", "'options'", ")", ":", "return", "False", "return", "True", "s3", ".", "prep", "=", "prep", "return", "s3_rest_controller", "(", ")" ]
restful crud controller for options .
train
false
44,571
def multiple(messages): return MultiMessage(messages)
[ "def", "multiple", "(", "messages", ")", ":", "return", "MultiMessage", "(", "messages", ")" ]
yield multiple from a pipeline stage to send multiple values to the next pipeline stage .
train
false
44,572
def get_IntegerField(kwargs): v = validators.NumberRange(min=(-9223372036854775808), max=9223372036854775807) kwargs['validators'].append(v) return f.IntegerField(**kwargs)
[ "def", "get_IntegerField", "(", "kwargs", ")", ":", "v", "=", "validators", ".", "NumberRange", "(", "min", "=", "(", "-", "9223372036854775808", ")", ",", "max", "=", "9223372036854775807", ")", "kwargs", "[", "'validators'", "]", ".", "append", "(", "v", ")", "return", "f", ".", "IntegerField", "(", "**", "kwargs", ")" ]
returns an integerfield .
train
false
44,573
def sumsets(sets): return frozenset().union(*sets)
[ "def", "sumsets", "(", "sets", ")", ":", "return", "frozenset", "(", ")", ".", "union", "(", "*", "sets", ")" ]
union of sets .
train
false
44,574
def fit_grid_point(X, y, estimator, parameters, train, test, scorer, verbose, error_score='raise', **fit_params): (score, n_samples_test, _) = _fit_and_score(estimator, X, y, scorer, train, test, verbose, parameters, fit_params, error_score) return (score, parameters, n_samples_test)
[ "def", "fit_grid_point", "(", "X", ",", "y", ",", "estimator", ",", "parameters", ",", "train", ",", "test", ",", "scorer", ",", "verbose", ",", "error_score", "=", "'raise'", ",", "**", "fit_params", ")", ":", "(", "score", ",", "n_samples_test", ",", "_", ")", "=", "_fit_and_score", "(", "estimator", ",", "X", ",", "y", ",", "scorer", ",", "train", ",", "test", ",", "verbose", ",", "parameters", ",", "fit_params", ",", "error_score", ")", "return", "(", "score", ",", "parameters", ",", "n_samples_test", ")" ]
run fit on one set of parameters .
train
false
44,576
def _output_lines_to_list(cmdoutput): return [line.strip() for line in cmdoutput.splitlines() if _safe_output(line)]
[ "def", "_output_lines_to_list", "(", "cmdoutput", ")", ":", "return", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "cmdoutput", ".", "splitlines", "(", ")", "if", "_safe_output", "(", "line", ")", "]" ]
convert rabbitmqctl output to a list of strings .
train
false
44,577
@app.route('/scans/<int:scan_id>', methods=['DELETE']) @requires_auth def scan_delete(scan_id): scan_info = get_scan_info_from_id(scan_id) if (scan_info is None): abort(404, 'Scan not found') if (scan_info.w3af_core is None): abort(400, 'Scan state is invalid and can not be cleared') if (not scan_info.w3af_core.can_cleanup()): abort(403, 'Scan is not ready to be cleared') scan_info.cleanup() SCANS[scan_id] = None return jsonify({'message': 'Success'})
[ "@", "app", ".", "route", "(", "'/scans/<int:scan_id>'", ",", "methods", "=", "[", "'DELETE'", "]", ")", "@", "requires_auth", "def", "scan_delete", "(", "scan_id", ")", ":", "scan_info", "=", "get_scan_info_from_id", "(", "scan_id", ")", "if", "(", "scan_info", "is", "None", ")", ":", "abort", "(", "404", ",", "'Scan not found'", ")", "if", "(", "scan_info", ".", "w3af_core", "is", "None", ")", ":", "abort", "(", "400", ",", "'Scan state is invalid and can not be cleared'", ")", "if", "(", "not", "scan_info", ".", "w3af_core", ".", "can_cleanup", "(", ")", ")", ":", "abort", "(", "403", ",", "'Scan is not ready to be cleared'", ")", "scan_info", ".", "cleanup", "(", ")", "SCANS", "[", "scan_id", "]", "=", "None", "return", "jsonify", "(", "{", "'message'", ":", "'Success'", "}", ")" ]
clear all the scan information .
train
false
44,578
def find_flaky_tests(suites): for test in _iter_tests(suites): annotation = get_flaky_annotation(test) if annotation: (yield (test, annotation))
[ "def", "find_flaky_tests", "(", "suites", ")", ":", "for", "test", "in", "_iter_tests", "(", "suites", ")", ":", "annotation", "=", "get_flaky_annotation", "(", "test", ")", "if", "annotation", ":", "(", "yield", "(", "test", ",", "annotation", ")", ")" ]
find all flaky tests in the given suites .
train
false