id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
36,216
def get_disabled(): ret = set() for name in _iter_service_names(): if _service_is_upstart(name): if _upstart_is_disabled(name): ret.add(name) elif _service_is_sysv(name): if _sysv_is_disabled(name): ret.add(name) return sorted(ret)
[ "def", "get_disabled", "(", ")", ":", "ret", "=", "set", "(", ")", "for", "name", "in", "_iter_service_names", "(", ")", ":", "if", "_service_is_upstart", "(", "name", ")", ":", "if", "_upstart_is_disabled", "(", "name", ")", ":", "ret", ".", "add", "(", "name", ")", "elif", "_service_is_sysv", "(", "name", ")", ":", "if", "_sysv_is_disabled", "(", "name", ")", ":", "ret", ".", "add", "(", "name", ")", "return", "sorted", "(", "ret", ")" ]
return a list of all disabled services cli example: .
train
true
36,217
def _isCheckpointDir(checkpointDir): lastSegment = os.path.split(checkpointDir)[1] if (lastSegment[0] == '.'): return False if (not checkpointDir.endswith(g_defaultCheckpointExtension)): return False if (not os.path.isdir(checkpointDir)): return False return True
[ "def", "_isCheckpointDir", "(", "checkpointDir", ")", ":", "lastSegment", "=", "os", ".", "path", ".", "split", "(", "checkpointDir", ")", "[", "1", "]", "if", "(", "lastSegment", "[", "0", "]", "==", "'.'", ")", ":", "return", "False", "if", "(", "not", "checkpointDir", ".", "endswith", "(", "g_defaultCheckpointExtension", ")", ")", ":", "return", "False", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "checkpointDir", ")", ")", ":", "return", "False", "return", "True" ]
return true iff checkpointdir appears to be a checkpoint directory .
train
true
36,220
def overrides_a_method(class_node, name): for ancestor in class_node.ancestors(): if ((name in ancestor) and isinstance(ancestor[name], astroid.Function)): return True return False
[ "def", "overrides_a_method", "(", "class_node", ",", "name", ")", ":", "for", "ancestor", "in", "class_node", ".", "ancestors", "(", ")", ":", "if", "(", "(", "name", "in", "ancestor", ")", "and", "isinstance", "(", "ancestor", "[", "name", "]", ",", "astroid", ".", "Function", ")", ")", ":", "return", "True", "return", "False" ]
return true if <name> is a method overridden from an ancestor .
train
false
36,221
@handle_db_data_error @require_admin_context def qos_specs_update(context, qos_specs_id, updates): session = get_session() with session.begin(): _qos_specs_get_all_ref(context, qos_specs_id, session) specs = updates.get('specs', {}) if ('consumer' in updates): specs = specs.copy() specs['consumer'] = updates['consumer'] spec_ref = None for key in specs.keys(): try: spec_ref = _qos_specs_get_item(context, qos_specs_id, key, session) except exception.QoSSpecsKeyNotFound: spec_ref = models.QualityOfServiceSpecs() id = None if spec_ref.get('id', None): id = spec_ref['id'] else: id = str(uuid.uuid4()) value = dict(id=id, key=key, value=specs[key], specs_id=qos_specs_id, deleted=False) LOG.debug('qos_specs_update() value: %s', value) spec_ref.update(value) spec_ref.save(session=session) return specs
[ "@", "handle_db_data_error", "@", "require_admin_context", "def", "qos_specs_update", "(", "context", ",", "qos_specs_id", ",", "updates", ")", ":", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "_qos_specs_get_all_ref", "(", "context", ",", "qos_specs_id", ",", "session", ")", "specs", "=", "updates", ".", "get", "(", "'specs'", ",", "{", "}", ")", "if", "(", "'consumer'", "in", "updates", ")", ":", "specs", "=", "specs", ".", "copy", "(", ")", "specs", "[", "'consumer'", "]", "=", "updates", "[", "'consumer'", "]", "spec_ref", "=", "None", "for", "key", "in", "specs", ".", "keys", "(", ")", ":", "try", ":", "spec_ref", "=", "_qos_specs_get_item", "(", "context", ",", "qos_specs_id", ",", "key", ",", "session", ")", "except", "exception", ".", "QoSSpecsKeyNotFound", ":", "spec_ref", "=", "models", ".", "QualityOfServiceSpecs", "(", ")", "id", "=", "None", "if", "spec_ref", ".", "get", "(", "'id'", ",", "None", ")", ":", "id", "=", "spec_ref", "[", "'id'", "]", "else", ":", "id", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "value", "=", "dict", "(", "id", "=", "id", ",", "key", "=", "key", ",", "value", "=", "specs", "[", "key", "]", ",", "specs_id", "=", "qos_specs_id", ",", "deleted", "=", "False", ")", "LOG", ".", "debug", "(", "'qos_specs_update() value: %s'", ",", "value", ")", "spec_ref", ".", "update", "(", "value", ")", "spec_ref", ".", "save", "(", "session", "=", "session", ")", "return", "specs" ]
make updates to an existing qos specs .
train
false
36,222
def _bypass_ensure_directory(path, mode=511): if (not WRITE_SUPPORT): raise IOError('"os.mkdir" not supported on this platform.') (dirname, filename) = split(path) if (dirname and filename and (not isdir(dirname))): _bypass_ensure_directory(dirname) mkdir(dirname, mode)
[ "def", "_bypass_ensure_directory", "(", "path", ",", "mode", "=", "511", ")", ":", "if", "(", "not", "WRITE_SUPPORT", ")", ":", "raise", "IOError", "(", "'\"os.mkdir\" not supported on this platform.'", ")", "(", "dirname", ",", "filename", ")", "=", "split", "(", "path", ")", "if", "(", "dirname", "and", "filename", "and", "(", "not", "isdir", "(", "dirname", ")", ")", ")", ":", "_bypass_ensure_directory", "(", "dirname", ")", "mkdir", "(", "dirname", ",", "mode", ")" ]
sandbox-bypassing version of ensure_directory() .
train
true
36,223
def getTestData(): filename = os.path.join(os.path.dirname(__file__), 'data', 'accept.txt') i = 1 lines = [] for line in file(filename): lines.append((i, line)) i += 1 return lines
[ "def", "getTestData", "(", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'data'", ",", "'accept.txt'", ")", "i", "=", "1", "lines", "=", "[", "]", "for", "line", "in", "file", "(", "filename", ")", ":", "lines", ".", "append", "(", "(", "i", ",", "line", ")", ")", "i", "+=", "1", "return", "lines" ]
read the test data off of disk () -> [] .
train
false
36,224
def make_signed_jwt(signer, payload): header = {'typ': 'JWT', 'alg': 'RS256'} segments = [_urlsafe_b64encode(_json_encode(header)), _urlsafe_b64encode(_json_encode(payload))] signing_input = '.'.join(segments) signature = signer.sign(signing_input) segments.append(_urlsafe_b64encode(signature)) logging.debug(str(segments)) return '.'.join(segments)
[ "def", "make_signed_jwt", "(", "signer", ",", "payload", ")", ":", "header", "=", "{", "'typ'", ":", "'JWT'", ",", "'alg'", ":", "'RS256'", "}", "segments", "=", "[", "_urlsafe_b64encode", "(", "_json_encode", "(", "header", ")", ")", ",", "_urlsafe_b64encode", "(", "_json_encode", "(", "payload", ")", ")", "]", "signing_input", "=", "'.'", ".", "join", "(", "segments", ")", "signature", "=", "signer", ".", "sign", "(", "signing_input", ")", "segments", ".", "append", "(", "_urlsafe_b64encode", "(", "signature", ")", ")", "logging", ".", "debug", "(", "str", "(", "segments", ")", ")", "return", "'.'", ".", "join", "(", "segments", ")" ]
make a signed jwt .
train
false
36,225
def is_404_page(browser): return ('Page not found (404)' in browser.find_element_by_tag_name('h1').text)
[ "def", "is_404_page", "(", "browser", ")", ":", "return", "(", "'Page not found (404)'", "in", "browser", ".", "find_element_by_tag_name", "(", "'h1'", ")", ".", "text", ")" ]
check if page is 404 .
train
false
36,226
def getCubicPoints(begin, controlPoints, end, numberOfBezierPoints=globalNumberOfBezierPoints): bezierPortion = (1.0 / float(numberOfBezierPoints)) cubicPoints = [] for bezierIndex in xrange(1, (numberOfBezierPoints + 1)): cubicPoints.append(getCubicPoint((bezierPortion * bezierIndex), begin, controlPoints, end)) return cubicPoints
[ "def", "getCubicPoints", "(", "begin", ",", "controlPoints", ",", "end", ",", "numberOfBezierPoints", "=", "globalNumberOfBezierPoints", ")", ":", "bezierPortion", "=", "(", "1.0", "/", "float", "(", "numberOfBezierPoints", ")", ")", "cubicPoints", "=", "[", "]", "for", "bezierIndex", "in", "xrange", "(", "1", ",", "(", "numberOfBezierPoints", "+", "1", ")", ")", ":", "cubicPoints", ".", "append", "(", "getCubicPoint", "(", "(", "bezierPortion", "*", "bezierIndex", ")", ",", "begin", ",", "controlPoints", ",", "end", ")", ")", "return", "cubicPoints" ]
get the cubic points .
train
false
36,228
def delete_task(name): return _run_cmd('kapacitor delete tasks {0}'.format(name))
[ "def", "delete_task", "(", "name", ")", ":", "return", "_run_cmd", "(", "'kapacitor delete tasks {0}'", ".", "format", "(", "name", ")", ")" ]
delete a kapacitor task .
train
false
36,231
def decrypt_bigfile(infile, outfile, priv_key): if (not isinstance(priv_key, key.PrivateKey)): raise TypeError(('Private key required, but got %r' % priv_key)) for block in varblock.yield_varblocks(infile): cleartext = pkcs1.decrypt(block, priv_key) outfile.write(cleartext)
[ "def", "decrypt_bigfile", "(", "infile", ",", "outfile", ",", "priv_key", ")", ":", "if", "(", "not", "isinstance", "(", "priv_key", ",", "key", ".", "PrivateKey", ")", ")", ":", "raise", "TypeError", "(", "(", "'Private key required, but got %r'", "%", "priv_key", ")", ")", "for", "block", "in", "varblock", ".", "yield_varblocks", "(", "infile", ")", ":", "cleartext", "=", "pkcs1", ".", "decrypt", "(", "block", ",", "priv_key", ")", "outfile", ".", "write", "(", "cleartext", ")" ]
decrypts an encrypted varblock file .
train
false
36,234
def canonicalize_cfg(blocks): return canonicalize_cfg_single_backedge(blocks)
[ "def", "canonicalize_cfg", "(", "blocks", ")", ":", "return", "canonicalize_cfg_single_backedge", "(", "blocks", ")" ]
rewrite the given blocks to canonicalize the cfg .
train
false
36,237
def get_entity_batch(last_key, datastore, batch_size): return datastore.range_query(APP_ENTITY_TABLE, APP_ENTITY_SCHEMA, last_key, '', batch_size, start_inclusive=False)
[ "def", "get_entity_batch", "(", "last_key", ",", "datastore", ",", "batch_size", ")", ":", "return", "datastore", ".", "range_query", "(", "APP_ENTITY_TABLE", ",", "APP_ENTITY_SCHEMA", ",", "last_key", ",", "''", ",", "batch_size", ",", "start_inclusive", "=", "False", ")" ]
gets a batch of entities to operate on .
train
false
36,238
def lookup_portion(twitter, user_ids): users = {} kwargs = dict(user_id=','.join(map(str, user_ids)), skip_status=1) for u in twitter.users.lookup(**kwargs): users[int(u['id'])] = u['screen_name'] return users
[ "def", "lookup_portion", "(", "twitter", ",", "user_ids", ")", ":", "users", "=", "{", "}", "kwargs", "=", "dict", "(", "user_id", "=", "','", ".", "join", "(", "map", "(", "str", ",", "user_ids", ")", ")", ",", "skip_status", "=", "1", ")", "for", "u", "in", "twitter", ".", "users", ".", "lookup", "(", "**", "kwargs", ")", ":", "users", "[", "int", "(", "u", "[", "'id'", "]", ")", "]", "=", "u", "[", "'screen_name'", "]", "return", "users" ]
resolve a limited list of user ids to screen names .
train
false
36,239
def make_volume_options_tests(make_options, extra_arguments=None): if (extra_arguments is None): extra_arguments = [] def parseOptions(options, argv): options.parseOptions((argv + extra_arguments)) class VolumeOptionsTests(TestCase, ): '\n Tests for ``Options`` subclasses decorated with\n ``flocker_volume_options``.\n ' def test_default_config(self): "\n By default the config file is ``b'/etc/flocker/volume.json'``.\n " options = make_options() parseOptions(options, []) self.assertEqual(FilePath('/etc/flocker/volume.json'), options['config']) def test_config(self): '\n The options class accepts a ``--config`` parameter.\n ' path = '/foo/bar' options = make_options() parseOptions(options, ['--config', path]) self.assertEqual(FilePath(path), options['config']) def test_pool(self): '\n The options class accepts a ``--pool`` parameter.\n ' pool = 'foo-bar' options = make_options() parseOptions(options, ['--pool', pool]) self.assertEqual(pool, options['pool']) def test_mountpoint(self): '\n The options class accepts a ``--mountpoint`` parameter.\n ' mountpoint = '/bar/baz' options = make_options() parseOptions(options, ['--mountpoint', mountpoint]) self.assertEqual(mountpoint, options['mountpoint']) dummy_options = make_options() VolumeOptionsTests.__name__ = (dummy_options.__class__.__name__ + 'Tests') return VolumeOptionsTests
[ "def", "make_volume_options_tests", "(", "make_options", ",", "extra_arguments", "=", "None", ")", ":", "if", "(", "extra_arguments", "is", "None", ")", ":", "extra_arguments", "=", "[", "]", "def", "parseOptions", "(", "options", ",", "argv", ")", ":", "options", ".", "parseOptions", "(", "(", "argv", "+", "extra_arguments", ")", ")", "class", "VolumeOptionsTests", "(", "TestCase", ",", ")", ":", "def", "test_default_config", "(", "self", ")", ":", "options", "=", "make_options", "(", ")", "parseOptions", "(", "options", ",", "[", "]", ")", "self", ".", "assertEqual", "(", "FilePath", "(", "'/etc/flocker/volume.json'", ")", ",", "options", "[", "'config'", "]", ")", "def", "test_config", "(", "self", ")", ":", "path", "=", "'/foo/bar'", "options", "=", "make_options", "(", ")", "parseOptions", "(", "options", ",", "[", "'--config'", ",", "path", "]", ")", "self", ".", "assertEqual", "(", "FilePath", "(", "path", ")", ",", "options", "[", "'config'", "]", ")", "def", "test_pool", "(", "self", ")", ":", "pool", "=", "'foo-bar'", "options", "=", "make_options", "(", ")", "parseOptions", "(", "options", ",", "[", "'--pool'", ",", "pool", "]", ")", "self", ".", "assertEqual", "(", "pool", ",", "options", "[", "'pool'", "]", ")", "def", "test_mountpoint", "(", "self", ")", ":", "mountpoint", "=", "'/bar/baz'", "options", "=", "make_options", "(", ")", "parseOptions", "(", "options", ",", "[", "'--mountpoint'", ",", "mountpoint", "]", ")", "self", ".", "assertEqual", "(", "mountpoint", ",", "options", "[", "'mountpoint'", "]", ")", "dummy_options", "=", "make_options", "(", ")", "VolumeOptionsTests", ".", "__name__", "=", "(", "dummy_options", ".", "__class__", ".", "__name__", "+", "'Tests'", ")", "return", "VolumeOptionsTests" ]
make a testcase to test the volumeservice specific arguments added to an options class by the flocker_volume_options class decorator .
train
false
36,242
def numberToByteArray(n, howManyBytes=None): if (howManyBytes == None): howManyBytes = numBytes(n) b = bytearray(howManyBytes) for count in range((howManyBytes - 1), (-1), (-1)): b[count] = int((n % 256)) n >>= 8 return b
[ "def", "numberToByteArray", "(", "n", ",", "howManyBytes", "=", "None", ")", ":", "if", "(", "howManyBytes", "==", "None", ")", ":", "howManyBytes", "=", "numBytes", "(", "n", ")", "b", "=", "bytearray", "(", "howManyBytes", ")", "for", "count", "in", "range", "(", "(", "howManyBytes", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "b", "[", "count", "]", "=", "int", "(", "(", "n", "%", "256", ")", ")", "n", ">>=", "8", "return", "b" ]
convert an integer into a bytearray .
train
false
36,243
def cleanup_deleted_sources(store_dir, c): for source_dir in os.listdir(store_dir): try: source = c.execute('SELECT * FROM sources WHERE filesystem_id=?', (source_dir,)).fetchone() if (not source): print "Deleting source with no db entry ('{}')...".format(source_dir) secure_unlink(os.path.join(store_dir, source_dir)) except Exception as e: print '\n!! Error occurred cleaning up deleted sources for source {}'.format(source_dir) print 'Source had {} submissions'.format(len(os.listdir(os.path.join(store_dir, source_dir)))) print traceback.format_exc()
[ "def", "cleanup_deleted_sources", "(", "store_dir", ",", "c", ")", ":", "for", "source_dir", "in", "os", ".", "listdir", "(", "store_dir", ")", ":", "try", ":", "source", "=", "c", ".", "execute", "(", "'SELECT * FROM sources WHERE filesystem_id=?'", ",", "(", "source_dir", ",", ")", ")", ".", "fetchone", "(", ")", "if", "(", "not", "source", ")", ":", "print", "\"Deleting source with no db entry ('{}')...\"", ".", "format", "(", "source_dir", ")", "secure_unlink", "(", "os", ".", "path", ".", "join", "(", "store_dir", ",", "source_dir", ")", ")", "except", "Exception", "as", "e", ":", "print", "'\\n!! Error occurred cleaning up deleted sources for source {}'", ".", "format", "(", "source_dir", ")", "print", "'Source had {} submissions'", ".", "format", "(", "len", "(", "os", ".", "listdir", "(", "os", ".", "path", ".", "join", "(", "store_dir", ",", "source_dir", ")", ")", ")", ")", "print", "traceback", ".", "format_exc", "(", ")" ]
in 0 .
train
false
36,244
def validate_dtype(termname, dtype, missing_value): if (dtype is NotSpecified): raise DTypeNotSpecified(termname=termname) try: dtype = dtype_class(dtype) except TypeError: raise NotDType(dtype=dtype, termname=termname) if (not can_represent_dtype(dtype)): raise UnsupportedDType(dtype=dtype, termname=termname) if (missing_value is NotSpecified): missing_value = default_missing_value_for_dtype(dtype) try: if (dtype == categorical_dtype): _assert_valid_categorical_missing_value(missing_value) array([missing_value]).astype(dtype=dtype, casting='same_kind') except TypeError as e: raise TypeError('Missing value {value!r} is not a valid choice for term {termname} with dtype {dtype}.\n\nCoercion attempt failed with: {error}'.format(termname=termname, value=missing_value, dtype=dtype, error=e)) return (dtype, missing_value)
[ "def", "validate_dtype", "(", "termname", ",", "dtype", ",", "missing_value", ")", ":", "if", "(", "dtype", "is", "NotSpecified", ")", ":", "raise", "DTypeNotSpecified", "(", "termname", "=", "termname", ")", "try", ":", "dtype", "=", "dtype_class", "(", "dtype", ")", "except", "TypeError", ":", "raise", "NotDType", "(", "dtype", "=", "dtype", ",", "termname", "=", "termname", ")", "if", "(", "not", "can_represent_dtype", "(", "dtype", ")", ")", ":", "raise", "UnsupportedDType", "(", "dtype", "=", "dtype", ",", "termname", "=", "termname", ")", "if", "(", "missing_value", "is", "NotSpecified", ")", ":", "missing_value", "=", "default_missing_value_for_dtype", "(", "dtype", ")", "try", ":", "if", "(", "dtype", "==", "categorical_dtype", ")", ":", "_assert_valid_categorical_missing_value", "(", "missing_value", ")", "array", "(", "[", "missing_value", "]", ")", ".", "astype", "(", "dtype", "=", "dtype", ",", "casting", "=", "'same_kind'", ")", "except", "TypeError", "as", "e", ":", "raise", "TypeError", "(", "'Missing value {value!r} is not a valid choice for term {termname} with dtype {dtype}.\\n\\nCoercion attempt failed with: {error}'", ".", "format", "(", "termname", "=", "termname", ",", "value", "=", "missing_value", ",", "dtype", "=", "dtype", ",", "error", "=", "e", ")", ")", "return", "(", "dtype", ",", "missing_value", ")" ]
validate a dtype and missing_value passed to term .
train
true
36,248
def safetar_extractall(tar_file, path='.', members=None): return tar_file.extractall(path, safemembers(tar_file, path))
[ "def", "safetar_extractall", "(", "tar_file", ",", "path", "=", "'.'", ",", "members", "=", "None", ")", ":", "return", "tar_file", ".", "extractall", "(", "path", ",", "safemembers", "(", "tar_file", ",", "path", ")", ")" ]
safe version of tar_file .
train
false
36,249
def disable_monitor_mode(): global RUN_CONFIG if (RUN_CONFIG.IFACE_TO_TAKE_DOWN == ''): return print (((GR + ' [+]') + W) + (' disabling monitor mode on %s...' % ((G + RUN_CONFIG.IFACE_TO_TAKE_DOWN) + W))), stdout.flush() call(['airmon-ng', 'stop', RUN_CONFIG.IFACE_TO_TAKE_DOWN], stdout=DN, stderr=DN) print 'done'
[ "def", "disable_monitor_mode", "(", ")", ":", "global", "RUN_CONFIG", "if", "(", "RUN_CONFIG", ".", "IFACE_TO_TAKE_DOWN", "==", "''", ")", ":", "return", "print", "(", "(", "(", "GR", "+", "' [+]'", ")", "+", "W", ")", "+", "(", "' disabling monitor mode on %s...'", "%", "(", "(", "G", "+", "RUN_CONFIG", ".", "IFACE_TO_TAKE_DOWN", ")", "+", "W", ")", ")", ")", ",", "stdout", ".", "flush", "(", ")", "call", "(", "[", "'airmon-ng'", ",", "'stop'", ",", "RUN_CONFIG", ".", "IFACE_TO_TAKE_DOWN", "]", ",", "stdout", "=", "DN", ",", "stderr", "=", "DN", ")", "print", "'done'" ]
the program may have enabled monitor mode on a wireless interface .
train
false
36,253
def _bind(l, bind=None): if (bind is None): return method = bind.get('method', 'simple') if (method is None): return elif (method == 'simple'): l.simple_bind_s(bind.get('dn', ''), bind.get('password', '')) elif (method == 'sasl'): sasl_class = getattr(ldap.sasl, bind.get('mechanism', 'EXTERNAL').lower()) creds = bind.get('credentials', None) if (creds is None): creds = {} auth = sasl_class(*creds.get('args', []), **creds.get('kwargs', {})) l.sasl_interactive_bind_s(bind.get('dn', ''), auth) else: raise ValueError((('unsupported bind method "' + method) + '"; supported bind methods: simple sasl'))
[ "def", "_bind", "(", "l", ",", "bind", "=", "None", ")", ":", "if", "(", "bind", "is", "None", ")", ":", "return", "method", "=", "bind", ".", "get", "(", "'method'", ",", "'simple'", ")", "if", "(", "method", "is", "None", ")", ":", "return", "elif", "(", "method", "==", "'simple'", ")", ":", "l", ".", "simple_bind_s", "(", "bind", ".", "get", "(", "'dn'", ",", "''", ")", ",", "bind", ".", "get", "(", "'password'", ",", "''", ")", ")", "elif", "(", "method", "==", "'sasl'", ")", ":", "sasl_class", "=", "getattr", "(", "ldap", ".", "sasl", ",", "bind", ".", "get", "(", "'mechanism'", ",", "'EXTERNAL'", ")", ".", "lower", "(", ")", ")", "creds", "=", "bind", ".", "get", "(", "'credentials'", ",", "None", ")", "if", "(", "creds", "is", "None", ")", ":", "creds", "=", "{", "}", "auth", "=", "sasl_class", "(", "*", "creds", ".", "get", "(", "'args'", ",", "[", "]", ")", ",", "**", "creds", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "l", ".", "sasl_interactive_bind_s", "(", "bind", ".", "get", "(", "'dn'", ",", "''", ")", ",", "auth", ")", "else", ":", "raise", "ValueError", "(", "(", "(", "'unsupported bind method \"'", "+", "method", ")", "+", "'\"; supported bind methods: simple sasl'", ")", ")" ]
bind helper .
train
true
36,254
def _rmtree(parent, ignore_nocleanup=False, msg='', level=logging.DEBUG, ignore_if_missing=False): if (ignore_if_missing and (not os.path.exists(parent))): return try: if (ignore_nocleanup or (not config.nocleanup)): log_msg = 'Deleting' if msg: log_msg += (' (%s)' % msg) _logger.log(level, '%s: %s', log_msg, parent) shutil.rmtree(parent) except Exception as e: _logger.debug('In _rmtree, encountered exception: %s(%s)', type(e), e) if os.path.exists(parent): try: _logger.info('placing "delete.me" in %s', parent) open(os.path.join(parent, 'delete.me'), 'w').close() except Exception as ee: _logger.warning('Failed to remove or mark cache directory %s for removal %s', parent, ee)
[ "def", "_rmtree", "(", "parent", ",", "ignore_nocleanup", "=", "False", ",", "msg", "=", "''", ",", "level", "=", "logging", ".", "DEBUG", ",", "ignore_if_missing", "=", "False", ")", ":", "if", "(", "ignore_if_missing", "and", "(", "not", "os", ".", "path", ".", "exists", "(", "parent", ")", ")", ")", ":", "return", "try", ":", "if", "(", "ignore_nocleanup", "or", "(", "not", "config", ".", "nocleanup", ")", ")", ":", "log_msg", "=", "'Deleting'", "if", "msg", ":", "log_msg", "+=", "(", "' (%s)'", "%", "msg", ")", "_logger", ".", "log", "(", "level", ",", "'%s: %s'", ",", "log_msg", ",", "parent", ")", "shutil", ".", "rmtree", "(", "parent", ")", "except", "Exception", "as", "e", ":", "_logger", ".", "debug", "(", "'In _rmtree, encountered exception: %s(%s)'", ",", "type", "(", "e", ")", ",", "e", ")", "if", "os", ".", "path", ".", "exists", "(", "parent", ")", ":", "try", ":", "_logger", ".", "info", "(", "'placing \"delete.me\" in %s'", ",", "parent", ")", "open", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "'delete.me'", ")", ",", "'w'", ")", ".", "close", "(", ")", "except", "Exception", "as", "ee", ":", "_logger", ".", "warning", "(", "'Failed to remove or mark cache directory %s for removal %s'", ",", "parent", ",", "ee", ")" ]
on nfs filesystems .
train
false
36,255
def vrrp_config(app, interface, config): config_request = vrrp_event.EventVRRPConfigRequest(interface, config) config_request.sync = True return app.send_request(config_request)
[ "def", "vrrp_config", "(", "app", ",", "interface", ",", "config", ")", ":", "config_request", "=", "vrrp_event", ".", "EventVRRPConfigRequest", "(", "interface", ",", "config", ")", "config_request", ".", "sync", "=", "True", "return", "app", ".", "send_request", "(", "config_request", ")" ]
create an instance .
train
true
36,256
def smart_bytes(s, encoding=u'utf-8', strings_only=False, errors=u'strict'): if isinstance(s, Promise): return s return force_bytes(s, encoding, strings_only, errors)
[ "def", "smart_bytes", "(", "s", ",", "encoding", "=", "u'utf-8'", ",", "strings_only", "=", "False", ",", "errors", "=", "u'strict'", ")", ":", "if", "isinstance", "(", "s", ",", "Promise", ")", ":", "return", "s", "return", "force_bytes", "(", "s", ",", "encoding", ",", "strings_only", ",", "errors", ")" ]
returns a bytestring version of s .
train
true
36,257
def get_cpu(t): return (100 * (0.5 + (0.5 * np.sin(((0.2 * np.pi) * (t - 0.25))))))
[ "def", "get_cpu", "(", "t", ")", ":", "return", "(", "100", "*", "(", "0.5", "+", "(", "0.5", "*", "np", ".", "sin", "(", "(", "(", "0.2", "*", "np", ".", "pi", ")", "*", "(", "t", "-", "0.25", ")", ")", ")", ")", ")", ")" ]
simulate a function that returns cpu usage .
train
false
36,258
def print_files_information_flake8(): infracting_files = [] non_infracting_files = [] for path in list_files(): rel_path = os.path.relpath(path, theano.__path__[0]) number_of_infractions = flake8.main.check_file(path, ignore=ignore) if (number_of_infractions > 0): if (rel_path not in whitelist_flake8): infracting_files.append(rel_path) elif (rel_path in whitelist_flake8): non_infracting_files.append(rel_path) print('Files that must be corrected or added to whitelist:') for file in infracting_files: print(file) print('Files that can be removed from whitelist:') for file in non_infracting_files: print(file)
[ "def", "print_files_information_flake8", "(", ")", ":", "infracting_files", "=", "[", "]", "non_infracting_files", "=", "[", "]", "for", "path", "in", "list_files", "(", ")", ":", "rel_path", "=", "os", ".", "path", ".", "relpath", "(", "path", ",", "theano", ".", "__path__", "[", "0", "]", ")", "number_of_infractions", "=", "flake8", ".", "main", ".", "check_file", "(", "path", ",", "ignore", "=", "ignore", ")", "if", "(", "number_of_infractions", ">", "0", ")", ":", "if", "(", "rel_path", "not", "in", "whitelist_flake8", ")", ":", "infracting_files", ".", "append", "(", "rel_path", ")", "elif", "(", "rel_path", "in", "whitelist_flake8", ")", ":", "non_infracting_files", ".", "append", "(", "rel_path", ")", "print", "(", "'Files that must be corrected or added to whitelist:'", ")", "for", "file", "in", "infracting_files", ":", "print", "(", "file", ")", "print", "(", "'Files that can be removed from whitelist:'", ")", "for", "file", "in", "non_infracting_files", ":", "print", "(", "file", ")" ]
print the list of files which can be removed from the whitelist and the list of files which do not respect flake8 formatting that arent in the whitelist .
train
false
36,259
def read_regexp_block(stream, start_re, end_re=None): while True: line = stream.readline() if (not line): return [] if re.match(start_re, line): break lines = [line] while True: oldpos = stream.tell() line = stream.readline() if (not line): return [''.join(lines)] if ((end_re is not None) and re.match(end_re, line)): return [''.join(lines)] if ((end_re is None) and re.match(start_re, line)): stream.seek(oldpos) return [''.join(lines)] lines.append(line)
[ "def", "read_regexp_block", "(", "stream", ",", "start_re", ",", "end_re", "=", "None", ")", ":", "while", "True", ":", "line", "=", "stream", ".", "readline", "(", ")", "if", "(", "not", "line", ")", ":", "return", "[", "]", "if", "re", ".", "match", "(", "start_re", ",", "line", ")", ":", "break", "lines", "=", "[", "line", "]", "while", "True", ":", "oldpos", "=", "stream", ".", "tell", "(", ")", "line", "=", "stream", ".", "readline", "(", ")", "if", "(", "not", "line", ")", ":", "return", "[", "''", ".", "join", "(", "lines", ")", "]", "if", "(", "(", "end_re", "is", "not", "None", ")", "and", "re", ".", "match", "(", "end_re", ",", "line", ")", ")", ":", "return", "[", "''", ".", "join", "(", "lines", ")", "]", "if", "(", "(", "end_re", "is", "None", ")", "and", "re", ".", "match", "(", "start_re", ",", "line", ")", ")", ":", "stream", ".", "seek", "(", "oldpos", ")", "return", "[", "''", ".", "join", "(", "lines", ")", "]", "lines", ".", "append", "(", "line", ")" ]
read a sequence of tokens from a stream .
train
false
36,260
def test_account_create_should_fail(): credentials = [((c['user'], c['password']), e) for (c, e) in broken_credentials] for ((email, password), error) in credentials: error_obj = getattr(errors, error) with session_scope() as db_session: with pytest.raises(error_obj): create_account(db_session, email, password)
[ "def", "test_account_create_should_fail", "(", ")", ":", "credentials", "=", "[", "(", "(", "c", "[", "'user'", "]", ",", "c", "[", "'password'", "]", ")", ",", "e", ")", "for", "(", "c", ",", "e", ")", "in", "broken_credentials", "]", "for", "(", "(", "email", ",", "password", ")", ",", "error", ")", "in", "credentials", ":", "error_obj", "=", "getattr", "(", "errors", ",", "error", ")", "with", "session_scope", "(", ")", "as", "db_session", ":", "with", "pytest", ".", "raises", "(", "error_obj", ")", ":", "create_account", "(", "db_session", ",", "email", ",", "password", ")" ]
test that creation fails with appropriate errors .
train
false
36,261
def getTopPath(path): top = (-9.876543219876543e+17) for point in path: top = max(top, point.z) return top
[ "def", "getTopPath", "(", "path", ")", ":", "top", "=", "(", "-", "9.876543219876543e+17", ")", "for", "point", "in", "path", ":", "top", "=", "max", "(", "top", ",", "point", ".", "z", ")", "return", "top" ]
get the top of the path .
train
false
36,264
def empirical_covariance(X, assume_centered=False): X = np.asarray(X) if (X.ndim == 1): X = np.reshape(X, (1, (-1))) if (X.shape[0] == 1): warnings.warn('Only one sample available. You may want to reshape your data array') if assume_centered: covariance = (np.dot(X.T, X) / X.shape[0]) else: covariance = np.cov(X.T, bias=1) if (covariance.ndim == 0): covariance = np.array([[covariance]]) return covariance
[ "def", "empirical_covariance", "(", "X", ",", "assume_centered", "=", "False", ")", ":", "X", "=", "np", ".", "asarray", "(", "X", ")", "if", "(", "X", ".", "ndim", "==", "1", ")", ":", "X", "=", "np", ".", "reshape", "(", "X", ",", "(", "1", ",", "(", "-", "1", ")", ")", ")", "if", "(", "X", ".", "shape", "[", "0", "]", "==", "1", ")", ":", "warnings", ".", "warn", "(", "'Only one sample available. You may want to reshape your data array'", ")", "if", "assume_centered", ":", "covariance", "=", "(", "np", ".", "dot", "(", "X", ".", "T", ",", "X", ")", "/", "X", ".", "shape", "[", "0", "]", ")", "else", ":", "covariance", "=", "np", ".", "cov", "(", "X", ".", "T", ",", "bias", "=", "1", ")", "if", "(", "covariance", ".", "ndim", "==", "0", ")", ":", "covariance", "=", "np", ".", "array", "(", "[", "[", "covariance", "]", "]", ")", "return", "covariance" ]
computes the maximum likelihood covariance estimator parameters x : ndarray .
train
false
36,267
def move_to_parent_folder(workdir):
    """Move all content of *workdir* into its parent directory.

    Returns a (path, success) tuple.  Disc-image layout folders
    ('video_ts', 'audio_ts', 'bdmv') are left untouched.
    """
    workdir = os.path.abspath(os.path.normpath(workdir))
    parent = os.path.abspath(os.path.normpath(os.path.join(workdir, '..')))
    # Do not flatten DVD / Blu-ray folder structures.
    for entry in os.listdir(workdir):
        if entry.lower() in ('video_ts', 'audio_ts', 'bdmv'):
            return (workdir, True)
    for (root, dirs, files) in os.walk(workdir):
        for filename in files:
            src = os.path.join(root, filename)
            dst = src.replace(workdir, parent)
            (moved, dst) = move_to_path(src, dst)
            if not moved:
                return (parent, False)
    cleanup_empty_directories(workdir)
    return (parent, True)
[ "def", "move_to_parent_folder", "(", "workdir", ")", ":", "workdir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "normpath", "(", "workdir", ")", ")", "dest", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "workdir", ",", "'..'", ")", ")", ")", "for", "item", "in", "os", ".", "listdir", "(", "workdir", ")", ":", "if", "(", "item", ".", "lower", "(", ")", "in", "(", "'video_ts'", ",", "'audio_ts'", ",", "'bdmv'", ")", ")", ":", "return", "(", "workdir", ",", "True", ")", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "workdir", ")", ":", "for", "_file", "in", "files", ":", "path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "_file", ")", "new_path", "=", "path", ".", "replace", "(", "workdir", ",", "dest", ")", "(", "ok", ",", "new_path", ")", "=", "move_to_path", "(", "path", ",", "new_path", ")", "if", "(", "not", "ok", ")", ":", "return", "(", "dest", ",", "False", ")", "cleanup_empty_directories", "(", "workdir", ")", "return", "(", "dest", ",", "True", ")" ]
move all content of workdir into its parent directory .
train
false
36,268
def dataset_follower_list(context, data_dict):
    """Return the list of users that are following the given dataset."""
    _check_access('dataset_follower_list', context, data_dict)
    schema = ckan.logic.schema.default_follow_dataset_schema()
    model = context['model']
    return _follower_list(context, data_dict, schema, model.UserFollowingDataset)
[ "def", "dataset_follower_list", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'dataset_follower_list'", ",", "context", ",", "data_dict", ")", "return", "_follower_list", "(", "context", ",", "data_dict", ",", "ckan", ".", "logic", ".", "schema", ".", "default_follow_dataset_schema", "(", ")", ",", "context", "[", "'model'", "]", ".", "UserFollowingDataset", ")" ]
return the list of users that are following the given dataset .
train
false
36,269
def dirname(p):
    """Return the directory component of pathname *p*."""
    (head, _tail) = split(p)
    return head
[ "def", "dirname", "(", "p", ")", ":", "return", "split", "(", "p", ")", "[", "0", "]" ]
returns the directory component of a pathname .
train
false
36,270
def split_at_offsets(line, offsets):
    """Split *line* at the given character offsets.

    Pieces before each in-range offset are stripped of surrounding
    whitespace; the final piece (from the last offset on) is kept as-is.
    """
    chunks = []
    prev = 0
    cur = 0
    for cur in sorted(offsets):
        if prev != cur and cur < len(line):
            chunks.append(line[prev:cur].strip())
        prev = cur
    chunks.append(line[cur:])
    return chunks
[ "def", "split_at_offsets", "(", "line", ",", "offsets", ")", ":", "result", "=", "[", "]", "previous_offset", "=", "0", "current_offset", "=", "0", "for", "current_offset", "in", "sorted", "(", "offsets", ")", ":", "if", "(", "(", "current_offset", "<", "len", "(", "line", ")", ")", "and", "(", "previous_offset", "!=", "current_offset", ")", ")", ":", "result", ".", "append", "(", "line", "[", "previous_offset", ":", "current_offset", "]", ".", "strip", "(", ")", ")", "previous_offset", "=", "current_offset", "result", ".", "append", "(", "line", "[", "current_offset", ":", "]", ")", "return", "result" ]
split line at offsets .
train
true
36,272
def axis_reverse(a, axis=(-1)):
    """Reverse the 1-D slices of *a* along *axis*."""
    reversed_view = axis_slice(a, step=-1, axis=axis)
    return reversed_view
[ "def", "axis_reverse", "(", "a", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "return", "axis_slice", "(", "a", ",", "step", "=", "(", "-", "1", ")", ",", "axis", "=", "axis", ")" ]
reverse the 1-d slices of a along axis axis .
train
false
36,273
def test_cleanup_after_install_from_local_directory(script, data):
    """Install a local package and verify no build/src/temp leftovers remain."""
    pkg_dir = data.packages.join('FSPkg')
    script.pip('install', pkg_dir, expect_error=False)
    build_dir = script.venv_path / 'build'
    src_dir = script.venv_path / 'src'
    assert not exists(build_dir), 'unexpected build/ dir exists: %s' % build_dir
    assert not exists(src_dir), 'unexpected src/ dir exist: %s' % src_dir
    script.assert_no_temp()
[ "def", "test_cleanup_after_install_from_local_directory", "(", "script", ",", "data", ")", ":", "to_install", "=", "data", ".", "packages", ".", "join", "(", "'FSPkg'", ")", "script", ".", "pip", "(", "'install'", ",", "to_install", ",", "expect_error", "=", "False", ")", "build", "=", "(", "script", ".", "venv_path", "/", "'build'", ")", "src", "=", "(", "script", ".", "venv_path", "/", "'src'", ")", "assert", "(", "not", "exists", "(", "build", ")", ")", ",", "(", "'unexpected build/ dir exists: %s'", "%", "build", ")", "assert", "(", "not", "exists", "(", "src", ")", ")", ",", "(", "'unexpected src/ dir exist: %s'", "%", "src", ")", "script", ".", "assert_no_temp", "(", ")" ]
test clean up after installing from a local directory .
train
false
36,274
# Build a multi-line string identifying the important bulkloader options; used
# to detect whether a progress database was created with different settings.
# The result_db line is only included for downloads.
# NOTE(review): the template's embedded whitespace appears collapsed in this
# copy — confirm exact spacing against the original source before relying on it.
def _MakeSignature(app_id=None, url=None, kind=None, db_filename=None, perform_map=None, download=None, has_header=None, result_db_filename=None, dump=None, restore=None): if download: result_db_line = ('result_db: %s' % result_db_filename) else: result_db_line = '' return (u'\n app_id: %s\n url: %s\n kind: %s\n download: %s\n map: %s\n dump: %s\n restore: %s\n progress_db: %s\n has_header: %s\n %s\n ' % (app_id, url, kind, download, perform_map, dump, restore, db_filename, has_header, result_db_line))
[ "def", "_MakeSignature", "(", "app_id", "=", "None", ",", "url", "=", "None", ",", "kind", "=", "None", ",", "db_filename", "=", "None", ",", "perform_map", "=", "None", ",", "download", "=", "None", ",", "has_header", "=", "None", ",", "result_db_filename", "=", "None", ",", "dump", "=", "None", ",", "restore", "=", "None", ")", ":", "if", "download", ":", "result_db_line", "=", "(", "'result_db: %s'", "%", "result_db_filename", ")", "else", ":", "result_db_line", "=", "''", "return", "(", "u'\\n app_id: %s\\n url: %s\\n kind: %s\\n download: %s\\n map: %s\\n dump: %s\\n restore: %s\\n progress_db: %s\\n has_header: %s\\n %s\\n '", "%", "(", "app_id", ",", "url", ",", "kind", ",", "download", ",", "perform_map", ",", "dump", ",", "restore", ",", "db_filename", ",", "has_header", ",", "result_db_line", ")", ")" ]
returns a string that identifies the important options for the database .
train
false
36,278
def convert_image(source, dest, out_format, run_as_root=False):
    """Convert *source* image to *out_format*, writing the result to *dest*.

    Shells out to `qemu-img convert` via the project's execute helper.
    """
    args = ('qemu-img', 'convert', '-O', out_format, source, dest)
    utils.execute(*args, run_as_root=run_as_root)
[ "def", "convert_image", "(", "source", ",", "dest", ",", "out_format", ",", "run_as_root", "=", "False", ")", ":", "cmd", "=", "(", "'qemu-img'", ",", "'convert'", ",", "'-O'", ",", "out_format", ",", "source", ",", "dest", ")", "utils", ".", "execute", "(", "run_as_root", "=", "run_as_root", ",", "*", "cmd", ")" ]
convert an image file using qemu-img .
train
false
36,279
@LocalContext
def printable(raw_bytes, *args, **kwargs):
    """Encode *raw_bytes* so the shellcode contains only non-space printable bytes.

    Delegates to encode() with the printable-byte pattern; extra arguments
    are passed through unchanged.
    """
    return encode(raw_bytes, expr=re_printable, *args, **kwargs)
[ "@", "LocalContext", "def", "printable", "(", "raw_bytes", ",", "*", "a", ",", "**", "kw", ")", ":", "return", "encode", "(", "raw_bytes", ",", "expr", "=", "re_printable", ",", "*", "a", ",", "**", "kw", ")" ]
printable -> str encode the shellcode raw_bytes such that it only contains non-space printable bytes .
train
false
36,280
# Report whether notification-OSD (pynotify) support is present.
# _HAVE_NTFOSD is a module-level flag presumably set at import time — confirm.
def have_ntfosd(): return bool(_HAVE_NTFOSD)
[ "def", "have_ntfosd", "(", ")", ":", "return", "bool", "(", "_HAVE_NTFOSD", ")" ]
return if any pynotify support is present .
train
false
36,285
def quotes_historical_yahoo(ticker, date1, date2, asobject=False, adjusted=True, cachename=None):
    """Get historical quotes for *ticker* between date1 and date2.

    Returns the parsed historical data, or None (after emitting a warning)
    if parsing fails with an IOError.
    """
    fh = fetch_historical_yahoo(ticker, date1, date2, cachename)
    try:
        ret = parse_yahoo_historical(fh, asobject, adjusted)
    except IOError as exc:
        # BUG FIX: the previous message referenced an undefined name `url`
        # (raising NameError inside the handler) and indexed exc.strerror,
        # which is a plain string (or None), not a sequence of messages.
        warnings.warn('Failed to parse historical data for %r:\n%s' % (ticker, exc))
        return None
    return ret
[ "def", "quotes_historical_yahoo", "(", "ticker", ",", "date1", ",", "date2", ",", "asobject", "=", "False", ",", "adjusted", "=", "True", ",", "cachename", "=", "None", ")", ":", "fh", "=", "fetch_historical_yahoo", "(", "ticker", ",", "date1", ",", "date2", ",", "cachename", ")", "try", ":", "ret", "=", "parse_yahoo_historical", "(", "fh", ",", "asobject", ",", "adjusted", ")", "except", "IOError", "as", "exc", ":", "warnings", ".", "warn", "(", "(", "(", "(", "'urlopen() failure\\n'", "+", "url", ")", "+", "'\\n'", ")", "+", "exc", ".", "strerror", "[", "1", "]", ")", ")", "return", "None", "return", "ret" ]
get historical data for ticker between date1 and date2 .
train
false
36,286
def formatFailure(myFailure):
    """Construct an HTML representation of the given Failure.

    If rendering succeeds, return ASCII-safe bytes; otherwise re-raise the
    rendering failure.
    """
    rendered = []
    flattenString(None, FailureElement(myFailure)).addBoth(rendered.append)
    outcome = rendered[0]
    if isinstance(outcome, bytes):
        return outcome.decode('utf-8').encode('ascii', 'xmlcharrefreplace')
    # Rendering produced a Failure: propagate it.
    outcome.raiseException()
[ "def", "formatFailure", "(", "myFailure", ")", ":", "result", "=", "[", "]", "flattenString", "(", "None", ",", "FailureElement", "(", "myFailure", ")", ")", ".", "addBoth", "(", "result", ".", "append", ")", "if", "isinstance", "(", "result", "[", "0", "]", ",", "bytes", ")", ":", "return", "result", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", ".", "encode", "(", "'ascii'", ",", "'xmlcharrefreplace'", ")", "result", "[", "0", "]", ".", "raiseException", "(", ")" ]
construct an html representation of the given failure .
train
false
36,288
# Signal handler: set the nomination date on a new packaged-app version.
# If the most recent other version is still pending, inherit its nomination;
# otherwise (approved app, no nomination yet) stamp the current time.
# `raw` fixture loads are skipped.  NOTE(review): in this flattened copy the
# attachment of the elif branch is ambiguous — confirm against the original.
def inherit_nomination(sender, instance, **kw): if kw.get('raw'): return addon = instance.addon if addon.is_packaged: last_ver = Version.objects.filter(addon=addon).exclude(pk=instance.pk).order_by('-nomination') if (last_ver.exists() and (last_ver[0].all_files[0].status == mkt.STATUS_PENDING)): instance.update(nomination=last_ver[0].nomination, _signal=False) log.debug(('[Webapp:%s] Inheriting nomination from prior pending version' % addon.id)) elif ((addon.status in mkt.WEBAPPS_APPROVED_STATUSES) and (not instance.nomination)): log.debug(('[Webapp:%s] Setting nomination date to now for new version.' % addon.id)) instance.update(nomination=datetime.datetime.now(), _signal=False)
[ "def", "inherit_nomination", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "if", "kw", ".", "get", "(", "'raw'", ")", ":", "return", "addon", "=", "instance", ".", "addon", "if", "addon", ".", "is_packaged", ":", "last_ver", "=", "Version", ".", "objects", ".", "filter", "(", "addon", "=", "addon", ")", ".", "exclude", "(", "pk", "=", "instance", ".", "pk", ")", ".", "order_by", "(", "'-nomination'", ")", "if", "(", "last_ver", ".", "exists", "(", ")", "and", "(", "last_ver", "[", "0", "]", ".", "all_files", "[", "0", "]", ".", "status", "==", "mkt", ".", "STATUS_PENDING", ")", ")", ":", "instance", ".", "update", "(", "nomination", "=", "last_ver", "[", "0", "]", ".", "nomination", ",", "_signal", "=", "False", ")", "log", ".", "debug", "(", "(", "'[Webapp:%s] Inheriting nomination from prior pending version'", "%", "addon", ".", "id", ")", ")", "elif", "(", "(", "addon", ".", "status", "in", "mkt", ".", "WEBAPPS_APPROVED_STATUSES", ")", "and", "(", "not", "instance", ".", "nomination", ")", ")", ":", "log", ".", "debug", "(", "(", "'[Webapp:%s] Setting nomination date to now for new version.'", "%", "addon", ".", "id", ")", ")", "instance", ".", "update", "(", "nomination", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "_signal", "=", "False", ")" ]
inherit nomination date for new packaged app versions .
train
false
36,289
def _serialize_hstore(val):
    """Serialize a dictionary into a PostgreSQL hstore literal.

    Keys and values must be strings; a None value becomes NULL.  Raises
    ValueError for any non-string key or non-string/non-None value.
    """
    def escape(s, position):
        if position == 'value' and s is None:
            return 'NULL'
        elif isinstance(s, util.string_types):
            # Escape backslashes first, then double quotes.
            return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"')
        else:
            raise ValueError('%r in %s position is not a string.' % (s, position))
    pairs = ('%s=>%s' % (escape(k, 'key'), escape(v, 'value')) for (k, v) in val.items())
    return ', '.join(pairs)
[ "def", "_serialize_hstore", "(", "val", ")", ":", "def", "esc", "(", "s", ",", "position", ")", ":", "if", "(", "(", "position", "==", "'value'", ")", "and", "(", "s", "is", "None", ")", ")", ":", "return", "'NULL'", "elif", "isinstance", "(", "s", ",", "util", ".", "string_types", ")", ":", "return", "(", "'\"%s\"'", "%", "s", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ".", "replace", "(", "'\"'", ",", "'\\\\\"'", ")", ")", "else", ":", "raise", "ValueError", "(", "(", "'%r in %s position is not a string.'", "%", "(", "s", ",", "position", ")", ")", ")", "return", "', '", ".", "join", "(", "(", "(", "'%s=>%s'", "%", "(", "esc", "(", "k", ",", "'key'", ")", ",", "esc", "(", "v", ",", "'value'", ")", ")", ")", "for", "(", "k", ",", "v", ")", "in", "val", ".", "items", "(", ")", ")", ")" ]
serialize a dictionary into an hstore literal .
train
false
36,290
# Autouse fixture: patch settings for every test — disable outgoing email
# subscriptions and drop bcrypt work factor to 1 to keep tests fast.
@pytest.fixture(autouse=True) def patched_settings(): settings.ENABLE_EMAIL_SUBSCRIPTIONS = False settings.BCRYPT_LOG_ROUNDS = 1
[ "@", "pytest", ".", "fixture", "(", "autouse", "=", "True", ")", "def", "patched_settings", "(", ")", ":", "settings", ".", "ENABLE_EMAIL_SUBSCRIPTIONS", "=", "False", "settings", ".", "BCRYPT_LOG_ROUNDS", "=", "1" ]
patch settings for tests .
train
false
36,292
def trunk_bridge_does_not_exist(trunk_id):
    """Return True if no trunk bridge exists for the given trunk id."""
    return not trunk_manager.TrunkBridge(trunk_id).exists()
[ "def", "trunk_bridge_does_not_exist", "(", "trunk_id", ")", ":", "bridge", "=", "trunk_manager", ".", "TrunkBridge", "(", "trunk_id", ")", "return", "(", "not", "bridge", ".", "exists", "(", ")", ")" ]
return true if trunk bridge for given id does not exist .
train
false
36,295
# Get the name of a chassis by issuing the RAC 'getchassisname' command
# against the given host with the supplied admin credentials.
def get_chassis_name(host=None, admin_username=None, admin_password=None): return bare_rac_cmd('getchassisname', host=host, admin_username=admin_username, admin_password=admin_password)
[ "def", "get_chassis_name", "(", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "bare_rac_cmd", "(", "'getchassisname'", ",", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "admin_password", "=", "admin_password", ")" ]
get the name of a chassis .
train
true
36,296
def _exclude_noncrit(in_list):
    """Return items of *in_list* containing none of the non-critical warnings.

    None entries are kept (they contain no warning text).
    """
    from . import conf
    kept = []
    for item in in_list:
        hits = 0
        if item is not None:
            hits = sum(item.count(w) for w in conf.noncritical_warnings)
        if hits == 0:
            kept.append(item)
    return kept
[ "def", "_exclude_noncrit", "(", "in_list", ")", ":", "from", ".", "import", "conf", "out_list", "=", "[", "]", "for", "s", "in", "in_list", ":", "n", "=", "0", "if", "(", "s", "is", "not", "None", ")", ":", "for", "w", "in", "conf", ".", "noncritical_warnings", ":", "n", "+=", "s", ".", "count", "(", "w", ")", "if", "(", "n", "==", "0", ")", ":", "out_list", ".", "append", "(", "s", ")", "return", "out_list" ]
exclude any items in input list containing astropy .
train
false
36,297
def GetSortedTimeZoneNames():
    """Return the time-zone names sorted longitudinally.

    Uses GetIndexedTimeZoneNames() and sorts by the index component.

    FIX: materialize the zip() result before indexing — on Python 3, zip()
    returns a lazy iterator and `zip(...)[1]` raises TypeError.  On Python 2
    the result is identical.
    """
    tzs = sorted(GetIndexedTimeZoneNames())
    return tuple(zip(*tzs))[1]
[ "def", "GetSortedTimeZoneNames", "(", ")", ":", "tzs", "=", "list", "(", "GetIndexedTimeZoneNames", "(", ")", ")", "tzs", ".", "sort", "(", ")", "return", "zip", "(", "*", "tzs", ")", "[", "1", "]" ]
uses getindexedtimezonenames to return the time zone names sorted longitudinally .
train
false
36,298
def get_callable_name(name):
    """Return *name* coerced to a native ASCII string.

    Raises ConfigurationError when the name is not ASCII, since it is used
    as a method's __name__.
    """
    try:
        return native_(name, 'ascii')
    except (UnicodeEncodeError, UnicodeDecodeError):
        template = '`name="%s"` is invalid. `name` must be ascii because it is used on __name__ of the method'
        raise ConfigurationError(template % name)
[ "def", "get_callable_name", "(", "name", ")", ":", "try", ":", "return", "native_", "(", "name", ",", "'ascii'", ")", "except", "(", "UnicodeEncodeError", ",", "UnicodeDecodeError", ")", ":", "msg", "=", "'`name=\"%s\"` is invalid. `name` must be ascii because it is used on __name__ of the method'", "raise", "ConfigurationError", "(", "(", "msg", "%", "name", ")", ")" ]
returns the best available display name for the given function/callable .
train
false
36,299
# Controller for the custom "privacy" view: select the view template,
# set the (translated) page title, and return an empty context dict.
def privacy(): _custom_view('privacy') response.title = T('Privacy') return dict()
[ "def", "privacy", "(", ")", ":", "_custom_view", "(", "'privacy'", ")", "response", ".", "title", "=", "T", "(", "'Privacy'", ")", "return", "dict", "(", ")" ]
custom view .
train
false
36,300
@partial
def require_email(strategy, backend, details, user=None, is_new=False, **kwargs):
    """Pipeline step: force an email for backends that don't provide one.

    For GitHub, fetch the email via the API token.  Existing users with an
    email continue; brand-new users without one are sent to registration.
    """
    if backend.name == u'github':
        github_email = get_github_email(kwargs[u'response'][u'access_token'])
        if github_email is not None:
            details[u'email'] = github_email
    if user and user.email:
        if backend.name == u'email':
            return {u'is_new': True}
        return None
    if is_new and not details.get(u'email'):
        return redirect(u'register')
[ "@", "partial", "def", "require_email", "(", "strategy", ",", "backend", ",", "details", ",", "user", "=", "None", ",", "is_new", "=", "False", ",", "**", "kwargs", ")", ":", "if", "(", "backend", ".", "name", "==", "u'github'", ")", ":", "email", "=", "get_github_email", "(", "kwargs", "[", "u'response'", "]", "[", "u'access_token'", "]", ")", "if", "(", "email", "is", "not", "None", ")", ":", "details", "[", "u'email'", "]", "=", "email", "if", "(", "user", "and", "user", ".", "email", ")", ":", "if", "(", "backend", ".", "name", "==", "u'email'", ")", ":", "return", "{", "u'is_new'", ":", "True", "}", "return", "elif", "(", "is_new", "and", "(", "not", "details", ".", "get", "(", "u'email'", ")", ")", ")", ":", "return", "redirect", "(", "u'register'", ")" ]
forces entering email for backends which don't provide it .
train
false
36,301
def test_non_C_locale_with_fast_reader():
    """Force a non-"C" locale and check the fast reader still parses floats."""
    saved = locale.setlocale(locale.LC_ALL)
    try:
        if platform.system() == 'Darwin':
            locale.setlocale(locale.LC_ALL, str('de_DE'))
        else:
            locale.setlocale(locale.LC_ALL, str('de_DE.utf8'))
        readers = (True, False, {'use_fast_converter': False}, {'use_fast_converter': True})
        for fast_reader in readers:
            t = ascii.read(['a b', '1.5 2'], format='basic', guess=False, fast_reader=fast_reader)
            assert t['a'].dtype.kind == 'f'
    except locale.Error as e:
        # The locale may not be installed on this machine.
        pytest.skip('Locale error: {}'.format(e))
    finally:
        locale.setlocale(locale.LC_ALL, saved)
[ "def", "test_non_C_locale_with_fast_reader", "(", ")", ":", "current", "=", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ")", "try", ":", "if", "(", "platform", ".", "system", "(", ")", "==", "'Darwin'", ")", ":", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ",", "str", "(", "'de_DE'", ")", ")", "else", ":", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ",", "str", "(", "'de_DE.utf8'", ")", ")", "for", "fast_reader", "in", "(", "True", ",", "False", ",", "{", "'use_fast_converter'", ":", "False", "}", ",", "{", "'use_fast_converter'", ":", "True", "}", ")", ":", "t", "=", "ascii", ".", "read", "(", "[", "'a b'", ",", "'1.5 2'", "]", ",", "format", "=", "'basic'", ",", "guess", "=", "False", ",", "fast_reader", "=", "fast_reader", ")", "assert", "(", "t", "[", "'a'", "]", ".", "dtype", ".", "kind", "==", "'f'", ")", "except", "locale", ".", "Error", "as", "e", ":", "pytest", ".", "skip", "(", "'Locale error: {}'", ".", "format", "(", "e", ")", ")", "finally", ":", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ",", "current", ")" ]
test code that forces "c" locale while calling fast reader .
train
false
36,303
# Test double: simulate a connection failure when submitting to Software
# Secure — always raises requests.exceptions.ConnectionError.
def mock_software_secure_post_unavailable(url, headers=None, data=None, **kwargs): raise requests.exceptions.ConnectionError
[ "def", "mock_software_secure_post_unavailable", "(", "url", ",", "headers", "=", "None", ",", "data", "=", "None", ",", "**", "kwargs", ")", ":", "raise", "requests", ".", "exceptions", ".", "ConnectionError" ]
simulates a connection failure when we try to submit to software secure .
train
false
36,304
def jsonable_error(status=500, message='The Studio servers encountered an error'):
    """Decorator factory: for AJAX requests, replace the view's response with
    a JSON error body of the given status; non-AJAX requests pass through.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(request, *args, **kwargs):
            if not request.is_ajax():
                return func(request, *args, **kwargs)
            body = dump_js_escaped_json({'error': message})
            return HttpResponse(body, content_type='application/json', status=status)
        return wrapped
    return decorator
[ "def", "jsonable_error", "(", "status", "=", "500", ",", "message", "=", "'The Studio servers encountered an error'", ")", ":", "def", "outer", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "request", ".", "is_ajax", "(", ")", ":", "content", "=", "dump_js_escaped_json", "(", "{", "'error'", ":", "message", "}", ")", "return", "HttpResponse", "(", "content", ",", "content_type", "=", "'application/json'", ",", "status", "=", "status", ")", "else", ":", "return", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "inner", "return", "outer" ]
a decorator to make an error view return an json-formatted message if it was requested via ajax .
train
false
36,305
def process_paths(options, candidates=None, error=True):
    """Check paths, log any errors found, and optionally exit.

    Exits with status 1 when errors exist and *error* is true; otherwise
    returns the list of errors.
    """
    found = check_path(options, rootdir=CURDIR, candidates=candidates)
    if options.format == 'pylint':
        template = '%(filename)s:%(lnum)s: [%(type)s] %(text)s'
    else:
        template = '%(filename)s:%(lnum)s:%(col)s: %(text)s'
    for err in found:
        if options.abspath:
            err._info['filename'] = op.abspath(err.filename)
        LOGGER.warning(template, err._info)
    if error:
        sys.exit(int(bool(found)))
    return found
[ "def", "process_paths", "(", "options", ",", "candidates", "=", "None", ",", "error", "=", "True", ")", ":", "errors", "=", "check_path", "(", "options", ",", "rootdir", "=", "CURDIR", ",", "candidates", "=", "candidates", ")", "pattern", "=", "'%(filename)s:%(lnum)s:%(col)s: %(text)s'", "if", "(", "options", ".", "format", "==", "'pylint'", ")", ":", "pattern", "=", "'%(filename)s:%(lnum)s: [%(type)s] %(text)s'", "for", "er", "in", "errors", ":", "if", "options", ".", "abspath", ":", "er", ".", "_info", "[", "'filename'", "]", "=", "op", ".", "abspath", "(", "er", ".", "filename", ")", "LOGGER", ".", "warning", "(", "pattern", ",", "er", ".", "_info", ")", "if", "error", ":", "sys", ".", "exit", "(", "int", "(", "bool", "(", "errors", ")", ")", ")", "return", "errors" ]
process files and log errors .
train
true
36,306
def send_order_email(request, order):
    """Send the order-receipt email after a successful order.

    Falls back to the new template location and warns about the legacy one.
    """
    settings.clear_cache()
    context = {u'order': order, u'request': request, u'order_items': order.items.all()}
    context.update(order.details_as_dict())
    try:
        get_template(u'shop/email/order_receipt.html')
    except TemplateDoesNotExist:
        receipt_template = u'email/order_receipt'
    else:
        # Legacy template location still present: use it, but warn.
        receipt_template = u'shop/email/order_receipt'
        from warnings import warn
        warn(u'Shop email receipt templates have moved from templates/shop/email/ to templates/email/')
    send_mail_template(settings.SHOP_ORDER_EMAIL_SUBJECT, receipt_template, settings.SHOP_ORDER_FROM_EMAIL, order.billing_detail_email, context=context, addr_bcc=settings.SHOP_ORDER_EMAIL_BCC or None)
[ "def", "send_order_email", "(", "request", ",", "order", ")", ":", "settings", ".", "clear_cache", "(", ")", "order_context", "=", "{", "u'order'", ":", "order", ",", "u'request'", ":", "request", ",", "u'order_items'", ":", "order", ".", "items", ".", "all", "(", ")", "}", "order_context", ".", "update", "(", "order", ".", "details_as_dict", "(", ")", ")", "try", ":", "get_template", "(", "u'shop/email/order_receipt.html'", ")", "except", "TemplateDoesNotExist", ":", "receipt_template", "=", "u'email/order_receipt'", "else", ":", "receipt_template", "=", "u'shop/email/order_receipt'", "from", "warnings", "import", "warn", "warn", "(", "u'Shop email receipt templates have moved from templates/shop/email/ to templates/email/'", ")", "send_mail_template", "(", "settings", ".", "SHOP_ORDER_EMAIL_SUBJECT", ",", "receipt_template", ",", "settings", ".", "SHOP_ORDER_FROM_EMAIL", ",", "order", ".", "billing_detail_email", ",", "context", "=", "order_context", ",", "addr_bcc", "=", "(", "settings", ".", "SHOP_ORDER_EMAIL_BCC", "or", "None", ")", ")" ]
send order receipt email on successful order .
train
false
36,308
def cs_filter(f, filter_, ignore_comment_lines=True):
    """Filter lines, blanking comments and "CS: ... ignore" regions.

    A line matching '# CS: ...<filter_>...ignore [N lines]' causes the next
    N lines (default 1) to be blanked.  Comment lines are blanked unless
    *ignore_comment_lines* is False.  Returns the filtered list of lines.

    FIX: removed the dead `count` accumulator that was incremented but
    never read.
    """
    exp = '^\\s*#\\s+CS:.*%s.*ignore\\D*((\\d+)\\s+line)*'
    re_ignore = re.compile(exp % filter_)
    ignore = 0
    out = []
    for line in f:
        if ignore > 0:
            # Inside an ignored region: blank the line.
            line = ''
            ignore -= 1
        matches = re_ignore.search(line)
        if matches:
            ignore = int(matches.group(2) or 1)
        if ignore_comment_lines and line.lstrip().startswith('#'):
            line = ''
        out.append(line)
    return out
[ "def", "cs_filter", "(", "f", ",", "filter_", ",", "ignore_comment_lines", "=", "True", ")", ":", "exp", "=", "'^\\\\s*#\\\\s+CS:.*%s.*ignore\\\\D*((\\\\d+)\\\\s+line)*'", "re_ignore", "=", "re", ".", "compile", "(", "(", "exp", "%", "filter_", ")", ")", "ignore", "=", "0", "out", "=", "[", "]", "count", "=", "1", "for", "line", "in", "f", ":", "if", "(", "ignore", ">", "0", ")", ":", "line", "=", "''", "ignore", "-=", "1", "matches", "=", "re_ignore", ".", "search", "(", "line", ")", "if", "matches", ":", "ignore", "=", "int", "(", "(", "matches", ".", "group", "(", "2", ")", "or", "1", ")", ")", "if", "(", "ignore_comment_lines", "and", "line", ".", "lstrip", "(", ")", ".", "startswith", "(", "'#'", ")", ")", ":", "line", "=", "''", "out", ".", "append", "(", "line", ")", "count", "+=", "1", "return", "out" ]
filter the file removing comments if requested .
train
false
36,309
def facebook():
    """Login via Facebook OAuth; redirect to plain login if unconfigured."""
    channel = s3db.msg_facebook_login()
    if not channel:
        redirect(URL(f='user', args=request.args, vars=get_vars))
    from s3oauth import FaceBookAccount
    auth.settings.login_form = FaceBookAccount(channel)
    return {'form': auth()}
[ "def", "facebook", "(", ")", ":", "channel", "=", "s3db", ".", "msg_facebook_login", "(", ")", "if", "(", "not", "channel", ")", ":", "redirect", "(", "URL", "(", "f", "=", "'user'", ",", "args", "=", "request", ".", "args", ",", "vars", "=", "get_vars", ")", ")", "from", "s3oauth", "import", "FaceBookAccount", "auth", ".", "settings", ".", "login_form", "=", "FaceBookAccount", "(", "channel", ")", "form", "=", "auth", "(", ")", "return", "{", "'form'", ":", "form", "}" ]
login using facebook .
train
false
36,311
def hough_circle(image, radius, normalize=True, full_output=False):
    """Perform a circular Hough transform.

    *radius* may be a scalar, a list, or an ndarray; it is normalized to an
    integer ndarray before delegating to the C implementation.
    """
    # Exact type checks (not isinstance) preserved from the original contract.
    if type(radius) is list:
        radii = np.array(radius)
    elif type(radius) is np.ndarray:
        radii = radius
    else:
        radii = np.array([radius])
    return _hough_circle(image, radii.astype(np.intp), normalize=normalize, full_output=full_output)
[ "def", "hough_circle", "(", "image", ",", "radius", ",", "normalize", "=", "True", ",", "full_output", "=", "False", ")", ":", "if", "(", "type", "(", "radius", ")", "is", "list", ")", ":", "radius", "=", "np", ".", "array", "(", "radius", ")", "elif", "(", "type", "(", "radius", ")", "is", "not", "np", ".", "ndarray", ")", ":", "radius", "=", "np", ".", "array", "(", "[", "radius", "]", ")", "return", "_hough_circle", "(", "image", ",", "radius", ".", "astype", "(", "np", ".", "intp", ")", ",", "normalize", "=", "normalize", ",", "full_output", "=", "full_output", ")" ]
perform a circular hough transform .
train
false
36,312
def get_ngrams(sent_iterator, n):
    """Yield n-grams over every sentence, with boundary padding.

    Each sentence is padded with n-1 leading (None, '*') markers and one
    trailing (None, 'STOP') marker; each n-gram is a tuple of n items.

    FIX: replaced Python-2-only `xrange` with `range` — identical behavior
    here (iteration only) and portable to Python 3.
    """
    for sent in sent_iterator:
        padded = (n - 1) * [(None, '*')]
        padded.extend(sent)
        padded.append((None, 'STOP'))
        for i in range(len(padded) - n + 1):
            yield tuple(padded[i:i + n])
[ "def", "get_ngrams", "(", "sent_iterator", ",", "n", ")", ":", "for", "sent", "in", "sent_iterator", ":", "w_boundary", "=", "(", "(", "n", "-", "1", ")", "*", "[", "(", "None", ",", "'*'", ")", "]", ")", "w_boundary", ".", "extend", "(", "sent", ")", "w_boundary", ".", "append", "(", "(", "None", ",", "'STOP'", ")", ")", "ngrams", "=", "(", "tuple", "(", "w_boundary", "[", "i", ":", "(", "i", "+", "n", ")", "]", ")", "for", "i", "in", "xrange", "(", "(", "(", "len", "(", "w_boundary", ")", "-", "n", ")", "+", "1", ")", ")", ")", "for", "n_gram", "in", "ngrams", ":", "(", "yield", "n_gram", ")" ]
get a generator that returns n-grams over the entire corpus .
train
false
36,313
def unregisterReapProcessHandler(pid, process):
    """Unregister a process handler previously registered for *pid*.

    Raises RuntimeError if (pid, process) is not currently registered.
    """
    registered = pid in reapProcessHandlers and reapProcessHandlers[pid] == process
    if not registered:
        raise RuntimeError('Try to unregister a process not registered.')
    del reapProcessHandlers[pid]
[ "def", "unregisterReapProcessHandler", "(", "pid", ",", "process", ")", ":", "if", "(", "not", "(", "(", "pid", "in", "reapProcessHandlers", ")", "and", "(", "reapProcessHandlers", "[", "pid", "]", "==", "process", ")", ")", ")", ":", "raise", "RuntimeError", "(", "'Try to unregister a process not registered.'", ")", "del", "reapProcessHandlers", "[", "pid", "]" ]
unregister a process handler previously registered with l{registerreapprocesshandler} .
train
false
36,314
def squared_hinge(y_true, y_pred):
    """Squared hinge loss: mean(max(1 - y_true * y_pred, 0)^2)."""
    truth = tf.cast(y_true, tf.float32)
    pred = tf.cast(y_pred, tf.float32)
    margin = tf.maximum(1.0 - truth * pred, 0.0)
    return tf.reduce_mean(tf.square(margin))
[ "def", "squared_hinge", "(", "y_true", ",", "y_pred", ")", ":", "y_true", "=", "tf", ".", "cast", "(", "y_true", ",", "tf", ".", "float32", ")", "y_pred", "=", "tf", ".", "cast", "(", "y_pred", ",", "tf", ".", "float32", ")", "return", "tf", ".", "reduce_mean", "(", "tf", ".", "square", "(", "tf", ".", "maximum", "(", "(", "1.0", "-", "(", "y_true", "*", "y_pred", ")", ")", ",", "0.0", ")", ")", ")" ]
squared hinge loss .
train
false
36,315
# Multiprocessing worker: unpack (out_dir, subset, total) and write one
# HTML index table for the subset.
def _html_subindex(args): (out_dir, subset, total) = args html.write_index_table(out_dir, total=total, *subset)
[ "def", "_html_subindex", "(", "args", ")", ":", "(", "out_dir", ",", "subset", ",", "total", ")", "=", "args", "html", ".", "write_index_table", "(", "out_dir", ",", "total", "=", "total", ",", "*", "subset", ")" ]
html writer for multiprocessing support .
train
false
36,316
@register.tag
def ifchanged(parser, token):
    """Template tag: render the block only when a value changed since the
    last iteration of the enclosing loop; supports an optional {% else %}.
    """
    bits = token.split_contents()
    nodelist_true = parser.parse(('else', 'endifchanged'))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse(('endifchanged',))
        parser.delete_first_token()
    else:
        nodelist_false = NodeList()
    compiled = [parser.compile_filter(bit) for bit in bits[1:]]
    return IfChangedNode(nodelist_true, nodelist_false, *compiled)
[ "@", "register", ".", "tag", "def", "ifchanged", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "nodelist_true", "=", "parser", ".", "parse", "(", "(", "'else'", ",", "'endifchanged'", ")", ")", "token", "=", "parser", ".", "next_token", "(", ")", "if", "(", "token", ".", "contents", "==", "'else'", ")", ":", "nodelist_false", "=", "parser", ".", "parse", "(", "(", "'endifchanged'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "else", ":", "nodelist_false", "=", "NodeList", "(", ")", "values", "=", "[", "parser", ".", "compile_filter", "(", "bit", ")", "for", "bit", "in", "bits", "[", "1", ":", "]", "]", "return", "IfChangedNode", "(", "nodelist_true", ",", "nodelist_false", ",", "*", "values", ")" ]
checks if a value has changed from the last iteration of a loop .
train
false
36,317
def signed_natsort(data): if (not data): return data if (not all([(isinstance(element, tuple) or isinstance(element, list) or isinstance(element, dict)) for element in data])): try: return sorted(data, key=float) except ValueError: return natsort(data) try: return sorted(data, key=(lambda tup: float(tup[0]))) except ValueError: return natsort(data)
[ "def", "signed_natsort", "(", "data", ")", ":", "if", "(", "not", "data", ")", ":", "return", "data", "if", "(", "not", "all", "(", "[", "(", "isinstance", "(", "element", ",", "tuple", ")", "or", "isinstance", "(", "element", ",", "list", ")", "or", "isinstance", "(", "element", ",", "dict", ")", ")", "for", "element", "in", "data", "]", ")", ")", ":", "try", ":", "return", "sorted", "(", "data", ",", "key", "=", "float", ")", "except", "ValueError", ":", "return", "natsort", "(", "data", ")", "try", ":", "return", "sorted", "(", "data", ",", "key", "=", "(", "lambda", "tup", ":", "float", "(", "tup", "[", "0", "]", ")", ")", ")", "except", "ValueError", ":", "return", "natsort", "(", "data", ")" ]
sort an iterable considering the cases where elements are signed data: list of tuples or strings .
train
false
36,318
def _report_invalid_attribute(name, value, reason): logging.error('invalid Cookie attribute (%s): %r=%r', reason, name, value)
[ "def", "_report_invalid_attribute", "(", "name", ",", "value", ",", "reason", ")", ":", "logging", ".", "error", "(", "'invalid Cookie attribute (%s): %r=%r'", ",", "reason", ",", "name", ",", "value", ")" ]
how this module logs a bad attribute when exception suppressed .
train
false
36,319
def clear_default(key=None, value=None, parent=None, name=None, parenttype=None): conditions = [] values = [] if name: conditions.append(u'name=%s') values.append(name) else: if key: conditions.append(u'defkey=%s') values.append(key) if value: conditions.append(u'defvalue=%s') values.append(value) if parent: conditions.append(u'parent=%s') values.append(parent) if parenttype: conditions.append(u'parenttype=%s') values.append(parenttype) if parent: clear_cache(parent) else: clear_cache(u'__default') clear_cache(u'__global') if (not conditions): raise Exception, u'[clear_default] No key specified.' frappe.db.sql(u'delete from tabDefaultValue where {0}'.format(u' and '.join(conditions)), tuple(values)) _clear_cache(parent)
[ "def", "clear_default", "(", "key", "=", "None", ",", "value", "=", "None", ",", "parent", "=", "None", ",", "name", "=", "None", ",", "parenttype", "=", "None", ")", ":", "conditions", "=", "[", "]", "values", "=", "[", "]", "if", "name", ":", "conditions", ".", "append", "(", "u'name=%s'", ")", "values", ".", "append", "(", "name", ")", "else", ":", "if", "key", ":", "conditions", ".", "append", "(", "u'defkey=%s'", ")", "values", ".", "append", "(", "key", ")", "if", "value", ":", "conditions", ".", "append", "(", "u'defvalue=%s'", ")", "values", ".", "append", "(", "value", ")", "if", "parent", ":", "conditions", ".", "append", "(", "u'parent=%s'", ")", "values", ".", "append", "(", "parent", ")", "if", "parenttype", ":", "conditions", ".", "append", "(", "u'parenttype=%s'", ")", "values", ".", "append", "(", "parenttype", ")", "if", "parent", ":", "clear_cache", "(", "parent", ")", "else", ":", "clear_cache", "(", "u'__default'", ")", "clear_cache", "(", "u'__global'", ")", "if", "(", "not", "conditions", ")", ":", "raise", "Exception", ",", "u'[clear_default] No key specified.'", "frappe", ".", "db", ".", "sql", "(", "u'delete from tabDefaultValue where {0}'", ".", "format", "(", "u' and '", ".", "join", "(", "conditions", ")", ")", ",", "tuple", "(", "values", ")", ")", "_clear_cache", "(", "parent", ")" ]
clear a default value by any of the given parameters and delete caches .
train
false
36,320
def _get_session_id(url, username, password): res = _req_json_rpc(url, '00000000000000000000000000000000', 'call', 'session', 'login', username=username, password=password) return res['ubus_rpc_session']
[ "def", "_get_session_id", "(", "url", ",", "username", ",", "password", ")", ":", "res", "=", "_req_json_rpc", "(", "url", ",", "'00000000000000000000000000000000'", ",", "'call'", ",", "'session'", ",", "'login'", ",", "username", "=", "username", ",", "password", "=", "password", ")", "return", "res", "[", "'ubus_rpc_session'", "]" ]
get the authentication token for the given host+username+password .
train
false
36,322
def list2sym(lst): sym = _join(lst, '_', untag=True) sym = sym.lower() ENT = re.compile('&(\\w+?);') sym = ENT.sub(descape_entity, sym) sym = sym.replace('.', '') return sym
[ "def", "list2sym", "(", "lst", ")", ":", "sym", "=", "_join", "(", "lst", ",", "'_'", ",", "untag", "=", "True", ")", "sym", "=", "sym", ".", "lower", "(", ")", "ENT", "=", "re", ".", "compile", "(", "'&(\\\\w+?);'", ")", "sym", "=", "ENT", ".", "sub", "(", "descape_entity", ",", "sym", ")", "sym", "=", "sym", ".", "replace", "(", "'.'", ",", "''", ")", "return", "sym" ]
convert a list of strings into a canonical symbol .
train
false
36,323
def discrimination(prob, outcome, n_bins=10): prob = np.array(prob) outcome = np.array(outcome) d = 0.0 base_prob = np.mean(outcome) judgement_bins = (np.arange((n_bins + 1)) / n_bins) bin_num = np.digitize(prob, judgement_bins) for j_bin in np.unique(bin_num): in_bin = (bin_num == j_bin) true_bin_prob = np.mean(outcome[in_bin]) d += (np.sum(in_bin) * ((true_bin_prob - base_prob) ** 2)) return (d / len(prob))
[ "def", "discrimination", "(", "prob", ",", "outcome", ",", "n_bins", "=", "10", ")", ":", "prob", "=", "np", ".", "array", "(", "prob", ")", "outcome", "=", "np", ".", "array", "(", "outcome", ")", "d", "=", "0.0", "base_prob", "=", "np", ".", "mean", "(", "outcome", ")", "judgement_bins", "=", "(", "np", ".", "arange", "(", "(", "n_bins", "+", "1", ")", ")", "/", "n_bins", ")", "bin_num", "=", "np", ".", "digitize", "(", "prob", ",", "judgement_bins", ")", "for", "j_bin", "in", "np", ".", "unique", "(", "bin_num", ")", ":", "in_bin", "=", "(", "bin_num", "==", "j_bin", ")", "true_bin_prob", "=", "np", ".", "mean", "(", "outcome", "[", "in_bin", "]", ")", "d", "+=", "(", "np", ".", "sum", "(", "in_bin", ")", "*", "(", "(", "true_bin_prob", "-", "base_prob", ")", "**", "2", ")", ")", "return", "(", "d", "/", "len", "(", "prob", ")", ")" ]
discrimination measurement for a set of predictions .
train
false
36,324
def hermite(n, monic=False): if (n < 0): raise ValueError('n must be nonnegative.') if (n == 0): n1 = (n + 1) else: n1 = n (x, w, mu0) = roots_hermite(n1, mu=True) wfunc = (lambda x: exp(((- x) * x))) if (n == 0): (x, w) = ([], []) hn = (((2 ** n) * _gam((n + 1))) * sqrt(pi)) kn = (2 ** n) p = orthopoly1d(x, w, hn, kn, wfunc, ((- inf), inf), monic, (lambda x: eval_hermite(n, x))) return p
[ "def", "hermite", "(", "n", ",", "monic", "=", "False", ")", ":", "if", "(", "n", "<", "0", ")", ":", "raise", "ValueError", "(", "'n must be nonnegative.'", ")", "if", "(", "n", "==", "0", ")", ":", "n1", "=", "(", "n", "+", "1", ")", "else", ":", "n1", "=", "n", "(", "x", ",", "w", ",", "mu0", ")", "=", "roots_hermite", "(", "n1", ",", "mu", "=", "True", ")", "wfunc", "=", "(", "lambda", "x", ":", "exp", "(", "(", "(", "-", "x", ")", "*", "x", ")", ")", ")", "if", "(", "n", "==", "0", ")", ":", "(", "x", ",", "w", ")", "=", "(", "[", "]", ",", "[", "]", ")", "hn", "=", "(", "(", "(", "2", "**", "n", ")", "*", "_gam", "(", "(", "n", "+", "1", ")", ")", ")", "*", "sqrt", "(", "pi", ")", ")", "kn", "=", "(", "2", "**", "n", ")", "p", "=", "orthopoly1d", "(", "x", ",", "w", ",", "hn", ",", "kn", ",", "wfunc", ",", "(", "(", "-", "inf", ")", ",", "inf", ")", ",", "monic", ",", "(", "lambda", "x", ":", "eval_hermite", "(", "n", ",", "x", ")", ")", ")", "return", "p" ]
physicists hermite polynomial .
train
false
36,329
@core_helper def popular(type_, number, min=1, title=None): if (type_ == 'views'): title = ungettext('{number} view', '{number} views', number) elif (type_ == 'recent views'): title = ungettext('{number} recent view', '{number} recent views', number) elif (not title): raise Exception('popular() did not recieve a valid type_ or title') return snippet('snippets/popular.html', title=title, number=number, min=min)
[ "@", "core_helper", "def", "popular", "(", "type_", ",", "number", ",", "min", "=", "1", ",", "title", "=", "None", ")", ":", "if", "(", "type_", "==", "'views'", ")", ":", "title", "=", "ungettext", "(", "'{number} view'", ",", "'{number} views'", ",", "number", ")", "elif", "(", "type_", "==", "'recent views'", ")", ":", "title", "=", "ungettext", "(", "'{number} recent view'", ",", "'{number} recent views'", ",", "number", ")", "elif", "(", "not", "title", ")", ":", "raise", "Exception", "(", "'popular() did not recieve a valid type_ or title'", ")", "return", "snippet", "(", "'snippets/popular.html'", ",", "title", "=", "title", ",", "number", "=", "number", ",", "min", "=", "min", ")" ]
display a popular icon .
train
false
36,330
def list_clean(string): string = string.split(',') rstring = list() for element in string: rstring.append(element.strip(' ')) return rstring
[ "def", "list_clean", "(", "string", ")", ":", "string", "=", "string", ".", "split", "(", "','", ")", "rstring", "=", "list", "(", ")", "for", "element", "in", "string", ":", "rstring", ".", "append", "(", "element", ".", "strip", "(", "' '", ")", ")", "return", "rstring" ]
transforms a comma seperated string to a list .
train
false
36,332
def _get_non_gfk_field(opts, name): field = opts.get_field(name) if (field.is_relation and ((field.many_to_one and (not field.related_model)) or field.one_to_many)): raise FieldDoesNotExist() if (field.is_relation and (not field.many_to_many) and hasattr(field, 'attname') and (field.attname == name)): raise FieldIsAForeignKeyColumnName() return field
[ "def", "_get_non_gfk_field", "(", "opts", ",", "name", ")", ":", "field", "=", "opts", ".", "get_field", "(", "name", ")", "if", "(", "field", ".", "is_relation", "and", "(", "(", "field", ".", "many_to_one", "and", "(", "not", "field", ".", "related_model", ")", ")", "or", "field", ".", "one_to_many", ")", ")", ":", "raise", "FieldDoesNotExist", "(", ")", "if", "(", "field", ".", "is_relation", "and", "(", "not", "field", ".", "many_to_many", ")", "and", "hasattr", "(", "field", ",", "'attname'", ")", "and", "(", "field", ".", "attname", "==", "name", ")", ")", ":", "raise", "FieldIsAForeignKeyColumnName", "(", ")", "return", "field" ]
for historical reasons .
train
false
36,333
@open_file(1, mode='w') def write_dot(G, path): P = to_pydot(G) path.write(P.to_string()) return
[ "@", "open_file", "(", "1", ",", "mode", "=", "'w'", ")", "def", "write_dot", "(", "G", ",", "path", ")", ":", "P", "=", "to_pydot", "(", "G", ")", "path", ".", "write", "(", "P", ".", "to_string", "(", ")", ")", "return" ]
write networkx graph g to graphviz dot format on path .
train
false
36,334
def get_module_score(user, course, module): def inner_get_module(descriptor): '\n Delegate to get_module_for_descriptor\n ' field_data_cache = FieldDataCache([descriptor], course.id, user) return get_module_for_descriptor(user, _get_mock_request(user), descriptor, field_data_cache, course.id, course=course) modules = yield_dynamic_descriptor_descendants(module, user.id, inner_get_module) return _calculate_score_for_modules(user.id, course, modules)
[ "def", "get_module_score", "(", "user", ",", "course", ",", "module", ")", ":", "def", "inner_get_module", "(", "descriptor", ")", ":", "field_data_cache", "=", "FieldDataCache", "(", "[", "descriptor", "]", ",", "course", ".", "id", ",", "user", ")", "return", "get_module_for_descriptor", "(", "user", ",", "_get_mock_request", "(", "user", ")", ",", "descriptor", ",", "field_data_cache", ",", "course", ".", "id", ",", "course", "=", "course", ")", "modules", "=", "yield_dynamic_descriptor_descendants", "(", "module", ",", "user", ".", "id", ",", "inner_get_module", ")", "return", "_calculate_score_for_modules", "(", "user", ".", "id", ",", "course", ",", "modules", ")" ]
collects all children of the given module and calculates the cumulative score for this set of modules for the given user .
train
false
36,335
def clean_url(url): url = url.encode('utf8') url = ''.join([(urllib.parse.quote(c) if (ord(c) >= 127) else c) for c in url.decode('utf-8')]) return url
[ "def", "clean_url", "(", "url", ")", ":", "url", "=", "url", ".", "encode", "(", "'utf8'", ")", "url", "=", "''", ".", "join", "(", "[", "(", "urllib", ".", "parse", ".", "quote", "(", "c", ")", "if", "(", "ord", "(", "c", ")", ">=", "127", ")", "else", "c", ")", "for", "c", "in", "url", ".", "decode", "(", "'utf-8'", ")", "]", ")", "return", "url" ]
clean a url instance to string following these rules: * if there is a query string .
train
false
36,336
def normalize_callback(cb): if isinstance(cb, Callback): return cb._callback elif isinstance(cb, tuple): return cb else: raise TypeError('Callbacks must be either `Callback` or `tuple`')
[ "def", "normalize_callback", "(", "cb", ")", ":", "if", "isinstance", "(", "cb", ",", "Callback", ")", ":", "return", "cb", ".", "_callback", "elif", "isinstance", "(", "cb", ",", "tuple", ")", ":", "return", "cb", "else", ":", "raise", "TypeError", "(", "'Callbacks must be either `Callback` or `tuple`'", ")" ]
normalizes a callback to a tuple .
train
false
36,337
def is_tool_shed_client(app): return hasattr(app, 'install_model')
[ "def", "is_tool_shed_client", "(", "app", ")", ":", "return", "hasattr", "(", "app", ",", "'install_model'", ")" ]
the tool shed and clients to the tool require a lot of similar functionality in this file but with small differences .
train
false
36,338
def job_from_id(id): try: return Job.fetch(id, connection=_connect()) except NoSuchJobError: raise KeyError(u'There is no job with ID "{}".'.format(id))
[ "def", "job_from_id", "(", "id", ")", ":", "try", ":", "return", "Job", ".", "fetch", "(", "id", ",", "connection", "=", "_connect", "(", ")", ")", "except", "NoSuchJobError", ":", "raise", "KeyError", "(", "u'There is no job with ID \"{}\".'", ".", "format", "(", "id", ")", ")" ]
look up an enqueued job by its id .
train
false
36,339
def sdg_demo(): from nltk.parse import DependencyGraph dg = DependencyGraph(u'\n 1 Ze ze Pron Pron per|3|evofmv|nom 2 su _ _\n 2 had heb V V trans|ovt|1of2of3|ev 0 ROOT _ _\n 3 met met Prep Prep voor 8 mod _ _\n 4 haar haar Pron Pron bez|3|ev|neut|attr 5 det _ _\n 5 moeder moeder N N soort|ev|neut 3 obj1 _ _\n 6 kunnen kan V V hulp|ott|1of2of3|mv 2 vc _ _\n 7 gaan ga V V hulp|inf 6 vc _ _\n 8 winkelen winkel V V intrans|inf 11 cnj _ _\n 9 , , Punc Punc komma 8 punct _ _\n 10 zwemmen zwem V V intrans|inf 11 cnj _ _\n 11 of of Conj Conj neven 7 vc _ _\n 12 terrassen terras N N soort|mv|neut 11 cnj _ _\n 13 . . Punc Punc punt 12 punct _ _\n ') tree = dg.tree() print(tree.pprint())
[ "def", "sdg_demo", "(", ")", ":", "from", "nltk", ".", "parse", "import", "DependencyGraph", "dg", "=", "DependencyGraph", "(", "u'\\n 1 Ze ze Pron Pron per|3|evofmv|nom 2 su _ _\\n 2 had heb V V trans|ovt|1of2of3|ev 0 ROOT _ _\\n 3 met met Prep Prep voor 8 mod _ _\\n 4 haar haar Pron Pron bez|3|ev|neut|attr 5 det _ _\\n 5 moeder moeder N N soort|ev|neut 3 obj1 _ _\\n 6 kunnen kan V V hulp|ott|1of2of3|mv 2 vc _ _\\n 7 gaan ga V V hulp|inf 6 vc _ _\\n 8 winkelen winkel V V intrans|inf 11 cnj _ _\\n 9 , , Punc Punc komma 8 punct _ _\\n 10 zwemmen zwem V V intrans|inf 11 cnj _ _\\n 11 of of Conj Conj neven 7 vc _ _\\n 12 terrassen terras N N soort|mv|neut 11 cnj _ _\\n 13 . . Punc Punc punt 12 punct _ _\\n '", ")", "tree", "=", "dg", ".", "tree", "(", ")", "print", "(", "tree", ".", "pprint", "(", ")", ")" ]
a demonstration of how to read a string representation of a conll format dependency tree .
train
false
36,340
def path_stabilize(filepath): newpath = filepath.replace(os.path.sep, SEP) if isinstance(newpath, text_type): newpath = unicodedata.normalize('NFC', newpath) return newpath
[ "def", "path_stabilize", "(", "filepath", ")", ":", "newpath", "=", "filepath", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "SEP", ")", "if", "isinstance", "(", "newpath", ",", "text_type", ")", ":", "newpath", "=", "unicodedata", ".", "normalize", "(", "'NFC'", ",", "newpath", ")", "return", "newpath" ]
normalize path separater and unicode string .
train
false
36,341
def ufftn(inarray, dim=None): if (dim is None): dim = inarray.ndim outarray = np.fft.fftn(inarray, axes=range((- dim), 0)) return (outarray / np.sqrt(np.prod(inarray.shape[(- dim):])))
[ "def", "ufftn", "(", "inarray", ",", "dim", "=", "None", ")", ":", "if", "(", "dim", "is", "None", ")", ":", "dim", "=", "inarray", ".", "ndim", "outarray", "=", "np", ".", "fft", ".", "fftn", "(", "inarray", ",", "axes", "=", "range", "(", "(", "-", "dim", ")", ",", "0", ")", ")", "return", "(", "outarray", "/", "np", ".", "sqrt", "(", "np", ".", "prod", "(", "inarray", ".", "shape", "[", "(", "-", "dim", ")", ":", "]", ")", ")", ")" ]
n-dimensional unitary fourier transform .
train
false
36,342
def encode64chops(chops): chips = [] for value in chops: chips.append(int2str64(value)) encoded = ','.join(chips) return encoded
[ "def", "encode64chops", "(", "chops", ")", ":", "chips", "=", "[", "]", "for", "value", "in", "chops", ":", "chips", ".", "append", "(", "int2str64", "(", "value", ")", ")", "encoded", "=", "','", ".", "join", "(", "chips", ")", "return", "encoded" ]
base64encodes chops and combines them into a .
train
false
36,343
def _configure_job_metadata(metadata, allow_jagged_rows, allow_quoted_newlines, create_disposition, encoding, field_delimiter, ignore_unknown_values, max_bad_records, quote_character, skip_leading_rows, write_disposition): load_config = metadata['configuration']['load'] if (allow_jagged_rows is not None): load_config['allowJaggedRows'] = allow_jagged_rows if (allow_quoted_newlines is not None): load_config['allowQuotedNewlines'] = allow_quoted_newlines if (create_disposition is not None): load_config['createDisposition'] = create_disposition if (encoding is not None): load_config['encoding'] = encoding if (field_delimiter is not None): load_config['fieldDelimiter'] = field_delimiter if (ignore_unknown_values is not None): load_config['ignoreUnknownValues'] = ignore_unknown_values if (max_bad_records is not None): load_config['maxBadRecords'] = max_bad_records if (quote_character is not None): load_config['quote'] = quote_character if (skip_leading_rows is not None): load_config['skipLeadingRows'] = skip_leading_rows if (write_disposition is not None): load_config['writeDisposition'] = write_disposition
[ "def", "_configure_job_metadata", "(", "metadata", ",", "allow_jagged_rows", ",", "allow_quoted_newlines", ",", "create_disposition", ",", "encoding", ",", "field_delimiter", ",", "ignore_unknown_values", ",", "max_bad_records", ",", "quote_character", ",", "skip_leading_rows", ",", "write_disposition", ")", ":", "load_config", "=", "metadata", "[", "'configuration'", "]", "[", "'load'", "]", "if", "(", "allow_jagged_rows", "is", "not", "None", ")", ":", "load_config", "[", "'allowJaggedRows'", "]", "=", "allow_jagged_rows", "if", "(", "allow_quoted_newlines", "is", "not", "None", ")", ":", "load_config", "[", "'allowQuotedNewlines'", "]", "=", "allow_quoted_newlines", "if", "(", "create_disposition", "is", "not", "None", ")", ":", "load_config", "[", "'createDisposition'", "]", "=", "create_disposition", "if", "(", "encoding", "is", "not", "None", ")", ":", "load_config", "[", "'encoding'", "]", "=", "encoding", "if", "(", "field_delimiter", "is", "not", "None", ")", ":", "load_config", "[", "'fieldDelimiter'", "]", "=", "field_delimiter", "if", "(", "ignore_unknown_values", "is", "not", "None", ")", ":", "load_config", "[", "'ignoreUnknownValues'", "]", "=", "ignore_unknown_values", "if", "(", "max_bad_records", "is", "not", "None", ")", ":", "load_config", "[", "'maxBadRecords'", "]", "=", "max_bad_records", "if", "(", "quote_character", "is", "not", "None", ")", ":", "load_config", "[", "'quote'", "]", "=", "quote_character", "if", "(", "skip_leading_rows", "is", "not", "None", ")", ":", "load_config", "[", "'skipLeadingRows'", "]", "=", "skip_leading_rows", "if", "(", "write_disposition", "is", "not", "None", ")", ":", "load_config", "[", "'writeDisposition'", "]", "=", "write_disposition" ]
helper for :meth:table .
train
false
36,344
@pytest.mark.skipif((not cairosvg), reason='CairoSVG not installed') def test_render_to_png(Chart, datas): file_name = ('/tmp/test_graph-%s.png' % uuid.uuid4()) if os.path.exists(file_name): os.remove(file_name) chart = Chart() chart = make_data(chart, datas) chart.render_to_png(file_name) png = chart._repr_png_() with open(file_name, 'rb') as f: assert (png == f.read()) os.remove(file_name)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "(", "not", "cairosvg", ")", ",", "reason", "=", "'CairoSVG not installed'", ")", "def", "test_render_to_png", "(", "Chart", ",", "datas", ")", ":", "file_name", "=", "(", "'/tmp/test_graph-%s.png'", "%", "uuid", ".", "uuid4", "(", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "file_name", ")", ":", "os", ".", "remove", "(", "file_name", ")", "chart", "=", "Chart", "(", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "chart", ".", "render_to_png", "(", "file_name", ")", "png", "=", "chart", ".", "_repr_png_", "(", ")", "with", "open", "(", "file_name", ",", "'rb'", ")", "as", "f", ":", "assert", "(", "png", "==", "f", ".", "read", "(", ")", ")", "os", ".", "remove", "(", "file_name", ")" ]
test in file png rendering .
train
false
36,345
def enable_sigusr1_handler(): enable_sig_handler('SIGUSR1', _handle_sigusr1) enable_sig_handler('SIGINFO', _handle_sigusr1)
[ "def", "enable_sigusr1_handler", "(", ")", ":", "enable_sig_handler", "(", "'SIGUSR1'", ",", "_handle_sigusr1", ")", "enable_sig_handler", "(", "'SIGINFO'", ",", "_handle_sigusr1", ")" ]
pretty print a stack trace to the console or a debug log under /tmp when any of the salt daemons such as salt-master are sent a sigusr1 .
train
false
36,346
def MimeReplacePart(part, newpart): part.set_payload(newpart.get_payload()) for h in newpart.keys(): del part[h] if ('content-type' in part): del part['content-type'] if ('content-transfer-encoding' in part): del part['content-transfer-encoding'] for (h, v) in newpart.items(): part.add_header(h, v)
[ "def", "MimeReplacePart", "(", "part", ",", "newpart", ")", ":", "part", ".", "set_payload", "(", "newpart", ".", "get_payload", "(", ")", ")", "for", "h", "in", "newpart", ".", "keys", "(", ")", ":", "del", "part", "[", "h", "]", "if", "(", "'content-type'", "in", "part", ")", ":", "del", "part", "[", "'content-type'", "]", "if", "(", "'content-transfer-encoding'", "in", "part", ")", ":", "del", "part", "[", "'content-transfer-encoding'", "]", "for", "(", "h", ",", "v", ")", "in", "newpart", ".", "items", "(", ")", ":", "part", ".", "add_header", "(", "h", ",", "v", ")" ]
replace a mime part with new version retaining headers from the old part that are not in the new part .
train
false
36,348
def hyphenate_date(date_str): match = re.match(u'^(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)$', date_str) if (match is not None): return u'-'.join(match.groups()) else: return date_str
[ "def", "hyphenate_date", "(", "date_str", ")", ":", "match", "=", "re", ".", "match", "(", "u'^(\\\\d\\\\d\\\\d\\\\d)(\\\\d\\\\d)(\\\\d\\\\d)$'", ",", "date_str", ")", "if", "(", "match", "is", "not", "None", ")", ":", "return", "u'-'", ".", "join", "(", "match", ".", "groups", "(", ")", ")", "else", ":", "return", "date_str" ]
convert a date in yyyymmdd format to yyyy-mm-dd format .
train
false
36,349
def test_version_compare(): assert (highest_version(['1.0', '2.0', '0.1']) == '2.0') assert (highest_version(['1.0a1', '1.0']) == '1.0')
[ "def", "test_version_compare", "(", ")", ":", "assert", "(", "highest_version", "(", "[", "'1.0'", ",", "'2.0'", ",", "'0.1'", "]", ")", "==", "'2.0'", ")", "assert", "(", "highest_version", "(", "[", "'1.0a1'", ",", "'1.0'", "]", ")", "==", "'1.0'", ")" ]
test version comparison .
train
false
36,351
def getSidesBasedOnPrecision(elementNode, radius): return int(math.ceil((math.sqrt(((0.5 * radius) / setting.getPrecision(elementNode))) * math.pi)))
[ "def", "getSidesBasedOnPrecision", "(", "elementNode", ",", "radius", ")", ":", "return", "int", "(", "math", ".", "ceil", "(", "(", "math", ".", "sqrt", "(", "(", "(", "0.5", "*", "radius", ")", "/", "setting", ".", "getPrecision", "(", "elementNode", ")", ")", ")", "*", "math", ".", "pi", ")", ")", ")" ]
get the number of poygon sides .
train
false
36,353
def test_steps_parsed_by_scenarios_has_scenarios(): scenario = Scenario.from_string(SCENARIO1) for step in scenario.steps: assert_equals(step.scenario, scenario)
[ "def", "test_steps_parsed_by_scenarios_has_scenarios", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "SCENARIO1", ")", "for", "step", "in", "scenario", ".", "steps", ":", "assert_equals", "(", "step", ".", "scenario", ",", "scenario", ")" ]
steps parsed by scenarios has scenarios .
train
false
36,355
def test_write_noheader_no_delimiter(): out = StringIO() ascii.write(dat, out, Writer=ascii.FixedWidthNoHeader, bookend=False, delimiter=None) assert_equal_splitlines(out.getvalue(), '1.2 "hello" 1 a\n2.4 \'s worlds 2 2\n')
[ "def", "test_write_noheader_no_delimiter", "(", ")", ":", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "dat", ",", "out", ",", "Writer", "=", "ascii", ".", "FixedWidthNoHeader", ",", "bookend", "=", "False", ",", "delimiter", "=", "None", ")", "assert_equal_splitlines", "(", "out", ".", "getvalue", "(", ")", ",", "'1.2 \"hello\" 1 a\\n2.4 \\'s worlds 2 2\\n'", ")" ]
write a table as a fixed width table with no delimiter .
train
false
36,356
def smart_unicode(s, strings_only=False, errors='strict'): return django.utils.encoding.smart_unicode(s, get_site_encoding(), strings_only, errors)
[ "def", "smart_unicode", "(", "s", ",", "strings_only", "=", "False", ",", "errors", "=", "'strict'", ")", ":", "return", "django", ".", "utils", ".", "encoding", ".", "smart_unicode", "(", "s", ",", "get_site_encoding", "(", ")", ",", "strings_only", ",", "errors", ")" ]
returns a unicode object representing s .
train
false
36,357
def cmd_alias(args): usage = 'usage: alias <add|remove|list>' if ((len(args) < 1) or (args[0] == 'list')): if (len(args) >= 2): wildcard = args[1].upper() else: wildcard = '*' for a in sorted(mpstate.aliases.keys()): if fnmatch.fnmatch(a.upper(), wildcard): print ('%-15s : %s' % (a, mpstate.aliases[a])) elif (args[0] == 'add'): if (len(args) < 3): print usage return a = args[1] mpstate.aliases[a] = ' '.join(args[2:]) elif (args[0] == 'remove'): if (len(args) != 2): print usage return a = args[1] if (a in mpstate.aliases): mpstate.aliases.pop(a) else: print ('no alias %s' % a) else: print usage return
[ "def", "cmd_alias", "(", "args", ")", ":", "usage", "=", "'usage: alias <add|remove|list>'", "if", "(", "(", "len", "(", "args", ")", "<", "1", ")", "or", "(", "args", "[", "0", "]", "==", "'list'", ")", ")", ":", "if", "(", "len", "(", "args", ")", ">=", "2", ")", ":", "wildcard", "=", "args", "[", "1", "]", ".", "upper", "(", ")", "else", ":", "wildcard", "=", "'*'", "for", "a", "in", "sorted", "(", "mpstate", ".", "aliases", ".", "keys", "(", ")", ")", ":", "if", "fnmatch", ".", "fnmatch", "(", "a", ".", "upper", "(", ")", ",", "wildcard", ")", ":", "print", "(", "'%-15s : %s'", "%", "(", "a", ",", "mpstate", ".", "aliases", "[", "a", "]", ")", ")", "elif", "(", "args", "[", "0", "]", "==", "'add'", ")", ":", "if", "(", "len", "(", "args", ")", "<", "3", ")", ":", "print", "usage", "return", "a", "=", "args", "[", "1", "]", "mpstate", ".", "aliases", "[", "a", "]", "=", "' '", ".", "join", "(", "args", "[", "2", ":", "]", ")", "elif", "(", "args", "[", "0", "]", "==", "'remove'", ")", ":", "if", "(", "len", "(", "args", ")", "!=", "2", ")", ":", "print", "usage", "return", "a", "=", "args", "[", "1", "]", "if", "(", "a", "in", "mpstate", ".", "aliases", ")", ":", "mpstate", ".", "aliases", ".", "pop", "(", "a", ")", "else", ":", "print", "(", "'no alias %s'", "%", "a", ")", "else", ":", "print", "usage", "return" ]
alias commands .
train
true
36,358
def winTime(): return (systime.clock() + START_TIME)
[ "def", "winTime", "(", ")", ":", "return", "(", "systime", ".", "clock", "(", ")", "+", "START_TIME", ")" ]
return the current time in seconds with high precision (windows version .
train
false
36,359
def restore_snapshots(): logging.info('Restoring Cassandra snapshots.') for directory in CASSANDRA_DATA_SUBDIRS: data_dir = '{0}/{1}/{2}/'.format(APPSCALE_DATA_DIR, 'cassandra', directory) logging.debug('Restoring in dir {0}'.format(data_dir)) for (path, _, filenames) in os.walk(data_dir): for filename in filenames: logging.debug('Restoring: {0}'.format(filename)) if (not filename): logging.warn('skipping...') continue full_path = '{0}/{1}'.format(path, filename) new_full_path = '{0}/../../{1}'.format(path, filename) logging.debug('{0} -> {1}'.format(full_path, new_full_path)) if (not backup_recovery_helper.rename(full_path, new_full_path)): logging.error('Error while moving Cassandra snapshot in place. Aborting restore...') return False logging.info('Done restoring Cassandra snapshots.') return True
[ "def", "restore_snapshots", "(", ")", ":", "logging", ".", "info", "(", "'Restoring Cassandra snapshots.'", ")", "for", "directory", "in", "CASSANDRA_DATA_SUBDIRS", ":", "data_dir", "=", "'{0}/{1}/{2}/'", ".", "format", "(", "APPSCALE_DATA_DIR", ",", "'cassandra'", ",", "directory", ")", "logging", ".", "debug", "(", "'Restoring in dir {0}'", ".", "format", "(", "data_dir", ")", ")", "for", "(", "path", ",", "_", ",", "filenames", ")", "in", "os", ".", "walk", "(", "data_dir", ")", ":", "for", "filename", "in", "filenames", ":", "logging", ".", "debug", "(", "'Restoring: {0}'", ".", "format", "(", "filename", ")", ")", "if", "(", "not", "filename", ")", ":", "logging", ".", "warn", "(", "'skipping...'", ")", "continue", "full_path", "=", "'{0}/{1}'", ".", "format", "(", "path", ",", "filename", ")", "new_full_path", "=", "'{0}/../../{1}'", ".", "format", "(", "path", ",", "filename", ")", "logging", ".", "debug", "(", "'{0} -> {1}'", ".", "format", "(", "full_path", ",", "new_full_path", ")", ")", "if", "(", "not", "backup_recovery_helper", ".", "rename", "(", "full_path", ",", "new_full_path", ")", ")", ":", "logging", ".", "error", "(", "'Error while moving Cassandra snapshot in place. Aborting restore...'", ")", "return", "False", "logging", ".", "info", "(", "'Done restoring Cassandra snapshots.'", ")", "return", "True" ]
restore snapshot into correct directories .
train
false
36,361
def lbp_sub(f, g): if (sig_cmp(Sign(f), Sign(g), Polyn(f).ring.order) < 0): max_poly = g else: max_poly = f ret = (Polyn(f) - Polyn(g)) return lbp(Sign(max_poly), ret, Num(max_poly))
[ "def", "lbp_sub", "(", "f", ",", "g", ")", ":", "if", "(", "sig_cmp", "(", "Sign", "(", "f", ")", ",", "Sign", "(", "g", ")", ",", "Polyn", "(", "f", ")", ".", "ring", ".", "order", ")", "<", "0", ")", ":", "max_poly", "=", "g", "else", ":", "max_poly", "=", "f", "ret", "=", "(", "Polyn", "(", "f", ")", "-", "Polyn", "(", "g", ")", ")", "return", "lbp", "(", "Sign", "(", "max_poly", ")", ",", "ret", ",", "Num", "(", "max_poly", ")", ")" ]
subtract labeled polynomial g from f .
train
false
36,362
def setup_test_files(): dir1 = tempfile.mkdtemp('dir1') dir2 = tempfile.mkdtemp('dir2') config1 = os.path.join(dir1, 'config.txt') config2 = os.path.join(dir2, 'config.txt') with open(config1, 'w') as file_fd: file_fd.write('directive-dir1') with open(config2, 'w') as file_fd: file_fd.write('directive-dir2') sets = [set([config1]), set([config2]), set([config1, config2])] return (config1, config2, dir1, dir2, sets)
[ "def", "setup_test_files", "(", ")", ":", "dir1", "=", "tempfile", ".", "mkdtemp", "(", "'dir1'", ")", "dir2", "=", "tempfile", ".", "mkdtemp", "(", "'dir2'", ")", "config1", "=", "os", ".", "path", ".", "join", "(", "dir1", ",", "'config.txt'", ")", "config2", "=", "os", ".", "path", ".", "join", "(", "dir2", ",", "'config.txt'", ")", "with", "open", "(", "config1", ",", "'w'", ")", "as", "file_fd", ":", "file_fd", ".", "write", "(", "'directive-dir1'", ")", "with", "open", "(", "config2", ",", "'w'", ")", "as", "file_fd", ":", "file_fd", ".", "write", "(", "'directive-dir2'", ")", "sets", "=", "[", "set", "(", "[", "config1", "]", ")", ",", "set", "(", "[", "config2", "]", ")", ",", "set", "(", "[", "config1", ",", "config2", "]", ")", "]", "return", "(", "config1", ",", "config2", ",", "dir1", ",", "dir2", ",", "sets", ")" ]
setup sample configuration files .
train
false