Dataset schema (column name, dtype, observed range):

    id_within_dataset      int64     values 1 to 55.5k
    snippet                string    lengths 19 to 14.2k
    tokens                 list      lengths 6 to 1.63k
    nl                     string    lengths 6 to 352
    split_within_dataset   string    1 value
    is_duplicated          bool      2 classes
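A minimal sketch of how records with this schema might be consumed, assuming the rows are serialized as JSON lines under the field names above; the filename and the filtering choices are illustrative assumptions, not part of the dataset:

    import json

    def iter_records(path='code_nl_pairs.jsonl'):
        # Hypothetical JSON-lines dump; one record of the schema above per line.
        with open(path) as fh:
            for line in fh:
                record = json.loads(line)
                # Keep unique training rows only.
                if record['is_duplicated'] or record['split_within_dataset'] != 'train':
                    continue
                yield record['snippet'], record['nl']

    if __name__ == '__main__':
        for snippet, nl in iter_records():
            print('%s -> %d chars of code' % (nl, len(snippet)))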
11,926
def create_mock_connection(token='snowman-frosty'):
    if (not (token == 'snowman-frosty')):
        return None
    mock_connection = mock.create_autospec(Connection)
    mock_connection.token = token
    mock_connection.get_dataverses.return_value = [create_mock_dataverse('Example 1'), create_mock_dataverse('Example 2'), create_mock_dataverse('Example 3')]

    def _get_dataverse(alias):
        return next((dataverse for dataverse in mock_connection.get_dataverses() if ((alias is not None) and (dataverse.title[(-1)] == alias[(-1)]))), None)

    mock_connection.get_dataverse = mock.MagicMock(side_effect=_get_dataverse)
    mock_connection.get_dataverse.return_value = create_mock_dataverse()
    return mock_connection
create a mock dataverse connection .
train
false
11,928
def assert_events_equal(expected, actual):
    assert_event_matches(expected, actual, tolerate=EventMatchTolerates.strict())
strict comparison of two events .
train
false
11,929
def classify(model_data_path, image_paths):
    spec = models.get_data_spec(model_class=models.GoogleNet)
    input_node = tf.placeholder(tf.float32, shape=(None, spec.crop_size, spec.crop_size, spec.channels))
    net = models.GoogleNet({'data': input_node})
    image_producer = dataset.ImageProducer(image_paths=image_paths, data_spec=spec)
    with tf.Session() as sesh:
        coordinator = tf.train.Coordinator()
        threads = image_producer.start(session=sesh, coordinator=coordinator)
        print 'Loading the model'
        net.load(model_data_path, sesh)
        print 'Loading the images'
        (indices, input_images) = image_producer.get(sesh)
        print 'Classifying'
        probs = sesh.run(net.get_output(), feed_dict={input_node: input_images})
        display_results([image_paths[i] for i in indices], probs)
        coordinator.request_stop()
        coordinator.join(threads, stop_grace_period_secs=2)
classify an observation into a class .
train
false
11,930
def boxcar(M, sym=True):
    if _len_guards(M):
        return np.ones(M)
    (M, needs_trunc) = _extend(M, sym)
    w = np.ones(M, float)
    return _truncate(w, needs_trunc)
return a boxcar or rectangular window .
train
false
11,932
def project(v, w):
    coefficient = dot(v, w)
    return scalar_multiply(coefficient, w)
runs commands within the projects directory .
train
false
11,933
def pick_context_manager_reader_allow_async(f):
    @functools.wraps(f)
    def wrapped(context, *args, **kwargs):
        ctxt_mgr = get_context_manager(context)
        with ctxt_mgr.reader.allow_async.using(context):
            return f(context, *args, **kwargs)
    return wrapped
decorator to use a reader .
train
false
11,936
def test_guess_with_names_arg():
    dat = ascii.read(['1,2', '3,4'], names=('a', 'b'))
    assert (len(dat) == 2)
    assert (dat.colnames == ['a', 'b'])
    dat = ascii.read(['c,d', '3,4'], names=('a', 'b'))
    assert (len(dat) == 1)
    assert (dat.colnames == ['a', 'b'])
    dat = ascii.read(['c d', 'e f'], names=('a', 'b'))
    assert (len(dat) == 1)
    assert (dat.colnames == ['a', 'b'])
make sure reading a table with guess=true gives the expected result when the names arg is specified .
train
false
11,940
def delete_api_stage(restApiId, stageName, region=None, key=None, keyid=None, profile=None):
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        conn.delete_stage(restApiId=restApiId, stageName=stageName)
        return {'deleted': True}
    except ClientError as e:
        return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
deletes stage identified by stagename from api identified by restapiid cli example: .
train
false
11,942
def _download_dataset(dataset_dir):
    for filename in [_TRAIN_DATA_FILENAME, _TRAIN_LABELS_FILENAME, _TEST_DATA_FILENAME, _TEST_LABELS_FILENAME]:
        filepath = os.path.join(dataset_dir, filename)
        if (not os.path.exists(filepath)):
            print(('Downloading file %s...' % filename))

            def _progress(count, block_size, total_size):
                sys.stdout.write(('\r>> Downloading %.1f%%' % ((float((count * block_size)) / float(total_size)) * 100.0)))
                sys.stdout.flush()

            (filepath, _) = urllib.request.urlretrieve((_DATA_URL + filename), filepath, _progress)
            print()
            with tf.gfile.GFile(filepath) as f:
                size = f.Size()
            print('Successfully downloaded', filename, size, 'bytes.')
downloads mnist locally .
train
false
11,943
def instantiate(name, *args, **kwargs):
    return get_cls_by_name(name)(*args, **kwargs)
instantiate class by name .
train
false
11,944
def in_importlib(frame):
    return (frame.f_code.co_filename == '<frozen importlib._bootstrap>')
helper for checking if a filename is in importlib guts .
train
false
11,946
def IsImageEqual(testname, image_name):
    image1 = ('results/baseline/%s/%s/%s' % (options.conf, testname, image_name))
    image2 = ('results/current/%s/%s/Run 1/%s' % (options.conf, testname, image_name))
    return (Md5ForFile(image1) == Md5ForFile(image2))
check if the given image is equal to the baseline image for this test .
train
false
11,947
def pull(cwd, opts=None, user=None, identity=None, repository=None):
    cmd = ['hg', 'pull']
    if identity:
        cmd.extend(_ssh_flag(identity))
    if opts:
        for opt in opts.split():
            cmd.append(opt)
    if (repository is not None):
        cmd.append(repository)
    ret = __salt__['cmd.run_all'](cmd, cwd=cwd, runas=user, python_shell=False)
    if (ret['retcode'] != 0):
        raise CommandExecutionError('Hg command failed: {0}'.format(ret.get('stderr', ret['stdout'])))
    return ret['stdout']
pull changes from the default remote repository .
train
true
11,949
def dmp_pexquo(f, g, u, K):
    (q, r) = dmp_pdiv(f, g, u, K)
    if dmp_zero_p(r, u):
        return q
    else:
        raise ExactQuotientFailed(f, g)
polynomial pseudo-quotient in k[x] .
train
false
11,950
def namenode_format(force=None):
    force_param = ''
    if force:
        force_param = '-force'
    return _hadoop_cmd('namenode', 'format', '-nonInteractive', force_param)
format a name node .
train
false
11,951
def _mod_repo_in_file(alias, repostr, filepath):
    with open(filepath) as fhandle:
        output = []
        for line in fhandle:
            if (alias not in line):
                output.append(line)
            else:
                output.append((repostr + '\n'))
    with open(filepath, 'w') as fhandle:
        fhandle.writelines(output)
replace a repo entry in filepath with repostr .
train
false
11,953
def test_good_algo_option(script, tmpdir):
    expected = '--hash=sha512:9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043'
    result = script.pip('hash', '-a', 'sha512', _hello_file(tmpdir))
    assert (expected in str(result))
make sure the -a option works .
train
false
11,954
def next_setting(hass, entity_id=None):
    utc_next = next_setting_utc(hass, entity_id)
    return (dt_util.as_local(utc_next) if utc_next else None)
local datetime object of the next sun setting .
train
false
11,956
def StripTypeInfo(rendered_data):
    if isinstance(rendered_data, (list, tuple)):
        return [StripTypeInfo(d) for d in rendered_data]
    elif isinstance(rendered_data, dict):
        if (('value' in rendered_data) and ('type' in rendered_data)):
            return StripTypeInfo(rendered_data['value'])
        else:
            result = {}
            for (k, v) in rendered_data.items():
                result[k] = StripTypeInfo(v)
            return result
    else:
        return rendered_data
strips type information from rendered data .
train
true
11,957
def reset():
    _runtime.reset()
reset records that match ip or username .
train
false
11,958
def pad_dims(input, leftdims, rightdims):
    assert (input.ndim >= rightdims)
    if (input.ndim == (leftdims + rightdims)):
        return input
    img_shape = input.shape[(- rightdims):]
    non_pool_ndim = (input.ndim - rightdims)
    if (non_pool_ndim < leftdims):
        dummy_dims = tensor.as_tensor(([1] * (leftdims - non_pool_ndim)))
        new_shape = tensor.join(0, dummy_dims, input.shape[:non_pool_ndim], img_shape)
    else:
        batched_ndim = ((non_pool_ndim - leftdims) + 1)
        batch_size = tensor.prod(input.shape[:batched_ndim])
        batch_size = tensor.shape_padright(batch_size, 1)
        new_shape = tensor.join(0, batch_size, input.shape[batched_ndim:non_pool_ndim], img_shape)
    new_shape = tensor.cast(new_shape, 'int64')
    input_ND = GpuReshape((leftdims + rightdims))(input, new_shape)
    return input_ND
reshapes the input to a tensor this helper function is used to convert pooling inputs with arbitrary non-pooling dimensions to the correct number of dimensions for the gpu pooling ops .
train
false
11,959
def at_server_reload_stop():
    pass
this is called only time the server stops before a reload .
train
false
11,960
def add_organization(organization_data):
    if (not organizations_enabled()):
        return None
    from organizations import api as organizations_api
    return organizations_api.add_organization(organization_data=organization_data)
client api operation adapter/wrapper .
train
false
11,961
def get_user_program_credentials(user):
    programs_credentials_data = []
    credential_configuration = CredentialsApiConfig.current()
    if (not credential_configuration.is_learner_issuance_enabled):
        log.debug(u'Display of certificates for programs is disabled.')
        return programs_credentials_data
    credentials = get_user_credentials(user)
    if (not credentials):
        log.info(u'No credential earned by the given user.')
        return programs_credentials_data
    programs_credentials = []
    for credential in credentials:
        try:
            if (u'program_uuid' in credential[u'credential']):
                programs_credentials.append(credential)
        except KeyError:
            log.exception(u'Invalid credential structure: %r', credential)
    if programs_credentials:
        programs_credentials_data = get_programs_for_credentials(user, programs_credentials)
    return programs_credentials_data
given a user .
train
false
11,962
def isReadable(poller):
    for fdmask in poller.poll(0):
        mask = fdmask[1]
        if (mask & POLLIN):
            return True
check whether a poll object has a readable fd .
train
false
11,963
def lag_plot(series, lag=1, ax=None, **kwds):
    import matplotlib.pyplot as plt
    kwds.setdefault('c', plt.rcParams['patch.facecolor'])
    data = series.values
    y1 = data[:(- lag)]
    y2 = data[lag:]
    if (ax is None):
        ax = plt.gca()
    ax.set_xlabel('y(t)')
    ax.set_ylabel(('y(t + %s)' % lag))
    ax.scatter(y1, y2, **kwds)
    return ax
lag plot for time series .
train
true
11,964
def base64unpickle(value, unsafe=False):
    retVal = None

    def _(self):
        if (len(self.stack) > 1):
            func = self.stack[(-2)]
            if (func not in PICKLE_REDUCE_WHITELIST):
                raise Exception, 'abusing reduce() is bad, Mkay!'
        self.load_reduce()

    def loads(str):
        f = StringIO.StringIO(str)
        if unsafe:
            unpickler = picklePy.Unpickler(f)
            unpickler.dispatch[picklePy.REDUCE] = _
        else:
            unpickler = pickle.Unpickler(f)
        return unpickler.load()

    try:
        retVal = loads(base64decode(value))
    except TypeError:
        retVal = loads(base64decode(bytes(value)))
    return retVal
decodes value from base64 to plain format and deserializes its content .
train
false
11,965
def uses_deprecated(*messages):
    @decorator
    def decorate(fn, *args, **kw):
        with expect_deprecated(*messages):
            return fn(*args, **kw)
    return decorate
mark a test as immune from fatal deprecation warnings .
train
false
11,966
def shows_by_exact_name(normalized_name, session=None):
    return session.query(Series).filter((Series._name_normalized == normalized_name)).order_by(func.char_length(Series.name)).all()
returns all series matching normalized_name .
train
false
11,967
def _valueWithType(tag, tagValue):
    tagType = tag.get('type')
    if (tagType == 'int'):
        tagValue = int(tagValue)
    elif (tagType == 'float'):
        tagValue = float(tagValue)
    return tagValue
return tagvalue .
train
false
11,968
def add_parser_help(p):
    p.add_argument(u'-h', u'--help', action=argparse._HelpAction, help=u'Show this help message and exit.')
so we can use consistent capitalization and periods in the help .
train
false
11,969
def get_current_comp(info):
    comp = None
    first_comp = (-1)
    for (k, chan) in enumerate(info['chs']):
        if (chan['kind'] == FIFF.FIFFV_MEG_CH):
            comp = (int(chan['coil_type']) >> 16)
            if (first_comp < 0):
                first_comp = comp
            elif (comp != first_comp):
                raise ValueError('Compensation is not set equally on all MEG channels')
    return comp
get the current compensation in effect in the data .
train
false
11,970
def _untracked_custom_unit_found(name):
    unit_path = os.path.join('/etc/systemd/system', _canonical_unit_name(name))
    return (os.access(unit_path, os.R_OK) and (not _check_available(name)))
if the passed service name is not available .
train
false
11,971
def avg_and_total(iterable):
    items = 0
    total = 0.0
    for item in iterable:
        total += item
        items += 1
    return ((total / items), total)
compute the average over a numeric iterable .
train
false
11,972
def idz_sfrm(l, n, w, x):
    return _id.idz_sfrm(l, n, w, x)
transform complex vector via a composition of rokhlins random transform .
train
false
11,973
def _api_get_scripts(name, output, kwargs):
    data = [unicoder(val) for val in list_scripts()]
    return report(output, keyword='scripts', data=data)
api: accepts output .
train
false
11,974
def iadd(a, b):
    a += b
    return a
same as a += b .
train
false
11,975
def _next_in_mro(cls):
    next_in_mro = object
    for (i, c) in enumerate(cls.__mro__[:(-1)]):
        if (isinstance(c, GenericMeta) and (_gorg(c) is Generic)):
            next_in_mro = cls.__mro__[(i + 1)]
    return next_in_mro
helper for generic .
train
true
11,976
def test_suggested_multiple_column_names_with_alias(completer, complete_event):
    text = u'SELECT p.id, p. from custom.products p'
    position = len(u'SELECT u.id, u.')
    result = set(completer.get_completions(Document(text=text, cursor_position=position), complete_event))
    assert (set(result) == set(testdata.columns(u'products', u'custom')))
suggest column names on table alias and dot when selecting multiple columns from table .
train
false
11,977
def get_weight(name, backend, socket='/var/run/haproxy.sock'):
    ha_conn = _get_conn(socket)
    ha_cmd = haproxy.cmds.getWeight(server=name, backend=backend)
    return ha_conn.sendCmd(ha_cmd)
get server weight name server name backend haproxy backend socket haproxy stats socket cli example: .
train
true
11,978
def iterbytes(b):
    if isinstance(b, memoryview):
        b = b.tobytes()
    i = 0
    while True:
        a = b[i:(i + 1)]
        i += 1
        if a:
            (yield a)
        else:
            break
iterate over bytes .
train
false
11,979
def clean_gallery_out(build_dir):
    build_image_dir = os.path.join(build_dir, '_images')
    if os.path.exists(build_image_dir):
        filelist = os.listdir(build_image_dir)
        for filename in filelist:
            if (filename.startswith('sphx_glr') and filename.endswith('png')):
                os.remove(os.path.join(build_image_dir, filename))
deletes images under the sphx_glr namespace in the build directory .
train
true
11,980
def get_initializable_thread_fields(context):
    ret = get_editable_fields(Thread(user_id=context['cc_requester']['id'], type='thread'), context)
    ret |= NON_UPDATABLE_THREAD_FIELDS
    return ret
return the set of fields that the requester can initialize for a thread any field that is editable by the author should also be initializable .
train
false
11,981
def lookupMailRename(name, timeout=None):
    return getResolver().lookupMailRename(name, timeout)
perform an mr record lookup .
train
false
11,982
def system_types():
    ret = {}
    for line in __salt__['cmd.run']('sfdisk -T').splitlines():
        if (not line):
            continue
        if line.startswith('Id'):
            continue
        comps = line.strip().split()
        ret[comps[0]] = comps[1]
    return ret
list the system types that are supported by the installed version of sfdisk cli example: .
train
true
11,983
def call_for_nodes(node, callback, recursive=False):
    result = callback(node)
    if (recursive and (not result)):
        for child in get_child_nodes(node):
            call_for_nodes(child, callback, recursive)
if callback returns true the child nodes are skipped .
train
true
11,984
def uniqueDLLNames():
    createAssembly('', 'ZERO', 1, default_filename='0')
    createAssembly('', 'ONE', 1, default_filename='1')
    createAssembly('', 'a', 1, default_filename='a')
    createAssembly('', 'UNDERSCORE', 1, default_filename='_')
    createAssembly('', 'WHITESPACE', 1, default_filename='a A')
    temp = ''
    for i in xrange(0, 15):
        temp = (temp + 'aaaaaaaaaa')
    createAssembly('', 'BIGFILENAME', 1, default_filename=temp)
creates eccentric dll names to ensure ip still loads them .
train
false
11,987
def get_wsgi_application():
    return WSGIHandler()
the public interface to djangos wsgi support .
train
false
11,988
def lchown(path, user, group=None, pgroup=None):
    if group:
        func_name = '{0}.lchown'.format(__virtualname__)
        if (__opts__.get('fun', '') == func_name):
            log.info('The group parameter has no effect when using {0} on Windows systems; see function docs for details.'.format(func_name))
        log.debug('win_file.py {0} Ignoring the group parameter for {1}'.format(func_name, path))
        group = None
    return chown(path, user, group, pgroup, follow_symlinks=False)
chown a file .
train
true
11,989
def fontifyPythonNode(node):
    oldio = cStringIO.StringIO()
    latex.getLatexText(node, oldio.write, entities={'lt': '<', 'gt': '>', 'amp': '&'})
    oldio = cStringIO.StringIO((oldio.getvalue().strip() + '\n'))
    howManyLines = len(oldio.getvalue().splitlines())
    newio = cStringIO.StringIO()
    htmlizer.filter(oldio, newio, writer=htmlizer.SmallerHTMLWriter)
    lineLabels = _makeLineNumbers(howManyLines)
    newel = dom.parseString(newio.getvalue()).documentElement
    newel.setAttribute('class', 'python')
    node.parentNode.replaceChild(newel, node)
    newel.insertBefore(lineLabels, newel.firstChild)
syntax color the given node containing python source code .
train
false
11,990
def patch_user_admin():
    if (not getattr(UserAdmin, '_monkeyed', False)):
        UserAdmin._monkeyed = True
        UserAdmin.actions = [_activate_users, _deactivate_users]
prevent user objects from being deleted .
train
false
11,991
def ctcpExtract(message):
    extended_messages = []
    normal_messages = []
    retval = {'extended': extended_messages, 'normal': normal_messages}
    messages = string.split(message, X_DELIM)
    odd = 0
    while messages:
        if odd:
            extended_messages.append(messages.pop(0))
        else:
            normal_messages.append(messages.pop(0))
        odd = (not odd)
    extended_messages[:] = filter(None, extended_messages)
    normal_messages[:] = filter(None, normal_messages)
    extended_messages[:] = map(ctcpDequote, extended_messages)
    for i in xrange(len(extended_messages)):
        m = string.split(extended_messages[i], SPC, 1)
        tag = m[0]
        if (len(m) > 1):
            data = m[1]
        else:
            data = None
        extended_messages[i] = (tag, data)
    return retval
extract ctcp data from a string .
train
false
11,992
def Char(c):
    if (len(c) == 1):
        result = CodeRange(ord(c), (ord(c) + 1))
    else:
        result = SpecialSymbol(c)
    result.str = ('Char(%s)' % repr(c))
    return result
char(c) is an re which matches the character |c| .
train
false
11,994
def init(mpstate):
    return SerialModule(mpstate)
init an enforcer class .
train
false
11,997
@_ConfigurableFilter(executable='HTML_TIDY_EXECUTABLE')
def html_tidy_withconfig(infile, executable='tidy5'):
    return _html_tidy_runner(infile, '-quiet --show-info no --show-warnings no -utf8 -indent -config tidy5.conf -modify %1', executable=executable)
run html tidy with tidy5 .
train
false
11,998
def datetime_from_string(s):
    return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%f')
return a standard datetime .
train
false
12,000
def builtin_format_code(index):
    return BUILTIN_FORMATS[index]
return one of the standard format codes by index .
train
false
12,001
def _is_spark_step_type(step_type):
    return (step_type.split('_')[0] == 'spark')
does the given step type indicate that it uses spark? .
train
false
12,002
def _string_to_bool(s):
    if (not is_string_like(s)):
        return bool(s)
    if (s.lower() in [u'on', u'true']):
        return True
    if (s.lower() in [u'off', u'false']):
        return False
    raise ValueError((u'String "%s" must be one of: "on", "off", "true", or "false"' % s))
parses the string argument as a boolean .
train
false
12,004
def waypoint_upload():
    return dict()
custom view temporary: likely to be refactored into the main waypoint controller .
train
false
12,005
def decistmt(s):
    result = []
    g = generate_tokens(StringIO(s).readline)
    for (toknum, tokval, _, _, _) in g:
        if ((toknum == NUMBER) and ('.' in tokval)):
            result.extend([(NAME, 'Decimal'), (OP, '('), (STRING, repr(tokval)), (OP, ')')])
        else:
            result.append((toknum, tokval))
    return untokenize(result)
substitute decimals for floats in a string of statements .
train
false
12,006
def blockingCallFromThread(reactor, f, *a, **kw):
    queue = Queue.Queue()

    def _callFromThread():
        result = defer.maybeDeferred(f, *a, **kw)
        result.addBoth(queue.put)

    reactor.callFromThread(_callFromThread)
    result = queue.get()
    if isinstance(result, failure.Failure):
        result.raiseException()
    return result
run a function in the reactor from a thread .
train
false
12,007
def determine_service_endpoints(options):
    result = {}
    default_host_list = [name for name in options.get('service_hosts', '').split(',') if name]
    all_services = SpectatorClient.DEFAULT_SERVICE_PORT_MAP.keys()
    for service in all_services:
        endpoints = _collect_endpoints(service, options, default_host_list)
        if endpoints:
            result[service] = endpoints
    return result
determine the list of spectator endpoints to poll .
train
false
12,009
def combine_sample_dicts(sample_dicts):
    all_otu_ids = []
    for s in sample_dicts:
        all_otu_ids.extend(s.keys())
    all_otu_ids = list(set(all_otu_ids))
    all_otu_ids = natsort(all_otu_ids)
    indices = {}
    for i in range(len(all_otu_ids)):
        indices[all_otu_ids[i]] = i
    otu_mtx = zeros((len(all_otu_ids), len(sample_dicts)), int)
    for (i, sample_dict) in enumerate(sample_dicts):
        for (otu, abund) in sample_dict.items():
            otu_mtx[(indices[otu], i)] = abund
    return (otu_mtx, all_otu_ids)
combines a list of sample_dicts into one otu table sample dicts is a list of dicts .
train
false
12,010
def _lowess_update_nn(x, cur_nn, i):
    while True:
        if (cur_nn[1] < x.size):
            left_dist = (x[i] - x[cur_nn[0]])
            new_right_dist = (x[cur_nn[1]] - x[i])
            if (new_right_dist < left_dist):
                cur_nn[0] = (cur_nn[0] + 1)
                cur_nn[1] = (cur_nn[1] + 1)
            else:
                break
        else:
            break
update the endpoints of the nearest neighbors to the ith point .
train
false
12,012
def create_pairtree_marker(folder):
    if (not (folder[:(-1)] == '/')):
        folder = (folder + '/')
    directory = os.path.dirname(folder)
    if (not os.path.exists(directory)):
        os.makedirs(directory)
    target = os.path.join(directory, 'pairtree_version0_1')
    if os.path.exists(target):
        return
    open(target, 'wb').close()
creates the pairtree marker for tests if it doesnt exist .
train
false
12,013
def hash_password(password):
    return hash_password_PBKDF2(password)
create a password hash from a given string .
train
false
12,015
def get_download_filename(url, cookie_file=None):
    from calibre import browser
    from contextlib import closing
    filename = ''
    br = browser()
    if cookie_file:
        from mechanize import MozillaCookieJar
        cj = MozillaCookieJar()
        cj.load(cookie_file)
        br.set_cookiejar(cj)
    try:
        with closing(br.open(url)) as r:
            filename = get_download_filename_from_response(r)
    except:
        import traceback
        traceback.print_exc()
    return filename
get a local filename for a url using the content disposition header returns empty string if an error occurs .
train
false
12,018
def register_cluster_groups(conf):
    cluster_names = []
    cluster_tags = _retrieve_extra_groups(conf, 'CLUSTER')
    for tag in cluster_tags:
        cluster_name = tag.split(':')[1]
        conf.register_opts(cluster_opts, tag, cluster_name)
        cluster_names.append(cluster_name)
    return cluster_names
retrieve configuration groups for nvp clusters .
train
false
12,020
def UnavailableForLegalReasons(message=None):
    if message:
        return _UnavailableForLegalReasons(message)
    elif ctx.get('app_stack'):
        return ctx.app_stack[(-1)].unavailableforlegalreasons()
    else:
        return _UnavailableForLegalReasons()
returns httperror with 451 unavailable for legal reasons error from the active application .
train
false
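A usage sketch in a web.py-style handler, assuming the helper above is importable; raising it answers the request with an http 451 response (the handler name and message are illustrative):

class LegalBlock:
    def GET(self):
        # any request to this handler is answered with 451
        raise UnavailableForLegalReasons('blocked in this jurisdiction')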
12,021
def get_file_dependencies(f, stdout=sys.stdout, stderr=sys.stderr, rospack=None): package = rospkg.get_package_name(f) spec = None if f.endswith(roslib.msgs.EXT): (_, spec) = roslib.msgs.load_from_file(f) elif f.endswith(roslib.srvs.EXT): (_, spec) = roslib.srvs.load_from_file(f) else: raise Exception(('[%s] does not appear to be a message or service' % spec)) return get_dependencies(spec, package, stdout, stderr, rospack=rospack)
[ "def", "get_file_dependencies", "(", "f", ",", "stdout", "=", "sys", ".", "stdout", ",", "stderr", "=", "sys", ".", "stderr", ",", "rospack", "=", "None", ")", ":", "package", "=", "rospkg", ".", "get_package_name", "(", "f", ")", "spec", "=", "None", "if", "f", ".", "endswith", "(", "roslib", ".", "msgs", ".", "EXT", ")", ":", "(", "_", ",", "spec", ")", "=", "roslib", ".", "msgs", ".", "load_from_file", "(", "f", ")", "elif", "f", ".", "endswith", "(", "roslib", ".", "srvs", ".", "EXT", ")", ":", "(", "_", ",", "spec", ")", "=", "roslib", ".", "srvs", ".", "load_from_file", "(", "f", ")", "else", ":", "raise", "Exception", "(", "(", "'[%s] does not appear to be a message or service'", "%", "spec", ")", ")", "return", "get_dependencies", "(", "spec", ",", "package", ",", "stdout", ",", "stderr", ",", "rospack", "=", "rospack", ")" ]
compute dependencies of the specified message/service file .
train
false
12,022
def addsep(path): if (path and (path[(-1)] != os.path.sep)): return (path + os.path.sep) return path
[ "def", "addsep", "(", "path", ")", ":", "if", "(", "path", "and", "(", "path", "[", "(", "-", "1", ")", "]", "!=", "os", ".", "path", ".", "sep", ")", ")", ":", "return", "(", "path", "+", "os", ".", "path", ".", "sep", ")", "return", "path" ]
add a trailing path separator if one is not already present .
train
false
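Behaviour sketch, assuming addsep from the snippet above is in scope; a separator is appended exactly once and the empty path is left alone:

import os

assert addsep('/tmp') == '/tmp' + os.path.sep
assert addsep('/tmp' + os.path.sep) == '/tmp' + os.path.sep
assert addsep('') == ''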
12,023
def getInteriorOverhangAngle(elementNode): return getCascadeFloatWithoutSelf(30.0, elementNode, 'interiorOverhangAngle')
[ "def", "getInteriorOverhangAngle", "(", "elementNode", ")", ":", "return", "getCascadeFloatWithoutSelf", "(", "30.0", ",", "elementNode", ",", "'interiorOverhangAngle'", ")" ]
get the interior overhang support angle in degrees .
train
false
12,024
def pportAutoFeed(state): global ctrlReg if (state == 0): ctrlReg = (ctrlReg | 2) else: ctrlReg = (ctrlReg & (~ 2)) port.DlPortWritePortUchar(ctrlRegAdrs, ctrlReg)
[ "def", "pportAutoFeed", "(", "state", ")", ":", "global", "ctrlReg", "if", "(", "state", "==", "0", ")", ":", "ctrlReg", "=", "(", "ctrlReg", "|", "2", ")", "else", ":", "ctrlReg", "=", "(", "ctrlReg", "&", "(", "~", "2", ")", ")", "port", ".", "DlPortWritePortUchar", "(", "ctrlRegAdrs", ",", "ctrlReg", ")" ]
set or clear the control register auto-feed bit according to state .
train
false
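The register update is an ordinary set/clear of bit 1 (value 2); a pure-Python illustration without the DlPortWritePortUchar hardware write:

ctrl = 0b0000
ctrl |= 2          # state == 0 in the snippet: assert the auto-feed line
assert ctrl == 0b0010
ctrl &= ~2         # any other state: de-assert it
assert ctrl == 0b0000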
12,026
def _instance_group_id(context, group_uuid): result = model_query(context, models.InstanceGroup, (models.InstanceGroup.id,)).filter_by(uuid=group_uuid).first() if (not result): raise exception.InstanceGroupNotFound(group_uuid=group_uuid) return result.id
[ "def", "_instance_group_id", "(", "context", ",", "group_uuid", ")", ":", "result", "=", "model_query", "(", "context", ",", "models", ".", "InstanceGroup", ",", "(", "models", ".", "InstanceGroup", ".", "id", ",", ")", ")", ".", "filter_by", "(", "uuid", "=", "group_uuid", ")", ".", "first", "(", ")", "if", "(", "not", "result", ")", ":", "raise", "exception", ".", "InstanceGroupNotFound", "(", "group_uuid", "=", "group_uuid", ")", "return", "result", ".", "id" ]
returns the group database id for the group uuid .
train
false
12,028
def _index_as_time(index, sfreq, first_samp=0, use_first_samp=False): times = (np.atleast_1d(index) + (first_samp if use_first_samp else 0)) return (times / sfreq)
[ "def", "_index_as_time", "(", "index", ",", "sfreq", ",", "first_samp", "=", "0", ",", "use_first_samp", "=", "False", ")", ":", "times", "=", "(", "np", ".", "atleast_1d", "(", "index", ")", "+", "(", "first_samp", "if", "use_first_samp", "else", "0", ")", ")", "return", "(", "times", "/", "sfreq", ")" ]
convert sample indices to times in seconds .
train
false
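A quick check, assuming _index_as_time from the snippet above is in scope; at a 1000 Hz sampling rate, sample indices map directly to seconds:

import numpy as np

times = _index_as_time(np.array([0, 500, 1000]), sfreq=1000.0)
assert times.tolist() == [0.0, 0.5, 1.0]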
12,029
def libvlc_media_list_player_retain(p_mlp): f = (_Cfunctions.get('libvlc_media_list_player_retain', None) or _Cfunction('libvlc_media_list_player_retain', ((1,),), None, None, MediaListPlayer)) return f(p_mlp)
[ "def", "libvlc_media_list_player_retain", "(", "p_mlp", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_list_player_retain'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_list_player_retain'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaListPlayer", ")", ")", "return", "f", "(", "p_mlp", ")" ]
retain a reference to a media list player object .
train
false
12,030
def split_mantissa_exponent(v): x = u'{0:.8g}'.format(v).split(u'e') if (x[0] != (u'1.' + (u'0' * (len(x[0]) - 2)))): m = x[0] else: m = u'' if (len(x) == 2): ex = x[1].lstrip(u'0+') if ((len(ex) > 0) and (ex[0] == u'-')): ex = (u'-' + ex[1:].lstrip(u'0')) else: ex = u'' return (m, ex)
[ "def", "split_mantissa_exponent", "(", "v", ")", ":", "x", "=", "u'{0:.8g}'", ".", "format", "(", "v", ")", ".", "split", "(", "u'e'", ")", "if", "(", "x", "[", "0", "]", "!=", "(", "u'1.'", "+", "(", "u'0'", "*", "(", "len", "(", "x", "[", "0", "]", ")", "-", "2", ")", ")", ")", ")", ":", "m", "=", "x", "[", "0", "]", "else", ":", "m", "=", "u''", "if", "(", "len", "(", "x", ")", "==", "2", ")", ":", "ex", "=", "x", "[", "1", "]", ".", "lstrip", "(", "u'0+'", ")", "if", "(", "(", "len", "(", "ex", ")", ">", "0", ")", "and", "(", "ex", "[", "0", "]", "==", "u'-'", ")", ")", ":", "ex", "=", "(", "u'-'", "+", "ex", "[", "1", ":", "]", ".", "lstrip", "(", "u'0'", ")", ")", "else", ":", "ex", "=", "u''", "return", "(", "m", ",", "ex", ")" ]
given a number , split it into its mantissa and base-10 exponent parts .
train
false
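Expected behaviour, worked through the formatting logic above by hand (string types are unicode in the snippet; the comparison is identical under Python 3):

# a value with both parts, and a plain value with no exponent
assert split_mantissa_exponent(1.5e-10) == ('1.5', '-10')
assert split_mantissa_exponent(2.0) == ('2', '')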
12,031
def _find_allocated_slots(devices): taken = {} for device in devices: if (hasattr(device, 'controllerKey') and hasattr(device, 'unitNumber')): unit_numbers = taken.setdefault(device.controllerKey, []) unit_numbers.append(device.unitNumber) if _is_scsi_controller(device): unit_numbers = taken.setdefault(device.key, []) unit_numbers.append(device.scsiCtlrUnitNumber) return taken
[ "def", "_find_allocated_slots", "(", "devices", ")", ":", "taken", "=", "{", "}", "for", "device", "in", "devices", ":", "if", "(", "hasattr", "(", "device", ",", "'controllerKey'", ")", "and", "hasattr", "(", "device", ",", "'unitNumber'", ")", ")", ":", "unit_numbers", "=", "taken", ".", "setdefault", "(", "device", ".", "controllerKey", ",", "[", "]", ")", "unit_numbers", ".", "append", "(", "device", ".", "unitNumber", ")", "if", "_is_scsi_controller", "(", "device", ")", ":", "unit_numbers", "=", "taken", ".", "setdefault", "(", "device", ".", "key", ",", "[", "]", ")", "unit_numbers", ".", "append", "(", "device", ".", "scsiCtlrUnitNumber", ")", "return", "taken" ]
return a dictionary which maps controller_key to the list of allocated unit numbers for that controller .
train
false
12,032
def find_guest_agent(base_dir): if CONF.xenserver.disable_agent: return False agent_rel_path = CONF.xenserver.agent_path agent_path = os.path.join(base_dir, agent_rel_path) if os.path.isfile(agent_path): LOG.info(_LI('XenServer tools installed in this image are capable of network injection. Networking files will not bemanipulated')) return True xe_daemon_filename = os.path.join(base_dir, 'usr', 'sbin', 'xe-daemon') if os.path.isfile(xe_daemon_filename): LOG.info(_LI('XenServer tools are present in this image but are not capable of network injection')) else: LOG.info(_LI('XenServer tools are not installed in this image')) return False
[ "def", "find_guest_agent", "(", "base_dir", ")", ":", "if", "CONF", ".", "xenserver", ".", "disable_agent", ":", "return", "False", "agent_rel_path", "=", "CONF", ".", "xenserver", ".", "agent_path", "agent_path", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "agent_rel_path", ")", "if", "os", ".", "path", ".", "isfile", "(", "agent_path", ")", ":", "LOG", ".", "info", "(", "_LI", "(", "'XenServer tools installed in this image are capable of network injection. Networking files will not bemanipulated'", ")", ")", "return", "True", "xe_daemon_filename", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "'usr'", ",", "'sbin'", ",", "'xe-daemon'", ")", "if", "os", ".", "path", ".", "isfile", "(", "xe_daemon_filename", ")", ":", "LOG", ".", "info", "(", "_LI", "(", "'XenServer tools are present in this image but are not capable of network injection'", ")", ")", "else", ":", "LOG", ".", "info", "(", "_LI", "(", "'XenServer tools are not installed in this image'", ")", ")", "return", "False" ]
tries to locate a guest agent at the path specified by agent_rel_path .
train
false
12,034
def parseXMLKey(s, private=False, public=False, implementations=['python']): for implementation in implementations: if (implementation == 'python'): key = Python_RSAKey.parseXML(s) break else: raise ValueError('No acceptable implementations') return _parseKeyHelper(key, private, public)
[ "def", "parseXMLKey", "(", "s", ",", "private", "=", "False", ",", "public", "=", "False", ",", "implementations", "=", "[", "'python'", "]", ")", ":", "for", "implementation", "in", "implementations", ":", "if", "(", "implementation", "==", "'python'", ")", ":", "key", "=", "Python_RSAKey", ".", "parseXML", "(", "s", ")", "break", "else", ":", "raise", "ValueError", "(", "'No acceptable implementations'", ")", "return", "_parseKeyHelper", "(", "key", ",", "private", ",", "public", ")" ]
parse an xml-format key .
train
false
12,037
def filesystems(): return [re.sub('(nodev)?\\s*', '', fs) for fs in open('/proc/filesystems')]
[ "def", "filesystems", "(", ")", ":", "return", "[", "re", ".", "sub", "(", "'(nodev)?\\\\s*'", ",", "''", ",", "fs", ")", "for", "fs", "in", "open", "(", "'/proc/filesystems'", ")", "]" ]
return a list of all available filesystems .
train
false
12,039
@window_cached def nerdtree(pl): if (not bufvar_exists(None, u'NERDTreeRoot')): return None path_str = vim.eval(u'getbufvar("%", "NERDTreeRoot").path.str()') return [{u'contents': path_str, u'highlight_groups': [u'nerdtree:path', u'file_name']}]
[ "@", "window_cached", "def", "nerdtree", "(", "pl", ")", ":", "if", "(", "not", "bufvar_exists", "(", "None", ",", "u'NERDTreeRoot'", ")", ")", ":", "return", "None", "path_str", "=", "vim", ".", "eval", "(", "u'getbufvar(\"%\", \"NERDTreeRoot\").path.str()'", ")", "return", "[", "{", "u'contents'", ":", "path_str", ",", "u'highlight_groups'", ":", "[", "u'nerdtree:path'", ",", "u'file_name'", "]", "}", "]" ]
return the directory that is shown by the current nerdtree buffer .
train
false
12,041
def getXIntersectionsFromIntersections(xIntersectionIndexList): xIntersections = [] fill = False solid = False solidTable = {} xIntersectionIndexList.sort() for solidX in xIntersectionIndexList: if (solidX.index >= 0): toggleHashtable(solidTable, solidX.index, '') else: fill = (not fill) oldSolid = solid solid = ((len(solidTable) == 0) and fill) if (oldSolid != solid): xIntersections.append(solidX.x) return xIntersections
[ "def", "getXIntersectionsFromIntersections", "(", "xIntersectionIndexList", ")", ":", "xIntersections", "=", "[", "]", "fill", "=", "False", "solid", "=", "False", "solidTable", "=", "{", "}", "xIntersectionIndexList", ".", "sort", "(", ")", "for", "solidX", "in", "xIntersectionIndexList", ":", "if", "(", "solidX", ".", "index", ">=", "0", ")", ":", "toggleHashtable", "(", "solidTable", ",", "solidX", ".", "index", ",", "''", ")", "else", ":", "fill", "=", "(", "not", "fill", ")", "oldSolid", "=", "solid", "solid", "=", "(", "(", "len", "(", "solidTable", ")", "==", "0", ")", "and", "fill", ")", "if", "(", "oldSolid", "!=", "solid", ")", ":", "xIntersections", ".", "append", "(", "solidX", ".", "x", ")", "return", "xIntersections" ]
get x intersections from the x intersection index list .
train
false
12,042
def syncdb(verbosity=1, interactive=True): from django.db import connection, transaction, models, get_creation_module from django.conf import settings disable_termcolors() _check_for_validation_errors() for app_name in settings.INSTALLED_APPS: try: __import__((app_name + '.management'), {}, {}, ['']) except ImportError: pass data_types = get_creation_module().DATA_TYPES cursor = connection.cursor() table_list = _get_table_list() seen_models = _get_installed_models(table_list) created_models = set() pending_references = {} for app in models.get_apps(): app_name = app.__name__.split('.')[(-2)] model_list = models.get_models(app) for model in model_list: if (verbosity >= 2): print ('Processing %s.%s model' % (app_name, model._meta.object_name)) if (model._meta.db_table in table_list): continue (sql, references) = _get_sql_model_create(model, seen_models) seen_models.add(model) created_models.add(model) for (refto, refs) in references.items(): pending_references.setdefault(refto, []).extend(refs) sql.extend(_get_sql_for_pending_references(model, pending_references)) if (verbosity >= 1): print ('Creating table %s' % model._meta.db_table) for statement in sql: cursor.execute(statement) table_list.append(model._meta.db_table) for app in models.get_apps(): app_name = app.__name__.split('.')[(-2)] model_list = models.get_models(app) for model in model_list: if (model in created_models): sql = _get_many_to_many_sql_for_model(model) if sql: if (verbosity >= 2): print ('Creating many-to-many tables for %s.%s model' % (app_name, model._meta.object_name)) for statement in sql: cursor.execute(statement) transaction.commit_unless_managed() _emit_post_sync_signal(created_models, verbosity, interactive) for app in models.get_apps(): for model in models.get_models(app): if (model in created_models): custom_sql = get_custom_sql_for_model(model) if custom_sql: if (verbosity >= 1): print ('Installing custom SQL for %s.%s model' % (app_name, model._meta.object_name)) try: for sql in custom_sql: cursor.execute(sql) except Exception as e: sys.stderr.write(('Failed to install custom SQL for %s.%s model: %s' % (app_name, model._meta.object_name, e))) transaction.rollback_unless_managed() else: transaction.commit_unless_managed() for app in models.get_apps(): app_name = app.__name__.split('.')[(-2)] for model in models.get_models(app): if (model in created_models): index_sql = get_sql_indexes_for_model(model) if index_sql: if (verbosity >= 1): print ('Installing index for %s.%s model' % (app_name, model._meta.object_name)) try: for sql in index_sql: cursor.execute(sql) except Exception as e: sys.stderr.write(('Failed to install index for %s.%s model: %s' % (app_name, model._meta.object_name, e))) transaction.rollback_unless_managed() else: transaction.commit_unless_managed() load_data(['initial_data'], verbosity=verbosity)
[ "def", "syncdb", "(", "verbosity", "=", "1", ",", "interactive", "=", "True", ")", ":", "from", "django", ".", "db", "import", "connection", ",", "transaction", ",", "models", ",", "get_creation_module", "from", "django", ".", "conf", "import", "settings", "disable_termcolors", "(", ")", "_check_for_validation_errors", "(", ")", "for", "app_name", "in", "settings", ".", "INSTALLED_APPS", ":", "try", ":", "__import__", "(", "(", "app_name", "+", "'.management'", ")", ",", "{", "}", ",", "{", "}", ",", "[", "''", "]", ")", "except", "ImportError", ":", "pass", "data_types", "=", "get_creation_module", "(", ")", ".", "DATA_TYPES", "cursor", "=", "connection", ".", "cursor", "(", ")", "table_list", "=", "_get_table_list", "(", ")", "seen_models", "=", "_get_installed_models", "(", "table_list", ")", "created_models", "=", "set", "(", ")", "pending_references", "=", "{", "}", "for", "app", "in", "models", ".", "get_apps", "(", ")", ":", "app_name", "=", "app", ".", "__name__", ".", "split", "(", "'.'", ")", "[", "(", "-", "2", ")", "]", "model_list", "=", "models", ".", "get_models", "(", "app", ")", "for", "model", "in", "model_list", ":", "if", "(", "verbosity", ">=", "2", ")", ":", "print", "(", "'Processing %s.%s model'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ")", ")", "if", "(", "model", ".", "_meta", ".", "db_table", "in", "table_list", ")", ":", "continue", "(", "sql", ",", "references", ")", "=", "_get_sql_model_create", "(", "model", ",", "seen_models", ")", "seen_models", ".", "add", "(", "model", ")", "created_models", ".", "add", "(", "model", ")", "for", "(", "refto", ",", "refs", ")", "in", "references", ".", "items", "(", ")", ":", "pending_references", ".", "setdefault", "(", "refto", ",", "[", "]", ")", ".", "extend", "(", "refs", ")", "sql", ".", "extend", "(", "_get_sql_for_pending_references", "(", "model", ",", "pending_references", ")", ")", "if", "(", "verbosity", ">=", "1", ")", ":", "print", "(", "'Creating table %s'", "%", "model", ".", "_meta", ".", "db_table", ")", "for", "statement", "in", "sql", ":", "cursor", ".", "execute", "(", "statement", ")", "table_list", ".", "append", "(", "model", ".", "_meta", ".", "db_table", ")", "for", "app", "in", "models", ".", "get_apps", "(", ")", ":", "app_name", "=", "app", ".", "__name__", ".", "split", "(", "'.'", ")", "[", "(", "-", "2", ")", "]", "model_list", "=", "models", ".", "get_models", "(", "app", ")", "for", "model", "in", "model_list", ":", "if", "(", "model", "in", "created_models", ")", ":", "sql", "=", "_get_many_to_many_sql_for_model", "(", "model", ")", "if", "sql", ":", "if", "(", "verbosity", ">=", "2", ")", ":", "print", "(", "'Creating many-to-many tables for %s.%s model'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ")", ")", "for", "statement", "in", "sql", ":", "cursor", ".", "execute", "(", "statement", ")", "transaction", ".", "commit_unless_managed", "(", ")", "_emit_post_sync_signal", "(", "created_models", ",", "verbosity", ",", "interactive", ")", "for", "app", "in", "models", ".", "get_apps", "(", ")", ":", "for", "model", "in", "models", ".", "get_models", "(", "app", ")", ":", "if", "(", "model", "in", "created_models", ")", ":", "custom_sql", "=", "get_custom_sql_for_model", "(", "model", ")", "if", "custom_sql", ":", "if", "(", "verbosity", ">=", "1", ")", ":", "print", "(", "'Installing custom SQL for %s.%s model'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ")", ")", "try", ":", "for", "sql", "in", 
"custom_sql", ":", "cursor", ".", "execute", "(", "sql", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "(", "'Failed to install custom SQL for %s.%s model: %s'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ",", "e", ")", ")", ")", "transaction", ".", "rollback_unless_managed", "(", ")", "else", ":", "transaction", ".", "commit_unless_managed", "(", ")", "for", "app", "in", "models", ".", "get_apps", "(", ")", ":", "app_name", "=", "app", ".", "__name__", ".", "split", "(", "'.'", ")", "[", "(", "-", "2", ")", "]", "for", "model", "in", "models", ".", "get_models", "(", "app", ")", ":", "if", "(", "model", "in", "created_models", ")", ":", "index_sql", "=", "get_sql_indexes_for_model", "(", "model", ")", "if", "index_sql", ":", "if", "(", "verbosity", ">=", "1", ")", ":", "print", "(", "'Installing index for %s.%s model'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ")", ")", "try", ":", "for", "sql", "in", "index_sql", ":", "cursor", ".", "execute", "(", "sql", ")", "except", "Exception", "as", "e", ":", "sys", ".", "stderr", ".", "write", "(", "(", "'Failed to install index for %s.%s model: %s'", "%", "(", "app_name", ",", "model", ".", "_meta", ".", "object_name", ",", "e", ")", ")", ")", "transaction", ".", "rollback_unless_managed", "(", ")", "else", ":", "transaction", ".", "commit_unless_managed", "(", ")", "load_data", "(", "[", "'initial_data'", "]", ",", "verbosity", "=", "verbosity", ")" ]
synchronizes the local database with the installed models : creates any missing tables , many-to-many tables , custom sql and indexes , then loads initial data .
train
false
12,043
def active_cert(key): cert_str = pem_format(key) certificate = importKey(cert_str) try: not_before = to_time(str(certificate.get_not_before())) not_after = to_time(str(certificate.get_not_after())) assert (not_before < utc_now()) assert (not_after > utc_now()) return True except AssertionError: return False except AttributeError: return False
[ "def", "active_cert", "(", "key", ")", ":", "cert_str", "=", "pem_format", "(", "key", ")", "certificate", "=", "importKey", "(", "cert_str", ")", "try", ":", "not_before", "=", "to_time", "(", "str", "(", "certificate", ".", "get_not_before", "(", ")", ")", ")", "not_after", "=", "to_time", "(", "str", "(", "certificate", ".", "get_not_after", "(", ")", ")", ")", "assert", "(", "not_before", "<", "utc_now", "(", ")", ")", "assert", "(", "not_after", ">", "utc_now", "(", ")", ")", "return", "True", "except", "AssertionError", ":", "return", "False", "except", "AttributeError", ":", "return", "False" ]
verifies that a key is active , that is , the present time is after not_before and before not_after .
train
false
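The core of the check is a validity-window test; a dependency-free sketch of just that comparison (pem_format and importKey come from the surrounding library and are not reproduced here):

from datetime import datetime, timedelta

def within_validity(not_before, not_after, now=None):
    # true when the present moment falls strictly inside the window
    now = now or datetime.utcnow()
    return not_before < now < not_after

assert within_validity(datetime.utcnow() - timedelta(days=1),
                       datetime.utcnow() + timedelta(days=1))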
12,044
@pytest.mark.network def test_git_branch_should_not_be_changed(script, tmpdir): script.pip('install', '-e', ('%s#egg=pip-test-package' % local_checkout('git+http://github.com/pypa/pip-test-package.git', tmpdir.join('cache'))), expect_error=True) source_dir = ((script.venv_path / 'src') / 'pip-test-package') result = script.run('git', 'branch', cwd=source_dir) assert ('* master' in result.stdout), result.stdout
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_git_branch_should_not_be_changed", "(", "script", ",", "tmpdir", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'-e'", ",", "(", "'%s#egg=pip-test-package'", "%", "local_checkout", "(", "'git+http://github.com/pypa/pip-test-package.git'", ",", "tmpdir", ".", "join", "(", "'cache'", ")", ")", ")", ",", "expect_error", "=", "True", ")", "source_dir", "=", "(", "(", "script", ".", "venv_path", "/", "'src'", ")", "/", "'pip-test-package'", ")", "result", "=", "script", ".", "run", "(", "'git'", ",", "'branch'", ",", "cwd", "=", "source_dir", ")", "assert", "(", "'* master'", "in", "result", ".", "stdout", ")", ",", "result", ".", "stdout" ]
editable installations should not change the branch ; related to issues #32 and #161 .
train
false
12,045
def view_session(module, number): global HOUSE mod = get_module(module, number) if hasattr(mod, 'view'): mod.view()
[ "def", "view_session", "(", "module", ",", "number", ")", ":", "global", "HOUSE", "mod", "=", "get_module", "(", "module", ",", "number", ")", "if", "hasattr", "(", "mod", ",", "'view'", ")", ":", "mod", ".", "view", "(", ")" ]
initializes a module's view .
train
false
12,046
def getpaths(struct): if isinstance(struct, Task): return struct.output() elif isinstance(struct, dict): r = {} for (k, v) in six.iteritems(struct): r[k] = getpaths(v) return r else: try: s = list(struct) except TypeError: raise Exception(('Cannot map %s to Task/dict/list' % str(struct))) return [getpaths(r) for r in s]
[ "def", "getpaths", "(", "struct", ")", ":", "if", "isinstance", "(", "struct", ",", "Task", ")", ":", "return", "struct", ".", "output", "(", ")", "elif", "isinstance", "(", "struct", ",", "dict", ")", ":", "r", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "struct", ")", ":", "r", "[", "k", "]", "=", "getpaths", "(", "v", ")", "return", "r", "else", ":", "try", ":", "s", "=", "list", "(", "struct", ")", "except", "TypeError", ":", "raise", "Exception", "(", "(", "'Cannot map %s to Task/dict/list'", "%", "str", "(", "struct", ")", ")", ")", "return", "[", "getpaths", "(", "r", ")", "for", "r", "in", "s", "]" ]
maps all tasks in a structured data object to their output targets .
train
true
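A usage sketch, assuming luigi is installed and getpaths (with its six dependency) is in scope; the structure of dicts and lists is preserved while each task is replaced by its output target (the task name and path are illustrative):

import luigi

class Hello(luigi.Task):
    def output(self):
        return luigi.LocalTarget('hello.txt')

# paths mirrors the input shape, holding LocalTarget objects
paths = getpaths({'single': Hello(), 'several': [Hello(), Hello()]})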
12,047
@cleanup @needs_tex @needs_ghostscript def test_determinism_all_tex(): _determinism_check(format=u'ps', usetex=True)
[ "@", "cleanup", "@", "needs_tex", "@", "needs_ghostscript", "def", "test_determinism_all_tex", "(", ")", ":", "_determinism_check", "(", "format", "=", "u'ps'", ",", "usetex", "=", "True", ")" ]
test for reproducible ps/tex output .
train
false
12,048
def test_selectfiles_valueerror(): base_dir = op.dirname(nipype.__file__) templates = {u'model': u'interfaces/{package}/model.py', u'preprocess': u'interfaces/{package}/pre*.py'} force_lists = [u'model', u'preprocess', u'registration'] sf = nio.SelectFiles(templates, base_directory=base_dir, force_lists=force_lists) with pytest.raises(ValueError): sf.run()
[ "def", "test_selectfiles_valueerror", "(", ")", ":", "base_dir", "=", "op", ".", "dirname", "(", "nipype", ".", "__file__", ")", "templates", "=", "{", "u'model'", ":", "u'interfaces/{package}/model.py'", ",", "u'preprocess'", ":", "u'interfaces/{package}/pre*.py'", "}", "force_lists", "=", "[", "u'model'", ",", "u'preprocess'", ",", "u'registration'", "]", "sf", "=", "nio", ".", "SelectFiles", "(", "templates", ",", "base_directory", "=", "base_dir", ",", "force_lists", "=", "force_lists", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "sf", ".", "run", "(", ")" ]
test valueerror when force_lists has a field that isn't in templates .
train
false
12,049
def contrastfromcols(L, D, pseudo=None): L = np.asarray(L) D = np.asarray(D) (n, p) = D.shape if ((L.shape[0] != n) and (L.shape[1] != p)): raise ValueError('shape of L and D mismatched') if (pseudo is None): pseudo = np.linalg.pinv(D) if (L.shape[0] == n): C = np.dot(pseudo, L).T else: C = L C = np.dot(pseudo, np.dot(D, C.T)).T Lp = np.dot(D, C.T) if (len(Lp.shape) == 1): Lp.shape = (n, 1) if (np_matrix_rank(Lp) != Lp.shape[1]): Lp = fullrank(Lp) C = np.dot(pseudo, Lp).T return np.squeeze(C)
[ "def", "contrastfromcols", "(", "L", ",", "D", ",", "pseudo", "=", "None", ")", ":", "L", "=", "np", ".", "asarray", "(", "L", ")", "D", "=", "np", ".", "asarray", "(", "D", ")", "(", "n", ",", "p", ")", "=", "D", ".", "shape", "if", "(", "(", "L", ".", "shape", "[", "0", "]", "!=", "n", ")", "and", "(", "L", ".", "shape", "[", "1", "]", "!=", "p", ")", ")", ":", "raise", "ValueError", "(", "'shape of L and D mismatched'", ")", "if", "(", "pseudo", "is", "None", ")", ":", "pseudo", "=", "np", ".", "linalg", ".", "pinv", "(", "D", ")", "if", "(", "L", ".", "shape", "[", "0", "]", "==", "n", ")", ":", "C", "=", "np", ".", "dot", "(", "pseudo", ",", "L", ")", ".", "T", "else", ":", "C", "=", "L", "C", "=", "np", ".", "dot", "(", "pseudo", ",", "np", ".", "dot", "(", "D", ",", "C", ".", "T", ")", ")", ".", "T", "Lp", "=", "np", ".", "dot", "(", "D", ",", "C", ".", "T", ")", "if", "(", "len", "(", "Lp", ".", "shape", ")", "==", "1", ")", ":", "Lp", ".", "shape", "=", "(", "n", ",", "1", ")", "if", "(", "np_matrix_rank", "(", "Lp", ")", "!=", "Lp", ".", "shape", "[", "1", "]", ")", ":", "Lp", "=", "fullrank", "(", "Lp", ")", "C", "=", "np", ".", "dot", "(", "pseudo", ",", "Lp", ")", ".", "T", "return", "np", ".", "squeeze", "(", "C", ")" ]
from an n x p design matrix d and a matrix l , determine a contrast matrix c of full rank .
train
false
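A small numeric check, assuming contrastfromcols is defined in the current session; np_matrix_rank and fullrank are statsmodels helpers not shown here, so the rank function is aliased for the full-rank path exercised below:

import numpy as np

np_matrix_rank = np.linalg.matrix_rank  # stand-in for the helper the snippet expects

D = np.column_stack([np.ones(8), np.arange(8.0)])  # n=8 observations, p=2 columns
L = np.array([[0.0, 1.0]])                         # weight the slope column only
C = contrastfromcols(L, D)
assert C.shape == (2,)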
12,050
def makeFakeServer(serverProtocol): return FakeTransport(serverProtocol, isServer=True)
[ "def", "makeFakeServer", "(", "serverProtocol", ")", ":", "return", "FakeTransport", "(", "serverProtocol", ",", "isServer", "=", "True", ")" ]
create and return a new in-memory transport hooked up to the given protocol .
train
false
12,051
def testmods_from_testdir(testdir): testdir = normpath(testdir) for testmod_path in testmod_paths_from_testdir(testdir): testmod_name = splitext(basename(testmod_path))[0] log.debug("import test module '%s'", testmod_path) try: iinfo = imp.find_module(testmod_name, [dirname(testmod_path)]) testabsdir = abspath(testdir) sys.path.insert(0, testabsdir) old_dir = os.getcwd() os.chdir(testdir) try: testmod = imp.load_module(testmod_name, *iinfo) finally: os.chdir(old_dir) sys.path.remove(testabsdir) except TestSkipped: (_, ex, _) = sys.exc_info() log.warn("'%s' module skipped: %s", testmod_name, ex) except Exception: (_, ex, _) = sys.exc_info() log.warn("could not import test module '%s': %s (skipping, run with '-d' for full traceback)", testmod_path, ex) if log.isEnabledFor(logging.DEBUG): traceback.print_exc() else: (yield testmod)
[ "def", "testmods_from_testdir", "(", "testdir", ")", ":", "testdir", "=", "normpath", "(", "testdir", ")", "for", "testmod_path", "in", "testmod_paths_from_testdir", "(", "testdir", ")", ":", "testmod_name", "=", "splitext", "(", "basename", "(", "testmod_path", ")", ")", "[", "0", "]", "log", ".", "debug", "(", "\"import test module '%s'\"", ",", "testmod_path", ")", "try", ":", "iinfo", "=", "imp", ".", "find_module", "(", "testmod_name", ",", "[", "dirname", "(", "testmod_path", ")", "]", ")", "testabsdir", "=", "abspath", "(", "testdir", ")", "sys", ".", "path", ".", "insert", "(", "0", ",", "testabsdir", ")", "old_dir", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "testdir", ")", "try", ":", "testmod", "=", "imp", ".", "load_module", "(", "testmod_name", ",", "*", "iinfo", ")", "finally", ":", "os", ".", "chdir", "(", "old_dir", ")", "sys", ".", "path", ".", "remove", "(", "testabsdir", ")", "except", "TestSkipped", ":", "(", "_", ",", "ex", ",", "_", ")", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "warn", "(", "\"'%s' module skipped: %s\"", ",", "testmod_name", ",", "ex", ")", "except", "Exception", ":", "(", "_", ",", "ex", ",", "_", ")", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "warn", "(", "\"could not import test module '%s': %s (skipping, run with '-d' for full traceback)\"", ",", "testmod_path", ",", "ex", ")", "if", "log", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "traceback", ".", "print_exc", "(", ")", "else", ":", "(", "yield", "testmod", ")" ]
generate test modules in the given test dir .
train
false
12,052
def get_script_prefix(): return _prefixes.get(currentThread(), u'/')
[ "def", "get_script_prefix", "(", ")", ":", "return", "_prefixes", ".", "get", "(", "currentThread", "(", ")", ",", "u'/'", ")" ]
returns the currently active script prefix .
train
false
12,053
def sync_domains(site, domains, bench_path='.'): changed = False existing_domains = get_domains_dict(get_domains(site, bench_path)) new_domains = get_domains_dict(domains) if (set(existing_domains.keys()) != set(new_domains.keys())): changed = True else: for d in existing_domains.values(): if (d != new_domains.get(d['domain'])): changed = True break if changed: update_site_config(site, {'domains': domains}, bench_path='.') return changed
[ "def", "sync_domains", "(", "site", ",", "domains", ",", "bench_path", "=", "'.'", ")", ":", "changed", "=", "False", "existing_domains", "=", "get_domains_dict", "(", "get_domains", "(", "site", ",", "bench_path", ")", ")", "new_domains", "=", "get_domains_dict", "(", "domains", ")", "if", "(", "set", "(", "existing_domains", ".", "keys", "(", ")", ")", "!=", "set", "(", "new_domains", ".", "keys", "(", ")", ")", ")", ":", "changed", "=", "True", "else", ":", "for", "d", "in", "existing_domains", ".", "values", "(", ")", ":", "if", "(", "d", "!=", "new_domains", ".", "get", "(", "d", "[", "'domain'", "]", ")", ")", ":", "changed", "=", "True", "break", "if", "changed", ":", "update_site_config", "(", "site", ",", "{", "'domains'", ":", "domains", "}", ",", "bench_path", "=", "'.'", ")", "return", "changed" ]
checks if there is a change in domains and updates the site config if so .
train
false
12,054
def test_topic_tracker_needs_update(database, user, topic): forumsread = ForumsRead.query.filter((ForumsRead.user_id == user.id), (ForumsRead.forum_id == topic.forum_id)).first() topicsread = TopicsRead.query.filter((TopicsRead.user_id == user.id), (TopicsRead.topic_id == topic.id)).first() with current_app.test_request_context(): assert topic.tracker_needs_update(forumsread, topicsread) topicsread = TopicsRead() topicsread.user_id = user.id topicsread.topic_id = topic.id topicsread.forum_id = topic.forum_id topicsread.last_read = datetime.utcnow() topicsread.save() forumsread = ForumsRead() forumsread.user_id = user.id forumsread.forum_id = topic.forum_id forumsread.last_read = datetime.utcnow() forumsread.save() assert (not topic.tracker_needs_update(forumsread, topicsread)) post = Post(content='Test Content') post.save(topic=topic, user=user) assert topic.tracker_needs_update(forumsread, topicsread)
[ "def", "test_topic_tracker_needs_update", "(", "database", ",", "user", ",", "topic", ")", ":", "forumsread", "=", "ForumsRead", ".", "query", ".", "filter", "(", "(", "ForumsRead", ".", "user_id", "==", "user", ".", "id", ")", ",", "(", "ForumsRead", ".", "forum_id", "==", "topic", ".", "forum_id", ")", ")", ".", "first", "(", ")", "topicsread", "=", "TopicsRead", ".", "query", ".", "filter", "(", "(", "TopicsRead", ".", "user_id", "==", "user", ".", "id", ")", ",", "(", "TopicsRead", ".", "topic_id", "==", "topic", ".", "id", ")", ")", ".", "first", "(", ")", "with", "current_app", ".", "test_request_context", "(", ")", ":", "assert", "topic", ".", "tracker_needs_update", "(", "forumsread", ",", "topicsread", ")", "topicsread", "=", "TopicsRead", "(", ")", "topicsread", ".", "user_id", "=", "user", ".", "id", "topicsread", ".", "topic_id", "=", "topic", ".", "id", "topicsread", ".", "forum_id", "=", "topic", ".", "forum_id", "topicsread", ".", "last_read", "=", "datetime", ".", "utcnow", "(", ")", "topicsread", ".", "save", "(", ")", "forumsread", "=", "ForumsRead", "(", ")", "forumsread", ".", "user_id", "=", "user", ".", "id", "forumsread", ".", "forum_id", "=", "topic", ".", "forum_id", "forumsread", ".", "last_read", "=", "datetime", ".", "utcnow", "(", ")", "forumsread", ".", "save", "(", ")", "assert", "(", "not", "topic", ".", "tracker_needs_update", "(", "forumsread", ",", "topicsread", ")", ")", "post", "=", "Post", "(", "content", "=", "'Test Content'", ")", "post", ".", "save", "(", "topic", "=", "topic", ",", "user", "=", "user", ")", "assert", "topic", ".", "tracker_needs_update", "(", "forumsread", ",", "topicsread", ")" ]
tests if the topicsread tracker needs an update when a new post has been submitted .
train
false
12,055
def lock_key(): return (config.KEY_PREFIX + config.LOCK_SUFFIX)
[ "def", "lock_key", "(", ")", ":", "return", "(", "config", ".", "KEY_PREFIX", "+", "config", ".", "LOCK_SUFFIX", ")" ]
return the key name to use for the memcache lock .
train
false
12,056
def _get_cython_type(dtype): type_name = _get_dtype(dtype).name ctype = _cython_types.get(type_name, 'object') if (ctype == 'error'): raise MergeError(('unsupported type: ' + type_name)) return ctype
[ "def", "_get_cython_type", "(", "dtype", ")", ":", "type_name", "=", "_get_dtype", "(", "dtype", ")", ".", "name", "ctype", "=", "_cython_types", ".", "get", "(", "type_name", ",", "'object'", ")", "if", "(", "ctype", "==", "'error'", ")", ":", "raise", "MergeError", "(", "(", "'unsupported type: '", "+", "type_name", ")", ")", "return", "ctype" ]
given a dtype , return the corresponding cython type name .
train
false
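_get_dtype and the _cython_types table are module internals not shown; a hedged sketch of the same lookup (the real pandas table is larger, and the helper name here is hypothetical):

import numpy as np

_cython_types_sketch = {'int64': 'int64_t', 'float64': 'float64_t', 'object': 'object'}

def get_cython_type_sketch(dtype):
    # resolve the numpy dtype name, then fall back to 'object'
    name = np.dtype(dtype).name
    return _cython_types_sketch.get(name, 'object')

assert get_cython_type_sketch('int64') == 'int64_t'
assert get_cython_type_sketch('uint8') == 'object'  # not in the sketch table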
12,057
def get_default_retry_steps(delay=timedelta(seconds=0.1), max_time=timedelta(minutes=2)): repetitions = (max_time.total_seconds() / delay.total_seconds()) return repeat(delay, int(repetitions))
[ "def", "get_default_retry_steps", "(", "delay", "=", "timedelta", "(", "seconds", "=", "0.1", ")", ",", "max_time", "=", "timedelta", "(", "minutes", "=", "2", ")", ")", ":", "repetitions", "=", "(", "max_time", ".", "total_seconds", "(", ")", "/", "delay", ".", "total_seconds", "(", ")", ")", "return", "repeat", "(", "delay", ",", "int", "(", "repetitions", ")", ")" ]
retry every 0.1 seconds for up to 2 minutes by default .
train
false
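With the defaults, the generator yields 1200 identical 0.1-second delays (two minutes in total), assuming get_default_retry_steps is in scope:

from datetime import timedelta

steps = list(get_default_retry_steps())
assert len(steps) == 1200
assert steps[0] == timedelta(seconds=0.1)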