id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
44,728
def zip_folder_content(folder, filename): with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as dest: for (root, dirs, files) in os.walk(folder): relative_dir = os.path.relpath(root, folder) for file_ in files: dest.write(os.path.join(root, file_), arcname=os.path.join(relative_dir, file_))
[ "def", "zip_folder_content", "(", "folder", ",", "filename", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "filename", ",", "'w'", ",", "zipfile", ".", "ZIP_DEFLATED", ")", "as", "dest", ":", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "folder", ")", ":", "relative_dir", "=", "os", ".", "path", ".", "relpath", "(", "root", ",", "folder", ")", "for", "file_", "in", "files", ":", "dest", ".", "write", "(", "os", ".", "path", ".", "join", "(", "root", ",", "file_", ")", ",", "arcname", "=", "os", ".", "path", ".", "join", "(", "relative_dir", ",", "file_", ")", ")" ]
compress the _content_ of a folder .
train
false
44,729
def service_get_by_compute_host(context, host): return IMPL.service_get_by_compute_host(context, host)
[ "def", "service_get_by_compute_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "service_get_by_compute_host", "(", "context", ",", "host", ")" ]
get the service entry for a given compute host .
train
false
44,730
@task def prepare_index(index_pk): from kuma.wiki.search import WikiDocumentType from kuma.search.models import Index cls = WikiDocumentType es = cls.get_connection('indexing') index = Index.objects.get(pk=index_pk) Index.objects.recreate_index(es=es, index=index) temporary_settings = {'index': {'refresh_interval': '-1', 'number_of_replicas': '0'}} es.indices.put_settings(temporary_settings, index=index.prefixed_name)
[ "@", "task", "def", "prepare_index", "(", "index_pk", ")", ":", "from", "kuma", ".", "wiki", ".", "search", "import", "WikiDocumentType", "from", "kuma", ".", "search", ".", "models", "import", "Index", "cls", "=", "WikiDocumentType", "es", "=", "cls", ".", "get_connection", "(", "'indexing'", ")", "index", "=", "Index", ".", "objects", ".", "get", "(", "pk", "=", "index_pk", ")", "Index", ".", "objects", ".", "recreate_index", "(", "es", "=", "es", ",", "index", "=", "index", ")", "temporary_settings", "=", "{", "'index'", ":", "{", "'refresh_interval'", ":", "'-1'", ",", "'number_of_replicas'", ":", "'0'", "}", "}", "es", ".", "indices", ".", "put_settings", "(", "temporary_settings", ",", "index", "=", "index", ".", "prefixed_name", ")" ]
prepare a new index for indexing documents into .
train
false
44,731
def endpoint_create(service, publicurl=None, internalurl=None, adminurl=None, region=None, profile=None, url=None, interface=None, **connection_args): kstone = auth(profile, **connection_args) keystone_service = service_get(name=service, profile=profile, **connection_args) if ((not keystone_service) or ('Error' in keystone_service)): return {'Error': 'Could not find the specified service'} if (_OS_IDENTITY_API_VERSION > 2): kstone.endpoints.create(service=keystone_service[service]['id'], region_id=region, url=url, interface=interface) else: kstone.endpoints.create(region=region, service_id=keystone_service[service]['id'], publicurl=publicurl, adminurl=adminurl, internalurl=internalurl) return endpoint_get(service, profile, **connection_args)
[ "def", "endpoint_create", "(", "service", ",", "publicurl", "=", "None", ",", "internalurl", "=", "None", ",", "adminurl", "=", "None", ",", "region", "=", "None", ",", "profile", "=", "None", ",", "url", "=", "None", ",", "interface", "=", "None", ",", "**", "connection_args", ")", ":", "kstone", "=", "auth", "(", "profile", ",", "**", "connection_args", ")", "keystone_service", "=", "service_get", "(", "name", "=", "service", ",", "profile", "=", "profile", ",", "**", "connection_args", ")", "if", "(", "(", "not", "keystone_service", ")", "or", "(", "'Error'", "in", "keystone_service", ")", ")", ":", "return", "{", "'Error'", ":", "'Could not find the specified service'", "}", "if", "(", "_OS_IDENTITY_API_VERSION", ">", "2", ")", ":", "kstone", ".", "endpoints", ".", "create", "(", "service", "=", "keystone_service", "[", "service", "]", "[", "'id'", "]", ",", "region_id", "=", "region", ",", "url", "=", "url", ",", "interface", "=", "interface", ")", "else", ":", "kstone", ".", "endpoints", ".", "create", "(", "region", "=", "region", ",", "service_id", "=", "keystone_service", "[", "service", "]", "[", "'id'", "]", ",", "publicurl", "=", "publicurl", ",", "adminurl", "=", "adminurl", ",", "internalurl", "=", "internalurl", ")", "return", "endpoint_get", "(", "service", ",", "profile", ",", "**", "connection_args", ")" ]
create an endpoint for an openstack service cli examples: .
train
true
44,732
def aggregate_keywords(keywords, sep, prefix, raw=False): processed = [] encode = encode_to_py3bytes_or_py2str for (k, v) in keywords.items(): if (len(k) == 1): if (v is not False): processed.append(encode(('-' + k))) if (v is not True): processed.append(encode(v)) else: if (not raw): k = k.replace('_', '-') if (v is True): processed.append(encode(('--' + k))) elif (v is False): pass elif ((sep is None) or (sep == ' ')): processed.append(encode((prefix + k))) processed.append(encode(v)) else: arg = encode(('%s%s%s%s' % (prefix, k, sep, v))) processed.append(arg) return processed
[ "def", "aggregate_keywords", "(", "keywords", ",", "sep", ",", "prefix", ",", "raw", "=", "False", ")", ":", "processed", "=", "[", "]", "encode", "=", "encode_to_py3bytes_or_py2str", "for", "(", "k", ",", "v", ")", "in", "keywords", ".", "items", "(", ")", ":", "if", "(", "len", "(", "k", ")", "==", "1", ")", ":", "if", "(", "v", "is", "not", "False", ")", ":", "processed", ".", "append", "(", "encode", "(", "(", "'-'", "+", "k", ")", ")", ")", "if", "(", "v", "is", "not", "True", ")", ":", "processed", ".", "append", "(", "encode", "(", "v", ")", ")", "else", ":", "if", "(", "not", "raw", ")", ":", "k", "=", "k", ".", "replace", "(", "'_'", ",", "'-'", ")", "if", "(", "v", "is", "True", ")", ":", "processed", ".", "append", "(", "encode", "(", "(", "'--'", "+", "k", ")", ")", ")", "elif", "(", "v", "is", "False", ")", ":", "pass", "elif", "(", "(", "sep", "is", "None", ")", "or", "(", "sep", "==", "' '", ")", ")", ":", "processed", ".", "append", "(", "encode", "(", "(", "prefix", "+", "k", ")", ")", ")", "processed", ".", "append", "(", "encode", "(", "v", ")", ")", "else", ":", "arg", "=", "encode", "(", "(", "'%s%s%s%s'", "%", "(", "prefix", ",", "k", ",", "sep", ",", "v", ")", ")", ")", "processed", ".", "append", "(", "arg", ")", "return", "processed" ]
take our keyword arguments .
train
true
44,733
@task def bdist_wininst_sse2(options): bdist_wininst_arch(options.python_version, 'sse2')
[ "@", "task", "def", "bdist_wininst_sse2", "(", "options", ")", ":", "bdist_wininst_arch", "(", "options", ".", "python_version", ",", "'sse2'", ")" ]
build the sse2 wininst installer .
train
false
44,735
def augment(matlist, column, K): return [(row + element) for (row, element) in zip(matlist, column)]
[ "def", "augment", "(", "matlist", ",", "column", ",", "K", ")", ":", "return", "[", "(", "row", "+", "element", ")", "for", "(", "row", ",", "element", ")", "in", "zip", "(", "matlist", ",", "column", ")", "]" ]
augments a matrix and a column .
train
false
44,736
@pytest.mark.django_db def test_admin_regular_user(client, default): client.login(username=default.username, password='') response = client.get(ADMIN_URL) assert (response.status_code == 403)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_admin_regular_user", "(", "client", ",", "default", ")", ":", "client", ".", "login", "(", "username", "=", "default", ".", "username", ",", "password", "=", "''", ")", "response", "=", "client", ".", "get", "(", "ADMIN_URL", ")", "assert", "(", "response", ".", "status_code", "==", "403", ")" ]
checks regular users cannot access the admin site .
train
false
44,737
@pytest.mark.parametrize('i, item', enumerate(ITEMS)) def test_original_urls(objects, i, item): assert (objects.history.itemAt(i).originalUrl() == item.original_url)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'i, item'", ",", "enumerate", "(", "ITEMS", ")", ")", "def", "test_original_urls", "(", "objects", ",", "i", ",", "item", ")", ":", "assert", "(", "objects", ".", "history", ".", "itemAt", "(", "i", ")", ".", "originalUrl", "(", ")", "==", "item", ".", "original_url", ")" ]
check if the original urls were loaded correctly .
train
false
44,739
def run_upgrades(store, upgrades): lock = store.get_lock() try: run_upgrades_locked(store, upgrades) finally: store.release_lock(lock)
[ "def", "run_upgrades", "(", "store", ",", "upgrades", ")", ":", "lock", "=", "store", ".", "get_lock", "(", ")", "try", ":", "run_upgrades_locked", "(", "store", ",", "upgrades", ")", "finally", ":", "store", ".", "release_lock", "(", "lock", ")" ]
guard against concurrent upgrades .
train
false
44,741
def _num_cpus_darwin(): p = subprocess.Popen(['sysctl', '-n', 'hw.ncpu'], stdout=subprocess.PIPE) return p.stdout.read()
[ "def", "_num_cpus_darwin", "(", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "[", "'sysctl'", ",", "'-n'", ",", "'hw.ncpu'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "return", "p", ".", "stdout", ".", "read", "(", ")" ]
return the number of active cpus on a darwin system .
train
true
44,742
def alternating(n): for perm in variations(list(range(n)), n): p = Permutation(perm) if p.is_even: (yield p)
[ "def", "alternating", "(", "n", ")", ":", "for", "perm", "in", "variations", "(", "list", "(", "range", "(", "n", ")", ")", ",", "n", ")", ":", "p", "=", "Permutation", "(", "perm", ")", "if", "p", ".", "is_even", ":", "(", "yield", "p", ")" ]
generates the alternating group of order n .
train
false
44,743
def domain_match(A, B): A = A.lower() B = B.lower() if (A == B): return True if (not is_HDN(A)): return False i = A.rfind(B) if ((i == (-1)) or (i == 0)): return False if (not B.startswith('.')): return False if (not is_HDN(B[1:])): return False return True
[ "def", "domain_match", "(", "A", ",", "B", ")", ":", "A", "=", "A", ".", "lower", "(", ")", "B", "=", "B", ".", "lower", "(", ")", "if", "(", "A", "==", "B", ")", ":", "return", "True", "if", "(", "not", "is_HDN", "(", "A", ")", ")", ":", "return", "False", "i", "=", "A", ".", "rfind", "(", "B", ")", "if", "(", "(", "i", "==", "(", "-", "1", ")", ")", "or", "(", "i", "==", "0", ")", ")", ":", "return", "False", "if", "(", "not", "B", ".", "startswith", "(", "'.'", ")", ")", ":", "return", "False", "if", "(", "not", "is_HDN", "(", "B", "[", "1", ":", "]", ")", ")", ":", "return", "False", "return", "True" ]
return true if domain a domain-matches domain b .
train
true
44,745
def _test_factory(test, dtype=np.double): olderr = np.seterr(all='ignore') try: test.check(dtype=dtype) finally: np.seterr(**olderr)
[ "def", "_test_factory", "(", "test", ",", "dtype", "=", "np", ".", "double", ")", ":", "olderr", "=", "np", ".", "seterr", "(", "all", "=", "'ignore'", ")", "try", ":", "test", ".", "check", "(", "dtype", "=", "dtype", ")", "finally", ":", "np", ".", "seterr", "(", "**", "olderr", ")" ]
boost test .
train
false
44,747
def _py_WX28_convert_agg_to_wx_bitmap(agg, bbox): if (bbox is None): return wx.BitmapFromBufferRGBA(int(agg.width), int(agg.height), agg.buffer_rgba(0, 0)) else: return _WX28_clipped_agg_as_bitmap(agg, bbox)
[ "def", "_py_WX28_convert_agg_to_wx_bitmap", "(", "agg", ",", "bbox", ")", ":", "if", "(", "bbox", "is", "None", ")", ":", "return", "wx", ".", "BitmapFromBufferRGBA", "(", "int", "(", "agg", ".", "width", ")", ",", "int", "(", "agg", ".", "height", ")", ",", "agg", ".", "buffer_rgba", "(", "0", ",", "0", ")", ")", "else", ":", "return", "_WX28_clipped_agg_as_bitmap", "(", "agg", ",", "bbox", ")" ]
convert the region of the agg buffer bounded by bbox to a wx .
train
true
44,751
def log_startup_info(): LOG.always('Starting mongo-connector version: %s', __version__) if ('dev' in __version__): LOG.warning('This is a development version (%s) of mongo-connector', __version__) LOG.always('Python version: %s', sys.version) LOG.always('Platform: %s', platform.platform()) LOG.always('pymongo version: %s', pymongo.__version__) if (not pymongo.has_c()): LOG.warning('pymongo version %s was installed without the C extensions. "InvalidBSON: Date value out of range" errors may occur if there are documents with BSON Datetimes that represent times outside of Python\'s datetime limit.', pymongo.__version__)
[ "def", "log_startup_info", "(", ")", ":", "LOG", ".", "always", "(", "'Starting mongo-connector version: %s'", ",", "__version__", ")", "if", "(", "'dev'", "in", "__version__", ")", ":", "LOG", ".", "warning", "(", "'This is a development version (%s) of mongo-connector'", ",", "__version__", ")", "LOG", ".", "always", "(", "'Python version: %s'", ",", "sys", ".", "version", ")", "LOG", ".", "always", "(", "'Platform: %s'", ",", "platform", ".", "platform", "(", ")", ")", "LOG", ".", "always", "(", "'pymongo version: %s'", ",", "pymongo", ".", "__version__", ")", "if", "(", "not", "pymongo", ".", "has_c", "(", ")", ")", ":", "LOG", ".", "warning", "(", "'pymongo version %s was installed without the C extensions. \"InvalidBSON: Date value out of range\" errors may occur if there are documents with BSON Datetimes that represent times outside of Python\\'s datetime limit.'", ",", "pymongo", ".", "__version__", ")" ]
log info about the current environment .
train
true
44,752
def isolate_query_ctes(full_text, text_before_cursor): if (not full_text): return (full_text, text_before_cursor, tuple()) (ctes, remainder) = extract_ctes(full_text) if (not ctes): return (full_text, text_before_cursor, ()) current_position = len(text_before_cursor) meta = [] for cte in ctes: if (cte.start < current_position < cte.stop): text_before_cursor = full_text[cte.start:current_position] full_text = full_text[cte.start:cte.stop] return (full_text, text_before_cursor, meta) cols = (ColumnMetadata(name, None, ()) for name in cte.columns) meta.append(TableMetadata(cte.name, cols)) full_text = full_text[ctes[(-1)].stop:] text_before_cursor = text_before_cursor[ctes[(-1)].stop:current_position] return (full_text, text_before_cursor, tuple(meta))
[ "def", "isolate_query_ctes", "(", "full_text", ",", "text_before_cursor", ")", ":", "if", "(", "not", "full_text", ")", ":", "return", "(", "full_text", ",", "text_before_cursor", ",", "tuple", "(", ")", ")", "(", "ctes", ",", "remainder", ")", "=", "extract_ctes", "(", "full_text", ")", "if", "(", "not", "ctes", ")", ":", "return", "(", "full_text", ",", "text_before_cursor", ",", "(", ")", ")", "current_position", "=", "len", "(", "text_before_cursor", ")", "meta", "=", "[", "]", "for", "cte", "in", "ctes", ":", "if", "(", "cte", ".", "start", "<", "current_position", "<", "cte", ".", "stop", ")", ":", "text_before_cursor", "=", "full_text", "[", "cte", ".", "start", ":", "current_position", "]", "full_text", "=", "full_text", "[", "cte", ".", "start", ":", "cte", ".", "stop", "]", "return", "(", "full_text", ",", "text_before_cursor", ",", "meta", ")", "cols", "=", "(", "ColumnMetadata", "(", "name", ",", "None", ",", "(", ")", ")", "for", "name", "in", "cte", ".", "columns", ")", "meta", ".", "append", "(", "TableMetadata", "(", "cte", ".", "name", ",", "cols", ")", ")", "full_text", "=", "full_text", "[", "ctes", "[", "(", "-", "1", ")", "]", ".", "stop", ":", "]", "text_before_cursor", "=", "text_before_cursor", "[", "ctes", "[", "(", "-", "1", ")", "]", ".", "stop", ":", "current_position", "]", "return", "(", "full_text", ",", "text_before_cursor", ",", "tuple", "(", "meta", ")", ")" ]
simplify a query by converting ctes into table metadata objects .
train
false
44,753
def rgb2lms(rgb_Nx3, conversionMatrix=None): rgb_3xN = numpy.transpose(rgb_Nx3) if (conversionMatrix is None): cones_to_rgb = numpy.asarray([[4.97068857, (-4.14354132), 0.17285275], [(-0.90913894), 2.15671326, (-0.24757432)], [(-0.03976551), (-0.14253782), 1.18230333]]) logging.warning('This monitor has not been color-calibrated. Using default LMS conversion matrix.') else: cones_to_rgb = conversionMatrix rgb_to_cones = numpy.linalg.inv(cones_to_rgb) lms = numpy.dot(rgb_to_cones, rgb_3xN) return numpy.transpose(lms)
[ "def", "rgb2lms", "(", "rgb_Nx3", ",", "conversionMatrix", "=", "None", ")", ":", "rgb_3xN", "=", "numpy", ".", "transpose", "(", "rgb_Nx3", ")", "if", "(", "conversionMatrix", "is", "None", ")", ":", "cones_to_rgb", "=", "numpy", ".", "asarray", "(", "[", "[", "4.97068857", ",", "(", "-", "4.14354132", ")", ",", "0.17285275", "]", ",", "[", "(", "-", "0.90913894", ")", ",", "2.15671326", ",", "(", "-", "0.24757432", ")", "]", ",", "[", "(", "-", "0.03976551", ")", ",", "(", "-", "0.14253782", ")", ",", "1.18230333", "]", "]", ")", "logging", ".", "warning", "(", "'This monitor has not been color-calibrated. Using default LMS conversion matrix.'", ")", "else", ":", "cones_to_rgb", "=", "conversionMatrix", "rgb_to_cones", "=", "numpy", ".", "linalg", ".", "inv", "(", "cones_to_rgb", ")", "lms", "=", "numpy", ".", "dot", "(", "rgb_to_cones", ",", "rgb_3xN", ")", "return", "numpy", ".", "transpose", "(", "lms", ")" ]
convert from rgb to cone space .
train
false
44,755
def dateof(tag_name, tags): for tag in tags: if (tag['name'] == tag_name): commit = read_url(tag['commit']['url']) return parse_timestamp(commit['commit']['committer']['date']) return None
[ "def", "dateof", "(", "tag_name", ",", "tags", ")", ":", "for", "tag", "in", "tags", ":", "if", "(", "tag", "[", "'name'", "]", "==", "tag_name", ")", ":", "commit", "=", "read_url", "(", "tag", "[", "'commit'", "]", "[", "'url'", "]", ")", "return", "parse_timestamp", "(", "commit", "[", "'commit'", "]", "[", "'committer'", "]", "[", "'date'", "]", ")", "return", "None" ]
given a list of tags .
train
true
44,756
def get_yaml_entry(yaml_dict, name): entry = yaml_dict.get(name) if (entry is None): return None if isinstance(entry, basestring): return [entry] return entry
[ "def", "get_yaml_entry", "(", "yaml_dict", ",", "name", ")", ":", "entry", "=", "yaml_dict", ".", "get", "(", "name", ")", "if", "(", "entry", "is", "None", ")", ":", "return", "None", "if", "isinstance", "(", "entry", ",", "basestring", ")", ":", "return", "[", "entry", "]", "return", "entry" ]
get entry name from dict yaml_dict parameters yaml_dict : dict dict or subdict from parsing .
train
false
44,758
def rpXRDS(request): return util.renderXRDS(request, [RP_RETURN_TO_URL_TYPE], [util.getViewURL(request, finishOpenID)])
[ "def", "rpXRDS", "(", "request", ")", ":", "return", "util", ".", "renderXRDS", "(", "request", ",", "[", "RP_RETURN_TO_URL_TYPE", "]", ",", "[", "util", ".", "getViewURL", "(", "request", ",", "finishOpenID", ")", "]", ")" ]
return a relying party verification xrds document .
train
true
44,760
def prep_bootstrap(mpt): bs_ = __salt__['config.gather_bootstrap_script']() fpd_ = os.path.join(mpt, 'tmp', '{0}'.format(uuid.uuid4())) if (not os.path.exists(fpd_)): os.makedirs(fpd_) os.chmod(fpd_, 448) fp_ = os.path.join(fpd_, os.path.basename(bs_)) shutil.copy(bs_, fp_) tmppath = fpd_.replace(mpt, '') return (fp_, tmppath)
[ "def", "prep_bootstrap", "(", "mpt", ")", ":", "bs_", "=", "__salt__", "[", "'config.gather_bootstrap_script'", "]", "(", ")", "fpd_", "=", "os", ".", "path", ".", "join", "(", "mpt", ",", "'tmp'", ",", "'{0}'", ".", "format", "(", "uuid", ".", "uuid4", "(", ")", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "fpd_", ")", ")", ":", "os", ".", "makedirs", "(", "fpd_", ")", "os", ".", "chmod", "(", "fpd_", ",", "448", ")", "fp_", "=", "os", ".", "path", ".", "join", "(", "fpd_", ",", "os", ".", "path", ".", "basename", "(", "bs_", ")", ")", "shutil", ".", "copy", "(", "bs_", ",", "fp_", ")", "tmppath", "=", "fpd_", ".", "replace", "(", "mpt", ",", "''", ")", "return", "(", "fp_", ",", "tmppath", ")" ]
update and get the random script to a random place cli example: .
train
true
44,764
def tiers_for_dev(dev): t1 = dev['region'] t2 = dev['zone'] t3 = dev['ip'] t4 = dev['id'] return ((t1,), (t1, t2), (t1, t2, t3), (t1, t2, t3, t4))
[ "def", "tiers_for_dev", "(", "dev", ")", ":", "t1", "=", "dev", "[", "'region'", "]", "t2", "=", "dev", "[", "'zone'", "]", "t3", "=", "dev", "[", "'ip'", "]", "t4", "=", "dev", "[", "'id'", "]", "return", "(", "(", "t1", ",", ")", ",", "(", "t1", ",", "t2", ")", ",", "(", "t1", ",", "t2", ",", "t3", ")", ",", "(", "t1", ",", "t2", ",", "t3", ",", "t4", ")", ")" ]
returns a tuple of tiers for a given device in ascending order by length .
train
false
44,765
def is_pull_request(issue): return bool(issue.get('pull_request', {}).get('html_url', None))
[ "def", "is_pull_request", "(", "issue", ")", ":", "return", "bool", "(", "issue", ".", "get", "(", "'pull_request'", ",", "{", "}", ")", ".", "get", "(", "'html_url'", ",", "None", ")", ")" ]
return true if the given issue is a pull request .
train
false
44,767
def _get_flim(flim, fscale, freq, sfreq=None): if (flim is None): if (freq is None): flim = [(0.1 if (fscale == 'log') else 0.0), (sfreq / 2.0)] else: if (fscale == 'linear'): flim = [freq[0]] else: flim = [(freq[0] if (freq[0] > 0) else (0.1 * freq[1]))] flim += [freq[(-1)]] if (fscale == 'log'): if (flim[0] <= 0): raise ValueError(('flim[0] must be positive, got %s' % flim[0])) elif (flim[0] < 0): raise ValueError(('flim[0] must be non-negative, got %s' % flim[0])) return flim
[ "def", "_get_flim", "(", "flim", ",", "fscale", ",", "freq", ",", "sfreq", "=", "None", ")", ":", "if", "(", "flim", "is", "None", ")", ":", "if", "(", "freq", "is", "None", ")", ":", "flim", "=", "[", "(", "0.1", "if", "(", "fscale", "==", "'log'", ")", "else", "0.0", ")", ",", "(", "sfreq", "/", "2.0", ")", "]", "else", ":", "if", "(", "fscale", "==", "'linear'", ")", ":", "flim", "=", "[", "freq", "[", "0", "]", "]", "else", ":", "flim", "=", "[", "(", "freq", "[", "0", "]", "if", "(", "freq", "[", "0", "]", ">", "0", ")", "else", "(", "0.1", "*", "freq", "[", "1", "]", ")", ")", "]", "flim", "+=", "[", "freq", "[", "(", "-", "1", ")", "]", "]", "if", "(", "fscale", "==", "'log'", ")", ":", "if", "(", "flim", "[", "0", "]", "<=", "0", ")", ":", "raise", "ValueError", "(", "(", "'flim[0] must be positive, got %s'", "%", "flim", "[", "0", "]", ")", ")", "elif", "(", "flim", "[", "0", "]", "<", "0", ")", ":", "raise", "ValueError", "(", "(", "'flim[0] must be non-negative, got %s'", "%", "flim", "[", "0", "]", ")", ")", "return", "flim" ]
get reasonable frequency limits .
train
false
44,769
def scipy_sparse_to_sympy(m, **options): return Matrix(m.todense())
[ "def", "scipy_sparse_to_sympy", "(", "m", ",", "**", "options", ")", ":", "return", "Matrix", "(", "m", ".", "todense", "(", ")", ")" ]
convert a scipy .
train
false
44,771
@treeio_login_required @handle_response_format def subscription_edit(request, subscription_id, response_format='html'): subscription = get_object_or_404(Subscription, pk=subscription_id) if ((not request.user.profile.has_permission(subscription, mode='w')) and (not request.user.profile.is_admin('treeio.sales'))): return user_denied(request, "You don't have access to this Subscription", response_format) if request.POST: form = SubscriptionForm(request.user.profile, request.POST, instance=subscription) else: form = SubscriptionForm(request.user.profile, instance=subscription) if form.is_valid(): subscription = form.save() return HttpResponseRedirect(reverse('sales_subscription_view', args=[subscription.id])) return render_to_response('sales/subscription_edit', {'form': form, 'subscription': subscription}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "subscription_edit", "(", "request", ",", "subscription_id", ",", "response_format", "=", "'html'", ")", ":", "subscription", "=", "get_object_or_404", "(", "Subscription", ",", "pk", "=", "subscription_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "subscription", ",", "mode", "=", "'w'", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Subscription\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "form", "=", "SubscriptionForm", "(", "request", ".", "user", ".", "profile", ",", "request", ".", "POST", ",", "instance", "=", "subscription", ")", "else", ":", "form", "=", "SubscriptionForm", "(", "request", ".", "user", ".", "profile", ",", "instance", "=", "subscription", ")", "if", "form", ".", "is_valid", "(", ")", ":", "subscription", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'sales_subscription_view'", ",", "args", "=", "[", "subscription", ".", "id", "]", ")", ")", "return", "render_to_response", "(", "'sales/subscription_edit'", ",", "{", "'form'", ":", "form", ",", "'subscription'", ":", "subscription", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
subscription edit .
train
false
44,772
def ignore_not_package_admin(key, data, errors, context): model = context['model'] user = context.get('user') if ('ignore_auth' in context): return if (user and authz.is_sysadmin(user)): return authorized = False pkg = context.get('package') if pkg: try: logic.check_access('package_change_state', context) authorized = True except logic.NotAuthorized: authorized = False if (user and pkg and authorized): return if ((key == ('state',)) and context.get('allow_state_change')): return data.pop(key)
[ "def", "ignore_not_package_admin", "(", "key", ",", "data", ",", "errors", ",", "context", ")", ":", "model", "=", "context", "[", "'model'", "]", "user", "=", "context", ".", "get", "(", "'user'", ")", "if", "(", "'ignore_auth'", "in", "context", ")", ":", "return", "if", "(", "user", "and", "authz", ".", "is_sysadmin", "(", "user", ")", ")", ":", "return", "authorized", "=", "False", "pkg", "=", "context", ".", "get", "(", "'package'", ")", "if", "pkg", ":", "try", ":", "logic", ".", "check_access", "(", "'package_change_state'", ",", "context", ")", "authorized", "=", "True", "except", "logic", ".", "NotAuthorized", ":", "authorized", "=", "False", "if", "(", "user", "and", "pkg", "and", "authorized", ")", ":", "return", "if", "(", "(", "key", "==", "(", "'state'", ",", ")", ")", "and", "context", ".", "get", "(", "'allow_state_change'", ")", ")", ":", "return", "data", ".", "pop", "(", "key", ")" ]
ignore if the user is not allowed to administer the package specified .
train
false
44,774
def jres_id(n): return (chr((ord('A') + ((n - 1) % 25))) * ((n / 26) + 1))
[ "def", "jres_id", "(", "n", ")", ":", "return", "(", "chr", "(", "(", "ord", "(", "'A'", ")", "+", "(", "(", "n", "-", "1", ")", "%", "25", ")", ")", ")", "*", "(", "(", "n", "/", "26", ")", "+", "1", ")", ")" ]
joint res ids go from a-z .
train
false
44,775
def nvgre(version=0, vsid=0, flow_id=0): return gre(version=version, protocol=ether_types.ETH_TYPE_TEB, vsid=vsid, flow_id=flow_id)
[ "def", "nvgre", "(", "version", "=", "0", ",", "vsid", "=", "0", ",", "flow_id", "=", "0", ")", ":", "return", "gre", "(", "version", "=", "version", ",", "protocol", "=", "ether_types", ".", "ETH_TYPE_TEB", ",", "vsid", "=", "vsid", ",", "flow_id", "=", "flow_id", ")" ]
generate instance of gre class with information for nvgre .
train
true
44,777
def idzr_aid(A, k): A = np.asfortranarray(A) (m, n) = A.shape w = idzr_aidi(m, n, k) (idx, proj) = _id.idzr_aid(A, k, w) if (k == n): proj = np.array([], dtype='complex128', order='F') else: proj = proj.reshape((k, (n - k)), order='F') return (idx, proj)
[ "def", "idzr_aid", "(", "A", ",", "k", ")", ":", "A", "=", "np", ".", "asfortranarray", "(", "A", ")", "(", "m", ",", "n", ")", "=", "A", ".", "shape", "w", "=", "idzr_aidi", "(", "m", ",", "n", ",", "k", ")", "(", "idx", ",", "proj", ")", "=", "_id", ".", "idzr_aid", "(", "A", ",", "k", ",", "w", ")", "if", "(", "k", "==", "n", ")", ":", "proj", "=", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "'complex128'", ",", "order", "=", "'F'", ")", "else", ":", "proj", "=", "proj", ".", "reshape", "(", "(", "k", ",", "(", "n", "-", "k", ")", ")", ",", "order", "=", "'F'", ")", "return", "(", "idx", ",", "proj", ")" ]
compute id of a complex matrix to a specified rank using random sampling .
train
false
44,778
def isClassAdvisor(ob): return (isinstance(ob, FunctionType) and hasattr(ob, 'previousMetaclass'))
[ "def", "isClassAdvisor", "(", "ob", ")", ":", "return", "(", "isinstance", "(", "ob", ",", "FunctionType", ")", "and", "hasattr", "(", "ob", ",", "'previousMetaclass'", ")", ")" ]
true if ob is a class advisor function .
train
false
44,779
def test_setup_py_with_dos_line_endings(script, data): to_install = data.packages.join('LineEndings') script.pip('install', to_install, expect_error=False)
[ "def", "test_setup_py_with_dos_line_endings", "(", "script", ",", "data", ")", ":", "to_install", "=", "data", ".", "packages", ".", "join", "(", "'LineEndings'", ")", "script", ".", "pip", "(", "'install'", ",", "to_install", ",", "expect_error", "=", "False", ")" ]
it doesnt choke on a setup .
train
false
44,780
def _to_bytes(value, encoding='ascii'): result = (value.encode(encoding) if isinstance(value, six.text_type) else value) if isinstance(result, six.binary_type): return result else: raise TypeError(('%r could not be converted to bytes' % (value,)))
[ "def", "_to_bytes", "(", "value", ",", "encoding", "=", "'ascii'", ")", ":", "result", "=", "(", "value", ".", "encode", "(", "encoding", ")", "if", "isinstance", "(", "value", ",", "six", ".", "text_type", ")", "else", "value", ")", "if", "isinstance", "(", "result", ",", "six", ".", "binary_type", ")", ":", "return", "result", "else", ":", "raise", "TypeError", "(", "(", "'%r could not be converted to bytes'", "%", "(", "value", ",", ")", ")", ")" ]
converts a string value to bytes .
train
true
44,781
def RPCVariation(reqsummary, rpcsummaries): rpc_variation = [] markers = [0.1, 0.25, 0.75, 0.9] percentiles = _GetPercentileList(reqsummary.totaltimes, markers) percentiles.insert(0, 'Total') rpc_variation.append(percentiles) percentiles = _GetPercentileList(reqsummary.totalrpctimes, markers) percentiles.insert(0, 'TotalRPCTime') rpc_variation.append(percentiles) for pair in rpcsummaries: percentiles = _GetPercentileList(pair[1].times, markers) percentiles.insert(0, pair[0]) rpc_variation.append(percentiles) return rpc_variation
[ "def", "RPCVariation", "(", "reqsummary", ",", "rpcsummaries", ")", ":", "rpc_variation", "=", "[", "]", "markers", "=", "[", "0.1", ",", "0.25", ",", "0.75", ",", "0.9", "]", "percentiles", "=", "_GetPercentileList", "(", "reqsummary", ".", "totaltimes", ",", "markers", ")", "percentiles", ".", "insert", "(", "0", ",", "'Total'", ")", "rpc_variation", ".", "append", "(", "percentiles", ")", "percentiles", "=", "_GetPercentileList", "(", "reqsummary", ".", "totalrpctimes", ",", "markers", ")", "percentiles", ".", "insert", "(", "0", ",", "'TotalRPCTime'", ")", "rpc_variation", ".", "append", "(", "percentiles", ")", "for", "pair", "in", "rpcsummaries", ":", "percentiles", "=", "_GetPercentileList", "(", "pair", "[", "1", "]", ".", "times", ",", "markers", ")", "percentiles", ".", "insert", "(", "0", ",", "pair", "[", "0", "]", ")", "rpc_variation", ".", "append", "(", "percentiles", ")", "return", "rpc_variation" ]
generates desired percentiles of times spent in each rpc .
train
false
44,782
def os_constant(key): os_info = util.get_os_info() try: constants = CLI_DEFAULTS[os_info[0].lower()] except KeyError: constants = os_like_constants() if (not constants): constants = CLI_DEFAULTS['default'] return constants[key]
[ "def", "os_constant", "(", "key", ")", ":", "os_info", "=", "util", ".", "get_os_info", "(", ")", "try", ":", "constants", "=", "CLI_DEFAULTS", "[", "os_info", "[", "0", "]", ".", "lower", "(", ")", "]", "except", "KeyError", ":", "constants", "=", "os_like_constants", "(", ")", "if", "(", "not", "constants", ")", ":", "constants", "=", "CLI_DEFAULTS", "[", "'default'", "]", "return", "constants", "[", "key", "]" ]
get a constant value for operating system .
train
false
44,784
@simple_tag def get_providers(): return providers.registry.get_list()
[ "@", "simple_tag", "def", "get_providers", "(", ")", ":", "return", "providers", ".", "registry", ".", "get_list", "(", ")" ]
returns a list of social authentication providers .
train
false
44,785
def get_default_view_plugins(get_datastore_views=False): if (config.get('ckan.views.default_views') is None): default_view_types = DEFAULT_RESOURCE_VIEW_TYPES else: default_view_types = config.get('ckan.views.default_views').split() default_view_plugins = [] for view_type in default_view_types: view_plugin = get_view_plugin(view_type) if (not view_plugin): log.warn('Plugin for view {0} could not be found'.format(view_type)) continue info = view_plugin.info() plugin_requires_datastore = info.get('requires_datastore', False) if (plugin_requires_datastore == get_datastore_views): default_view_plugins.append(view_plugin) return default_view_plugins
[ "def", "get_default_view_plugins", "(", "get_datastore_views", "=", "False", ")", ":", "if", "(", "config", ".", "get", "(", "'ckan.views.default_views'", ")", "is", "None", ")", ":", "default_view_types", "=", "DEFAULT_RESOURCE_VIEW_TYPES", "else", ":", "default_view_types", "=", "config", ".", "get", "(", "'ckan.views.default_views'", ")", ".", "split", "(", ")", "default_view_plugins", "=", "[", "]", "for", "view_type", "in", "default_view_types", ":", "view_plugin", "=", "get_view_plugin", "(", "view_type", ")", "if", "(", "not", "view_plugin", ")", ":", "log", ".", "warn", "(", "'Plugin for view {0} could not be found'", ".", "format", "(", "view_type", ")", ")", "continue", "info", "=", "view_plugin", ".", "info", "(", ")", "plugin_requires_datastore", "=", "info", ".", "get", "(", "'requires_datastore'", ",", "False", ")", "if", "(", "plugin_requires_datastore", "==", "get_datastore_views", ")", ":", "default_view_plugins", ".", "append", "(", "view_plugin", ")", "return", "default_view_plugins" ]
returns the list of view plugins to be created by default on new resources the default view types are defined via the ckan .
train
false
44,786
@control_command(args=[(u'n', int)], signature=u'[N=1]') def pool_grow(state, n=1, **kwargs): if state.consumer.controller.autoscaler: state.consumer.controller.autoscaler.force_scale_up(n) else: state.consumer.pool.grow(n) state.consumer._update_prefetch_count(n) return ok(u'pool will grow')
[ "@", "control_command", "(", "args", "=", "[", "(", "u'n'", ",", "int", ")", "]", ",", "signature", "=", "u'[N=1]'", ")", "def", "pool_grow", "(", "state", ",", "n", "=", "1", ",", "**", "kwargs", ")", ":", "if", "state", ".", "consumer", ".", "controller", ".", "autoscaler", ":", "state", ".", "consumer", ".", "controller", ".", "autoscaler", ".", "force_scale_up", "(", "n", ")", "else", ":", "state", ".", "consumer", ".", "pool", ".", "grow", "(", "n", ")", "state", ".", "consumer", ".", "_update_prefetch_count", "(", "n", ")", "return", "ok", "(", "u'pool will grow'", ")" ]
grow pool by n processes/threads .
train
false
44,787
def reverse_enumerate(l): return izip(xrange((len(l) - 1), (-1), (-1)), reversed(l))
[ "def", "reverse_enumerate", "(", "l", ")", ":", "return", "izip", "(", "xrange", "(", "(", "len", "(", "l", ")", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ",", "reversed", "(", "l", ")", ")" ]
like enumerate but in the other sens usage:: .
train
false
44,788
def setup_backoff(config): def on_new_response(event): backoff = config.registry.settings['backoff'] if (backoff is not None): backoff = utils.encode_header(('%s' % backoff)) event.response.headers['Backoff'] = backoff config.add_subscriber(on_new_response, NewResponse)
[ "def", "setup_backoff", "(", "config", ")", ":", "def", "on_new_response", "(", "event", ")", ":", "backoff", "=", "config", ".", "registry", ".", "settings", "[", "'backoff'", "]", "if", "(", "backoff", "is", "not", "None", ")", ":", "backoff", "=", "utils", ".", "encode_header", "(", "(", "'%s'", "%", "backoff", ")", ")", "event", ".", "response", ".", "headers", "[", "'Backoff'", "]", "=", "backoff", "config", ".", "add_subscriber", "(", "on_new_response", ",", "NewResponse", ")" ]
attach http requests/responses objects .
train
false
44,789
def from_delayed(values): from dask.delayed import Delayed if isinstance(values, Delayed): values = [values] dsk = merge((v.dask for v in values)) name = ('bag-from-delayed-' + tokenize(*values)) names = [(name, i) for i in range(len(values))] values = [v.key for v in values] dsk2 = dict(zip(names, values)) return Bag(merge(dsk, dsk2), name, len(values))
[ "def", "from_delayed", "(", "values", ")", ":", "from", "dask", ".", "delayed", "import", "Delayed", "if", "isinstance", "(", "values", ",", "Delayed", ")", ":", "values", "=", "[", "values", "]", "dsk", "=", "merge", "(", "(", "v", ".", "dask", "for", "v", "in", "values", ")", ")", "name", "=", "(", "'bag-from-delayed-'", "+", "tokenize", "(", "*", "values", ")", ")", "names", "=", "[", "(", "name", ",", "i", ")", "for", "i", "in", "range", "(", "len", "(", "values", ")", ")", "]", "values", "=", "[", "v", ".", "key", "for", "v", "in", "values", "]", "dsk2", "=", "dict", "(", "zip", "(", "names", ",", "values", ")", ")", "return", "Bag", "(", "merge", "(", "dsk", ",", "dsk2", ")", ",", "name", ",", "len", "(", "values", ")", ")" ]
create bag from many dask .
train
false
44,795
@statfunc def autocov(x, lag=1): x = np.asarray(x) if (not lag): return 1 if (lag < 0): raise ValueError('Autocovariance lag must be a positive integer') return np.cov(x[:(- lag)], x[lag:], bias=1)
[ "@", "statfunc", "def", "autocov", "(", "x", ",", "lag", "=", "1", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "(", "not", "lag", ")", ":", "return", "1", "if", "(", "lag", "<", "0", ")", ":", "raise", "ValueError", "(", "'Autocovariance lag must be a positive integer'", ")", "return", "np", ".", "cov", "(", "x", "[", ":", "(", "-", "lag", ")", "]", ",", "x", "[", "lag", ":", "]", ",", "bias", "=", "1", ")" ]
sample autocovariance at specified lag .
train
false
44,796
@pytest.mark.network def test_git_with_ambiguous_revs(script): version_pkg_path = _create_test_package(script) package_url = ('git+file://%s@0.1#egg=version_pkg' % version_pkg_path.abspath.replace('\\', '/')) script.run('git', 'tag', '0.1', cwd=version_pkg_path) result = script.pip('install', '-e', package_url) assert ('Could not find a tag or branch' not in result.stdout) result.assert_installed('version-pkg', with_files=['.git'])
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_git_with_ambiguous_revs", "(", "script", ")", ":", "version_pkg_path", "=", "_create_test_package", "(", "script", ")", "package_url", "=", "(", "'git+file://%s@0.1#egg=version_pkg'", "%", "version_pkg_path", ".", "abspath", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", ")", "script", ".", "run", "(", "'git'", ",", "'tag'", ",", "'0.1'", ",", "cwd", "=", "version_pkg_path", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-e'", ",", "package_url", ")", "assert", "(", "'Could not find a tag or branch'", "not", "in", "result", ".", "stdout", ")", "result", ".", "assert_installed", "(", "'version-pkg'", ",", "with_files", "=", "[", "'.git'", "]", ")" ]
test git with two "names" pointing to the same commit .
train
false
44,797
def proc_exists(pid): if (not os.path.exists(('/proc/%s' % pid))): raise NoSuchProcess()
[ "def", "proc_exists", "(", "pid", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "(", "'/proc/%s'", "%", "pid", ")", ")", ")", ":", "raise", "NoSuchProcess", "(", ")" ]
check the a pid is registered in /proc raise nosuchprocess exception if not .
train
false
44,801
def winTime(): return (systime.clock() + START_TIME)
[ "def", "winTime", "(", ")", ":", "return", "(", "systime", ".", "clock", "(", ")", "+", "START_TIME", ")" ]
return the current time in seconds with high precision (windows version .
train
false
44,802
def base64_decodefile(instr, outfile): encoded_f = StringIO.StringIO(instr) with open(outfile, 'wb') as f: base64.decode(encoded_f, f) return True
[ "def", "base64_decodefile", "(", "instr", ",", "outfile", ")", ":", "encoded_f", "=", "StringIO", ".", "StringIO", "(", "instr", ")", "with", "open", "(", "outfile", ",", "'wb'", ")", "as", "f", ":", "base64", ".", "decode", "(", "encoded_f", ",", "f", ")", "return", "True" ]
decode a base64-encoded string and write the result to a file .
train
false
44,803
def get_oauth_request(request): headers = {} if ('HTTP_AUTHORIZATION' in request.META): headers['Authorization'] = request.META['HTTP_AUTHORIZATION'] return oauth.Request.from_request(request.method, request.build_absolute_uri(request.path), headers, dict(request.REQUEST))
[ "def", "get_oauth_request", "(", "request", ")", ":", "headers", "=", "{", "}", "if", "(", "'HTTP_AUTHORIZATION'", "in", "request", ".", "META", ")", ":", "headers", "[", "'Authorization'", "]", "=", "request", ".", "META", "[", "'HTTP_AUTHORIZATION'", "]", "return", "oauth", ".", "Request", ".", "from_request", "(", "request", ".", "method", ",", "request", ".", "build_absolute_uri", "(", "request", ".", "path", ")", ",", "headers", ",", "dict", "(", "request", ".", "REQUEST", ")", ")" ]
converts a django request object into an oauth2 .
train
false
44,805
def _finish_concat(info, data, events, event_id, tmin, tmax, baseline, selection, drop_log, verbose): events[:, 0] = np.arange(len(events)) selection = np.where([(len(d) == 0) for d in drop_log])[0] out = BaseEpochs(info, data, events, event_id, tmin, tmax, baseline=baseline, selection=selection, drop_log=drop_log, proj=False, on_missing='ignore', verbose=verbose) out.drop_bad() return out
[ "def", "_finish_concat", "(", "info", ",", "data", ",", "events", ",", "event_id", ",", "tmin", ",", "tmax", ",", "baseline", ",", "selection", ",", "drop_log", ",", "verbose", ")", ":", "events", "[", ":", ",", "0", "]", "=", "np", ".", "arange", "(", "len", "(", "events", ")", ")", "selection", "=", "np", ".", "where", "(", "[", "(", "len", "(", "d", ")", "==", "0", ")", "for", "d", "in", "drop_log", "]", ")", "[", "0", "]", "out", "=", "BaseEpochs", "(", "info", ",", "data", ",", "events", ",", "event_id", ",", "tmin", ",", "tmax", ",", "baseline", "=", "baseline", ",", "selection", "=", "selection", ",", "drop_log", "=", "drop_log", ",", "proj", "=", "False", ",", "on_missing", "=", "'ignore'", ",", "verbose", "=", "verbose", ")", "out", ".", "drop_bad", "(", ")", "return", "out" ]
helper to finish concatenation for epochs not read from disk .
train
false
44,807
def log_warn_only(): coloredlogs.install(level='WARNING', level_styles={'warn': {'color': 'yellow'}, 'error': {'color': 'red', 'bold': True}}, fmt='%(message)s', isatty=True)
[ "def", "log_warn_only", "(", ")", ":", "coloredlogs", ".", "install", "(", "level", "=", "'WARNING'", ",", "level_styles", "=", "{", "'warn'", ":", "{", "'color'", ":", "'yellow'", "}", ",", "'error'", ":", "{", "'color'", ":", "'red'", ",", "'bold'", ":", "True", "}", "}", ",", "fmt", "=", "'%(message)s'", ",", "isatty", "=", "True", ")" ]
drop to warning level and down to get around gen .
train
false
44,808
def var_mock(request, q_var_name, **kwargs): _patch = patch(q_var_name, **kwargs) request.addfinalizer(_patch.stop) return _patch.start()
[ "def", "var_mock", "(", "request", ",", "q_var_name", ",", "**", "kwargs", ")", ":", "_patch", "=", "patch", "(", "q_var_name", ",", "**", "kwargs", ")", "request", ".", "addfinalizer", "(", "_patch", ".", "stop", ")", "return", "_patch", ".", "start", "(", ")" ]
return a mock patching the variable with qualified name *q_var_name* .
train
false
44,809
def instance_str(instance): return state_str(instance_state(instance))
[ "def", "instance_str", "(", "instance", ")", ":", "return", "state_str", "(", "instance_state", "(", "instance", ")", ")" ]
return a string describing an instance .
train
false
44,811
def unabc(msg): if isinstance(msg, type): return _unabc(msg) else: return partial(_unabc, msg=msg)
[ "def", "unabc", "(", "msg", ")", ":", "if", "isinstance", "(", "msg", ",", "type", ")", ":", "return", "_unabc", "(", "msg", ")", "else", ":", "return", "partial", "(", "_unabc", ",", "msg", "=", "msg", ")" ]
add dummy methods to a class to satisfy abstract base class constraints .
train
false
44,813
def _validate_input(dist_matrix_header, dist_matrix, mapping_header, mapping, field): if ((dist_matrix_header is None) or (dist_matrix is None) or (mapping_header is None) or (mapping is None) or (field is None)): raise ValueError("The input(s) cannot be 'None'.") for input_arg in (dist_matrix_header, dist_matrix, mapping_header, mapping): try: iter(input_arg) except: raise ValueError('The headers, distance matrix, and mapping data must be iterable.') if (not isinstance(field, str)): raise ValueError('The field must be a string.') if (field not in mapping_header): raise ValueError(("The field '%s' is not in the mapping file header." % field)) if (not (set(zip(*mapping)[0]) & set(dist_matrix_header))): raise ValueError('The mapping file does not share at least one sample with the distance matrix.')
[ "def", "_validate_input", "(", "dist_matrix_header", ",", "dist_matrix", ",", "mapping_header", ",", "mapping", ",", "field", ")", ":", "if", "(", "(", "dist_matrix_header", "is", "None", ")", "or", "(", "dist_matrix", "is", "None", ")", "or", "(", "mapping_header", "is", "None", ")", "or", "(", "mapping", "is", "None", ")", "or", "(", "field", "is", "None", ")", ")", ":", "raise", "ValueError", "(", "\"The input(s) cannot be 'None'.\"", ")", "for", "input_arg", "in", "(", "dist_matrix_header", ",", "dist_matrix", ",", "mapping_header", ",", "mapping", ")", ":", "try", ":", "iter", "(", "input_arg", ")", "except", ":", "raise", "ValueError", "(", "'The headers, distance matrix, and mapping data must be iterable.'", ")", "if", "(", "not", "isinstance", "(", "field", ",", "str", ")", ")", ":", "raise", "ValueError", "(", "'The field must be a string.'", ")", "if", "(", "field", "not", "in", "mapping_header", ")", ":", "raise", "ValueError", "(", "(", "\"The field '%s' is not in the mapping file header.\"", "%", "field", ")", ")", "if", "(", "not", "(", "set", "(", "zip", "(", "*", "mapping", ")", "[", "0", "]", ")", "&", "set", "(", "dist_matrix_header", ")", ")", ")", ":", "raise", "ValueError", "(", "'The mapping file does not share at least one sample with the distance matrix.'", ")" ]
validates the input data to make sure it can be used and makes sense .
train
false
44,814
def _raise_if_updates_provider_attributes(attrs): if any((validators.is_attr_set(attrs.get(a)) for a in ATTRIBUTES)): msg = _('Plugin does not support updating provider attributes') raise n_exc.InvalidInput(error_message=msg)
[ "def", "_raise_if_updates_provider_attributes", "(", "attrs", ")", ":", "if", "any", "(", "(", "validators", ".", "is_attr_set", "(", "attrs", ".", "get", "(", "a", ")", ")", "for", "a", "in", "ATTRIBUTES", ")", ")", ":", "msg", "=", "_", "(", "'Plugin does not support updating provider attributes'", ")", "raise", "n_exc", ".", "InvalidInput", "(", "error_message", "=", "msg", ")" ]
raise exception if provider attributes are present .
train
false
44,816
def desktop(): return app().desktop()
[ "def", "desktop", "(", ")", ":", "return", "app", "(", ")", ".", "desktop", "(", ")" ]
return the desktop .
train
false
44,817
@domain_constructor() def coin_flip(): return {'loss': hp.choice('flip', [0.0, 1.0]), 'status': base.STATUS_OK}
[ "@", "domain_constructor", "(", ")", "def", "coin_flip", "(", ")", ":", "return", "{", "'loss'", ":", "hp", ".", "choice", "(", "'flip'", ",", "[", "0.0", ",", "1.0", "]", ")", ",", "'status'", ":", "base", ".", "STATUS_OK", "}" ]
possibly the simplest possible bandit implementation .
train
false
44,818
def test_simple_class_based_method_view(): @hug.object.http_methods() class EndPoint(object, ): def get(self): return 'hi there!' def post(self): return 'bye' assert (hug.test.get(api, 'endpoint').data == 'hi there!') assert (hug.test.post(api, 'endpoint').data == 'bye')
[ "def", "test_simple_class_based_method_view", "(", ")", ":", "@", "hug", ".", "object", ".", "http_methods", "(", ")", "class", "EndPoint", "(", "object", ",", ")", ":", "def", "get", "(", "self", ")", ":", "return", "'hi there!'", "def", "post", "(", "self", ")", ":", "return", "'bye'", "assert", "(", "hug", ".", "test", ".", "get", "(", "api", ",", "'endpoint'", ")", ".", "data", "==", "'hi there!'", ")", "assert", "(", "hug", ".", "test", ".", "post", "(", "api", ",", "'endpoint'", ")", ".", "data", "==", "'bye'", ")" ]
test creating class based routers using method mappings .
train
false
44,819
def exec_sorted(statement, *args, **kw): return sorted([tuple(row) for row in statement.execute(*args, **kw).fetchall()])
[ "def", "exec_sorted", "(", "statement", ",", "*", "args", ",", "**", "kw", ")", ":", "return", "sorted", "(", "[", "tuple", "(", "row", ")", "for", "row", "in", "statement", ".", "execute", "(", "*", "args", ",", "**", "kw", ")", ".", "fetchall", "(", ")", "]", ")" ]
executes a statement and returns a sorted list plain tuple rows .
train
false
44,820
def pts_to_poststep(x, *args): steps = np.zeros(((1 + len(args)), ((2 * len(x)) - 1))) steps[0, 0::2] = x steps[0, 1::2] = steps[0, 2::2] steps[1:, 0::2] = args steps[1:, 1::2] = steps[1:, 0:(-2):2] return steps
[ "def", "pts_to_poststep", "(", "x", ",", "*", "args", ")", ":", "steps", "=", "np", ".", "zeros", "(", "(", "(", "1", "+", "len", "(", "args", ")", ")", ",", "(", "(", "2", "*", "len", "(", "x", ")", ")", "-", "1", ")", ")", ")", "steps", "[", "0", ",", "0", ":", ":", "2", "]", "=", "x", "steps", "[", "0", ",", "1", ":", ":", "2", "]", "=", "steps", "[", "0", ",", "2", ":", ":", "2", "]", "steps", "[", "1", ":", ",", "0", ":", ":", "2", "]", "=", "args", "steps", "[", "1", ":", ",", "1", ":", ":", "2", "]", "=", "steps", "[", "1", ":", ",", "0", ":", "(", "-", "2", ")", ":", "2", "]", "return", "steps" ]
convert continuous line to post-steps .
train
false
44,821
def describe_launch_configuration(name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: lc = conn.get_all_launch_configurations(names=[name]) if lc: return lc[0] else: msg = 'The launch configuration does not exist in region {0}'.format(region) log.debug(msg) return None except boto.exception.BotoServerError as e: log.error(e) return None
[ "def", "describe_launch_configuration", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "lc", "=", "conn", ".", "get_all_launch_configurations", "(", "names", "=", "[", "name", "]", ")", "if", "lc", ":", "return", "lc", "[", "0", "]", "else", ":", "msg", "=", "'The launch configuration does not exist in region {0}'", ".", "format", "(", "region", ")", "log", ".", "debug", "(", "msg", ")", "return", "None", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "None" ]
dump details of a given launch configuration .
train
false
44,822
def _fill_mi_header(row, control_row): last = row[0] for i in range(1, len(row)): if (not control_row[i]): last = row[i] if ((row[i] == '') or (row[i] is None)): row[i] = last else: control_row[i] = False last = row[i] return (row, control_row)
[ "def", "_fill_mi_header", "(", "row", ",", "control_row", ")", ":", "last", "=", "row", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "row", ")", ")", ":", "if", "(", "not", "control_row", "[", "i", "]", ")", ":", "last", "=", "row", "[", "i", "]", "if", "(", "(", "row", "[", "i", "]", "==", "''", ")", "or", "(", "row", "[", "i", "]", "is", "None", ")", ")", ":", "row", "[", "i", "]", "=", "last", "else", ":", "control_row", "[", "i", "]", "=", "False", "last", "=", "row", "[", "i", "]", "return", "(", "row", ",", "control_row", ")" ]
forward fills blank entries in row .
train
true
44,823
def is_dvr_serviced(device_owner): return (device_owner.startswith(n_const.DEVICE_OWNER_COMPUTE_PREFIX) or (device_owner in get_other_dvr_serviced_device_owners()))
[ "def", "is_dvr_serviced", "(", "device_owner", ")", ":", "return", "(", "device_owner", ".", "startswith", "(", "n_const", ".", "DEVICE_OWNER_COMPUTE_PREFIX", ")", "or", "(", "device_owner", "in", "get_other_dvr_serviced_device_owners", "(", ")", ")", ")" ]
check if the port need to be serviced by dvr helper function to check the device owners of the ports in the compute and service node to make sure if they are required for dvr or any service directly or indirectly associated with dvr .
train
false
44,824
def RunMapForKinds(operation_key, kinds, job_name_template, handler_spec, reader_spec, writer_spec, mapper_params, mapreduce_params=None, queue_name=None, max_shard_count=None): jobs = [] try: for kind in kinds: mapper_params['entity_kind'] = kind job_name = (job_name_template % {'kind': kind, 'namespace': mapper_params.get('namespace', '')}) shard_count = GetShardCount(kind, max_shard_count) jobs.append(StartMap(operation_key, job_name, handler_spec, reader_spec, writer_spec, mapper_params, mapreduce_params, queue_name=queue_name, shard_count=shard_count)) return jobs except BaseException as ex: AbortAdminOperation(operation_key, _status=DatastoreAdminOperation.STATUS_FAILED, _status_info=('%s: %s' % (ex.__class__.__name__, ex))) raise
[ "def", "RunMapForKinds", "(", "operation_key", ",", "kinds", ",", "job_name_template", ",", "handler_spec", ",", "reader_spec", ",", "writer_spec", ",", "mapper_params", ",", "mapreduce_params", "=", "None", ",", "queue_name", "=", "None", ",", "max_shard_count", "=", "None", ")", ":", "jobs", "=", "[", "]", "try", ":", "for", "kind", "in", "kinds", ":", "mapper_params", "[", "'entity_kind'", "]", "=", "kind", "job_name", "=", "(", "job_name_template", "%", "{", "'kind'", ":", "kind", ",", "'namespace'", ":", "mapper_params", ".", "get", "(", "'namespace'", ",", "''", ")", "}", ")", "shard_count", "=", "GetShardCount", "(", "kind", ",", "max_shard_count", ")", "jobs", ".", "append", "(", "StartMap", "(", "operation_key", ",", "job_name", ",", "handler_spec", ",", "reader_spec", ",", "writer_spec", ",", "mapper_params", ",", "mapreduce_params", ",", "queue_name", "=", "queue_name", ",", "shard_count", "=", "shard_count", ")", ")", "return", "jobs", "except", "BaseException", "as", "ex", ":", "AbortAdminOperation", "(", "operation_key", ",", "_status", "=", "DatastoreAdminOperation", ".", "STATUS_FAILED", ",", "_status_info", "=", "(", "'%s: %s'", "%", "(", "ex", ".", "__class__", ".", "__name__", ",", "ex", ")", ")", ")", "raise" ]
run mapper job for all entities in specified kinds .
train
false
44,825
def current_time_ms(): now = datetime.datetime.utcnow() new_microsecond = (int((now.microsecond / 1000)) * 1000) return now.replace(microsecond=new_microsecond)
[ "def", "current_time_ms", "(", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "new_microsecond", "=", "(", "int", "(", "(", "now", ".", "microsecond", "/", "1000", ")", ")", "*", "1000", ")", "return", "now", ".", "replace", "(", "microsecond", "=", "new_microsecond", ")" ]
gets the current time with millisecond precision .
train
false
44,826
def _is_counter_log4j_record(record): return bool(_INDENTED_COUNTERS_MESSAGE_RE.match(record['message']))
[ "def", "_is_counter_log4j_record", "(", "record", ")", ":", "return", "bool", "(", "_INDENTED_COUNTERS_MESSAGE_RE", ".", "match", "(", "record", "[", "'message'", "]", ")", ")" ]
is this the record containing counters? .
train
false
44,827
def onlylib(*libs): def set_libs(function): if libs: function.LIBS = libs return function return set_libs
[ "def", "onlylib", "(", "*", "libs", ")", ":", "def", "set_libs", "(", "function", ")", ":", "if", "libs", ":", "function", ".", "LIBS", "=", "libs", "return", "function", "return", "set_libs" ]
decorator to restrict benchmarks to specific libraries .
train
false
44,828
def redirect_to_twitter(twitter_handle): try: user = User.find_one(Q('social.twitter', 'iexact', twitter_handle)) except NoResultsFound: raise HTTPError(http.NOT_FOUND, data={'message_short': 'User Not Found', 'message_long': 'There is no active user associated with the Twitter handle: {0}.'.format(twitter_handle)}) except MultipleResultsFound: users = User.find(Q('social.twitter', 'iexact', twitter_handle)) message_long = 'There are multiple OSF accounts associated with the Twitter handle: <strong>{0}</strong>. <br /> Please select from the accounts below. <br /><ul>'.format(markupsafe.escape(twitter_handle)) for user in users: message_long += '<li><a href="{0}">{1}</a></li>'.format(user.url, markupsafe.escape(user.fullname)) message_long += '</ul>' raise HTTPError(http.MULTIPLE_CHOICES, data={'message_short': 'Multiple Users Found', 'message_long': message_long}) return redirect(user.url)
[ "def", "redirect_to_twitter", "(", "twitter_handle", ")", ":", "try", ":", "user", "=", "User", ".", "find_one", "(", "Q", "(", "'social.twitter'", ",", "'iexact'", ",", "twitter_handle", ")", ")", "except", "NoResultsFound", ":", "raise", "HTTPError", "(", "http", ".", "NOT_FOUND", ",", "data", "=", "{", "'message_short'", ":", "'User Not Found'", ",", "'message_long'", ":", "'There is no active user associated with the Twitter handle: {0}.'", ".", "format", "(", "twitter_handle", ")", "}", ")", "except", "MultipleResultsFound", ":", "users", "=", "User", ".", "find", "(", "Q", "(", "'social.twitter'", ",", "'iexact'", ",", "twitter_handle", ")", ")", "message_long", "=", "'There are multiple OSF accounts associated with the Twitter handle: <strong>{0}</strong>. <br /> Please select from the accounts below. <br /><ul>'", ".", "format", "(", "markupsafe", ".", "escape", "(", "twitter_handle", ")", ")", "for", "user", "in", "users", ":", "message_long", "+=", "'<li><a href=\"{0}\">{1}</a></li>'", ".", "format", "(", "user", ".", "url", ",", "markupsafe", ".", "escape", "(", "user", ".", "fullname", ")", ")", "message_long", "+=", "'</ul>'", "raise", "HTTPError", "(", "http", ".", "MULTIPLE_CHOICES", ",", "data", "=", "{", "'message_short'", ":", "'Multiple Users Found'", ",", "'message_long'", ":", "message_long", "}", ")", "return", "redirect", "(", "user", ".", "url", ")" ]
redirect get requests for /@twitterhandle/ to respective the osf user account if it associated with an active account .
train
false
44,829
def rank(X, cond=1e-12): X = np.asarray(X) if (len(X.shape) == 2): D = scipy.linalg.svdvals(X) return int(np.add.reduce(np.greater((D / D.max()), cond).astype(np.int32))) else: return int((not np.alltrue(np.equal(X, 0.0))))
[ "def", "rank", "(", "X", ",", "cond", "=", "1e-12", ")", ":", "X", "=", "np", ".", "asarray", "(", "X", ")", "if", "(", "len", "(", "X", ".", "shape", ")", "==", "2", ")", ":", "D", "=", "scipy", ".", "linalg", ".", "svdvals", "(", "X", ")", "return", "int", "(", "np", ".", "add", ".", "reduce", "(", "np", ".", "greater", "(", "(", "D", "/", "D", ".", "max", "(", ")", ")", ",", "cond", ")", ".", "astype", "(", "np", ".", "int32", ")", ")", ")", "else", ":", "return", "int", "(", "(", "not", "np", ".", "alltrue", "(", "np", ".", "equal", "(", "X", ",", "0.0", ")", ")", ")", ")" ]
return the rank of a matrix x based on its generalized inverse .
train
false
44,831
@requires_badges_enabled def course_badge_check(user, course_key): if (not modulestore().get_course(course_key).issue_badges): LOGGER.info('Course is not configured to issue badges.') return badge_class = get_completion_badge(course_key, user) if (not badge_class): return if BadgeAssertion.objects.filter(user=user, badge_class=badge_class): LOGGER.info('Completion badge already exists for this user on this course.') return evidence = evidence_url(user.id, course_key) badge_class.award(user, evidence_url=evidence)
[ "@", "requires_badges_enabled", "def", "course_badge_check", "(", "user", ",", "course_key", ")", ":", "if", "(", "not", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ")", ".", "issue_badges", ")", ":", "LOGGER", ".", "info", "(", "'Course is not configured to issue badges.'", ")", "return", "badge_class", "=", "get_completion_badge", "(", "course_key", ",", "user", ")", "if", "(", "not", "badge_class", ")", ":", "return", "if", "BadgeAssertion", ".", "objects", ".", "filter", "(", "user", "=", "user", ",", "badge_class", "=", "badge_class", ")", ":", "LOGGER", ".", "info", "(", "'Completion badge already exists for this user on this course.'", ")", "return", "evidence", "=", "evidence_url", "(", "user", ".", "id", ",", "course_key", ")", "badge_class", ".", "award", "(", "user", ",", "evidence_url", "=", "evidence", ")" ]
takes a generatedcertificate instance .
train
false
44,833
def abbrev_list(vals): ranges = [] lower = 0 upper = (-2) for val in (sorted(vals) + [(-1)]): if (val != (upper + 1)): if (lower == upper): ranges.append(str(lower)) elif (lower <= upper): ranges.append(('%d-%d' % (lower, upper))) lower = val upper = val return ','.join(ranges)
[ "def", "abbrev_list", "(", "vals", ")", ":", "ranges", "=", "[", "]", "lower", "=", "0", "upper", "=", "(", "-", "2", ")", "for", "val", "in", "(", "sorted", "(", "vals", ")", "+", "[", "(", "-", "1", ")", "]", ")", ":", "if", "(", "val", "!=", "(", "upper", "+", "1", ")", ")", ":", "if", "(", "lower", "==", "upper", ")", ":", "ranges", ".", "append", "(", "str", "(", "lower", ")", ")", "elif", "(", "lower", "<=", "upper", ")", ":", "ranges", ".", "append", "(", "(", "'%d-%d'", "%", "(", "lower", ",", "upper", ")", ")", ")", "lower", "=", "val", "upper", "=", "val", "return", "','", ".", "join", "(", "ranges", ")" ]
condense unsigned to 0 .
train
false
44,834
def template_read(): if (len(get_vars) > 0): (dummy, template_id) = get_vars.viewing.split('.') else: template_id = request.args[0] def postp(r, output): if r.interactive: template_id = r.id form = s3db.survey_buildQuestionnaireFromTemplate(template_id) output['items'] = None output['form'] = None output['item'] = form output['title'] = s3.crud_strings['survey_template'].title_question_details return output s3.postp = postp s3db.configure('survey_template', listadd=False, editable=False, deletable=False) r = s3_request('survey', 'template', args=[template_id]) output = r(method='read', rheader=s3db.survey_template_rheader) return output
[ "def", "template_read", "(", ")", ":", "if", "(", "len", "(", "get_vars", ")", ">", "0", ")", ":", "(", "dummy", ",", "template_id", ")", "=", "get_vars", ".", "viewing", ".", "split", "(", "'.'", ")", "else", ":", "template_id", "=", "request", ".", "args", "[", "0", "]", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "r", ".", "interactive", ":", "template_id", "=", "r", ".", "id", "form", "=", "s3db", ".", "survey_buildQuestionnaireFromTemplate", "(", "template_id", ")", "output", "[", "'items'", "]", "=", "None", "output", "[", "'form'", "]", "=", "None", "output", "[", "'item'", "]", "=", "form", "output", "[", "'title'", "]", "=", "s3", ".", "crud_strings", "[", "'survey_template'", "]", ".", "title_question_details", "return", "output", "s3", ".", "postp", "=", "postp", "s3db", ".", "configure", "(", "'survey_template'", ",", "listadd", "=", "False", ",", "editable", "=", "False", ",", "deletable", "=", "False", ")", "r", "=", "s3_request", "(", "'survey'", ",", "'template'", ",", "args", "=", "[", "template_id", "]", ")", "output", "=", "r", "(", "method", "=", "'read'", ",", "rheader", "=", "s3db", ".", "survey_template_rheader", ")", "return", "output" ]
show the details of all the questions of a particular template .
train
false
44,836
def emit_field_changed_events(instance, user, db_table, excluded_fields=None, hidden_fields=None): def clean_field(field_name, value): '\n Prepare a field to be emitted in a JSON serializable format. If\n `field_name` is a hidden field, return None.\n ' if (field_name in hidden_fields): return None if isinstance(value, Country): if value.code: return value.code else: return None return value excluded_fields = (excluded_fields or []) hidden_fields = (hidden_fields or []) changed_fields = getattr(instance, '_changed_fields', {}) for field_name in changed_fields: if (field_name not in excluded_fields): old_value = clean_field(field_name, changed_fields[field_name]) new_value = clean_field(field_name, getattr(instance, field_name)) emit_setting_changed_event(user, db_table, field_name, old_value, new_value) if hasattr(instance, '_changed_fields'): del instance._changed_fields
[ "def", "emit_field_changed_events", "(", "instance", ",", "user", ",", "db_table", ",", "excluded_fields", "=", "None", ",", "hidden_fields", "=", "None", ")", ":", "def", "clean_field", "(", "field_name", ",", "value", ")", ":", "if", "(", "field_name", "in", "hidden_fields", ")", ":", "return", "None", "if", "isinstance", "(", "value", ",", "Country", ")", ":", "if", "value", ".", "code", ":", "return", "value", ".", "code", "else", ":", "return", "None", "return", "value", "excluded_fields", "=", "(", "excluded_fields", "or", "[", "]", ")", "hidden_fields", "=", "(", "hidden_fields", "or", "[", "]", ")", "changed_fields", "=", "getattr", "(", "instance", ",", "'_changed_fields'", ",", "{", "}", ")", "for", "field_name", "in", "changed_fields", ":", "if", "(", "field_name", "not", "in", "excluded_fields", ")", ":", "old_value", "=", "clean_field", "(", "field_name", ",", "changed_fields", "[", "field_name", "]", ")", "new_value", "=", "clean_field", "(", "field_name", ",", "getattr", "(", "instance", ",", "field_name", ")", ")", "emit_setting_changed_event", "(", "user", ",", "db_table", ",", "field_name", ",", "old_value", ",", "new_value", ")", "if", "hasattr", "(", "instance", ",", "'_changed_fields'", ")", ":", "del", "instance", ".", "_changed_fields" ]
emits a settings changed event for each field that has changed .
train
false
44,837
def mad(a, c=0.6745, axis=0): _shape = a.shape a.shape = np.product(a.shape, axis=0) m = (np.median(np.fabs((a - np.median(a)))) / c) a.shape = _shape return m
[ "def", "mad", "(", "a", ",", "c", "=", "0.6745", ",", "axis", "=", "0", ")", ":", "_shape", "=", "a", ".", "shape", "a", ".", "shape", "=", "np", ".", "product", "(", "a", ".", "shape", ",", "axis", "=", "0", ")", "m", "=", "(", "np", ".", "median", "(", "np", ".", "fabs", "(", "(", "a", "-", "np", ".", "median", "(", "a", ")", ")", ")", ")", "/", "c", ")", "a", ".", "shape", "=", "_shape", "return", "m" ]
median absolute deviation: median(abs(a - median(a))) / c .
train
false
44,838
def conllned(trace=1): from nltk.corpus import conll2002 vnv = "\n (\n is/V| # 3rd sing present and\n was/V| # past forms of the verb zijn ('be')\n werd/V| # and also present\n wordt/V # past of worden ('become)\n )\n .* # followed by anything\n van/Prep # followed by van ('of')\n " VAN = re.compile(vnv, re.VERBOSE) print() print('Dutch CoNLL2002: van(PER, ORG) -- raw rtuples with context:') print(('=' * 45)) for doc in conll2002.chunked_sents('ned.train'): lcon = rcon = False if trace: lcon = rcon = True for rel in extract_rels('PER', 'ORG', doc, corpus='conll2002', pattern=VAN, window=10): print(rtuple(rel, lcon=lcon, rcon=rcon))
[ "def", "conllned", "(", "trace", "=", "1", ")", ":", "from", "nltk", ".", "corpus", "import", "conll2002", "vnv", "=", "\"\\n (\\n is/V| # 3rd sing present and\\n was/V| # past forms of the verb zijn ('be')\\n werd/V| # and also present\\n wordt/V # past of worden ('become)\\n )\\n .* # followed by anything\\n van/Prep # followed by van ('of')\\n \"", "VAN", "=", "re", ".", "compile", "(", "vnv", ",", "re", ".", "VERBOSE", ")", "print", "(", ")", "print", "(", "'Dutch CoNLL2002: van(PER, ORG) -- raw rtuples with context:'", ")", "print", "(", "(", "'='", "*", "45", ")", ")", "for", "doc", "in", "conll2002", ".", "chunked_sents", "(", "'ned.train'", ")", ":", "lcon", "=", "rcon", "=", "False", "if", "trace", ":", "lcon", "=", "rcon", "=", "True", "for", "rel", "in", "extract_rels", "(", "'PER'", ",", "'ORG'", ",", "doc", ",", "corpus", "=", "'conll2002'", ",", "pattern", "=", "VAN", ",", "window", "=", "10", ")", ":", "print", "(", "rtuple", "(", "rel", ",", "lcon", "=", "lcon", ",", "rcon", "=", "rcon", ")", ")" ]
find the copula+van relation in the dutch tagged training corpus from conll 2002 .
train
false
44,839
def _expand_balancer(lb): ret = {} ret.update(lb.__dict__) hc = ret['extra']['healthchecks'] ret['extra']['healthchecks'] = [] for item in hc: ret['extra']['healthchecks'].append(_expand_item(item)) fwr = ret['extra']['forwarding_rule'] tp = ret['extra']['forwarding_rule'].targetpool reg = ret['extra']['forwarding_rule'].region ret['extra']['forwarding_rule'] = {} ret['extra']['forwarding_rule'].update(fwr.__dict__) ret['extra']['forwarding_rule']['targetpool'] = tp.name ret['extra']['forwarding_rule']['region'] = reg.name tp = ret['extra']['targetpool'] hc = ret['extra']['targetpool'].healthchecks nodes = ret['extra']['targetpool'].nodes region = ret['extra']['targetpool'].region zones = ret['extra']['targetpool'].region.zones ret['extra']['targetpool'] = {} ret['extra']['targetpool'].update(tp.__dict__) ret['extra']['targetpool']['region'] = _expand_item(region) ret['extra']['targetpool']['nodes'] = [] for n in nodes: ret['extra']['targetpool']['nodes'].append(_expand_node(n)) ret['extra']['targetpool']['healthchecks'] = [] for hci in hc: ret['extra']['targetpool']['healthchecks'].append(hci.name) ret['extra']['targetpool']['region']['zones'] = [] for z in zones: ret['extra']['targetpool']['region']['zones'].append(z.name) return ret
[ "def", "_expand_balancer", "(", "lb", ")", ":", "ret", "=", "{", "}", "ret", ".", "update", "(", "lb", ".", "__dict__", ")", "hc", "=", "ret", "[", "'extra'", "]", "[", "'healthchecks'", "]", "ret", "[", "'extra'", "]", "[", "'healthchecks'", "]", "=", "[", "]", "for", "item", "in", "hc", ":", "ret", "[", "'extra'", "]", "[", "'healthchecks'", "]", ".", "append", "(", "_expand_item", "(", "item", ")", ")", "fwr", "=", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", "tp", "=", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", ".", "targetpool", "reg", "=", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", ".", "region", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", "=", "{", "}", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", ".", "update", "(", "fwr", ".", "__dict__", ")", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", "[", "'targetpool'", "]", "=", "tp", ".", "name", "ret", "[", "'extra'", "]", "[", "'forwarding_rule'", "]", "[", "'region'", "]", "=", "reg", ".", "name", "tp", "=", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "hc", "=", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", ".", "healthchecks", "nodes", "=", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", ".", "nodes", "region", "=", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", ".", "region", "zones", "=", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", ".", "region", ".", "zones", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "=", "{", "}", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", ".", "update", "(", "tp", ".", "__dict__", ")", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'region'", "]", "=", "_expand_item", "(", "region", ")", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'nodes'", "]", "=", "[", "]", "for", "n", "in", "nodes", ":", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'nodes'", "]", ".", "append", "(", "_expand_node", "(", "n", ")", ")", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'healthchecks'", "]", "=", "[", "]", "for", "hci", "in", "hc", ":", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'healthchecks'", "]", ".", "append", "(", "hci", ".", "name", ")", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'region'", "]", "[", "'zones'", "]", "=", "[", "]", "for", "z", "in", "zones", ":", "ret", "[", "'extra'", "]", "[", "'targetpool'", "]", "[", "'region'", "]", "[", "'zones'", "]", ".", "append", "(", "z", ".", "name", ")", "return", "ret" ]
convert the libcloud load-balancer object into something more serializable .
train
true
44,842
def utc_aware(unaware): def utc_method(*args, **kwargs): dt = unaware(*args, **kwargs) return dt.replace(tzinfo=UTC) return utc_method
[ "def", "utc_aware", "(", "unaware", ")", ":", "def", "utc_method", "(", "*", "args", ",", "**", "kwargs", ")", ":", "dt", "=", "unaware", "(", "*", "args", ",", "**", "kwargs", ")", "return", "dt", ".", "replace", "(", "tzinfo", "=", "UTC", ")", "return", "utc_method" ]
decorator for adding utc tzinfo to datetimes utcfoo methods .
train
false
44,844
def ensure_str(text): if isinstance(text, unicode): return text.encode(pyreadline_codepage, 'replace') return text
[ "def", "ensure_str", "(", "text", ")", ":", "if", "isinstance", "(", "text", ",", "unicode", ")", ":", "return", "text", ".", "encode", "(", "pyreadline_codepage", ",", "'replace'", ")", "return", "text" ]
convert unicode to str using pyreadline_codepage .
train
false
44,846
def regularize_layer_params(layer, penalty, tags={'regularizable': True}, **kwargs): layers = ([layer] if isinstance(layer, Layer) else layer) all_params = [] for layer in layers: all_params += layer.get_params(**tags) return apply_penalty(all_params, penalty, **kwargs)
[ "def", "regularize_layer_params", "(", "layer", ",", "penalty", ",", "tags", "=", "{", "'regularizable'", ":", "True", "}", ",", "**", "kwargs", ")", ":", "layers", "=", "(", "[", "layer", "]", "if", "isinstance", "(", "layer", ",", "Layer", ")", "else", "layer", ")", "all_params", "=", "[", "]", "for", "layer", "in", "layers", ":", "all_params", "+=", "layer", ".", "get_params", "(", "**", "tags", ")", "return", "apply_penalty", "(", "all_params", ",", "penalty", ",", "**", "kwargs", ")" ]
computes a regularization cost by applying a penalty to the parameters of a layer or group of layers .
train
false
44,847
def dbhost(mac, ip, hostname): source_idx = None while (source_idx is None): source_idx = fetch('SELECT ROWID FROM host WHERE ip = ?', (ip,)) if (len(source_idx) > 0): insert('UPDATE host SET mac = ?, hostname = ? WHERE ip = ?', (mac, hostname)) source_idx = None else: source_idx = insert('INSERT INTO host VALUES (?,?,?);', (mac, ip, hostname)) return source_idx
[ "def", "dbhost", "(", "mac", ",", "ip", ",", "hostname", ")", ":", "source_idx", "=", "None", "while", "(", "source_idx", "is", "None", ")", ":", "source_idx", "=", "fetch", "(", "'SELECT ROWID FROM host WHERE ip = ?'", ",", "(", "ip", ",", ")", ")", "if", "(", "len", "(", "source_idx", ")", ">", "0", ")", ":", "insert", "(", "'UPDATE host SET mac = ?, hostname = ? WHERE ip = ?'", ",", "(", "mac", ",", "hostname", ")", ")", "source_idx", "=", "None", "else", ":", "source_idx", "=", "insert", "(", "'INSERT INTO host VALUES (?,?,?);'", ",", "(", "mac", ",", "ip", ",", "hostname", ")", ")", "return", "source_idx" ]
insert basic host information into the database .
train
false
44,851
def Queue(maxsize=0): from multiprocessing.queues import Queue return Queue(maxsize)
[ "def", "Queue", "(", "maxsize", "=", "0", ")", ":", "from", "multiprocessing", ".", "queues", "import", "Queue", "return", "Queue", "(", "maxsize", ")" ]
returns a queue object .
train
false
44,852
def tablespace_create(name, location, options=None, owner=None, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None): owner_query = '' options_query = '' if owner: owner_query = 'OWNER "{0}"'.format(owner) if options: optionstext = ['{0} = {1}'.format(k, v) for (k, v) in six.iteritems(options)] options_query = 'WITH ( {0} )'.format(', '.join(optionstext)) query = 'CREATE TABLESPACE "{0}" {1} LOCATION \'{2}\' {3}'.format(name, owner_query, location, options_query) ret = _psql_prepare_and_run(['-c', query], user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas) return (ret['retcode'] == 0)
[ "def", "tablespace_create", "(", "name", ",", "location", ",", "options", "=", "None", ",", "owner", "=", "None", ",", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "runas", "=", "None", ")", ":", "owner_query", "=", "''", "options_query", "=", "''", "if", "owner", ":", "owner_query", "=", "'OWNER \"{0}\"'", ".", "format", "(", "owner", ")", "if", "options", ":", "optionstext", "=", "[", "'{0} = {1}'", ".", "format", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "options", ")", "]", "options_query", "=", "'WITH ( {0} )'", ".", "format", "(", "', '", ".", "join", "(", "optionstext", ")", ")", "query", "=", "'CREATE TABLESPACE \"{0}\" {1} LOCATION \\'{2}\\' {3}'", ".", "format", "(", "name", ",", "owner_query", ",", "location", ",", "options_query", ")", "ret", "=", "_psql_prepare_and_run", "(", "[", "'-c'", ",", "query", "]", ",", "user", "=", "user", ",", "host", "=", "host", ",", "port", "=", "port", ",", "maintenance_db", "=", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", "return", "(", "ret", "[", "'retcode'", "]", "==", "0", ")" ]
adds a tablespace to the postgres server .
train
true
44,853
def datashape_type_to_numpy(type_): if isinstance(type_, Option): type_ = type_.ty if isinstance(type_, DateTime): return np.dtype('datetime64[ns]') else: return type_.to_numpy_dtype()
[ "def", "datashape_type_to_numpy", "(", "type_", ")", ":", "if", "isinstance", "(", "type_", ",", "Option", ")", ":", "type_", "=", "type_", ".", "ty", "if", "isinstance", "(", "type_", ",", "DateTime", ")", ":", "return", "np", ".", "dtype", "(", "'datetime64[ns]'", ")", "else", ":", "return", "type_", ".", "to_numpy_dtype", "(", ")" ]
given a datashape type .
train
false
44,854
def fetch_msx_hdu(cache=True): return fetch_hdu('galactic_center/gc_msx_e.fits', cache=cache)
[ "def", "fetch_msx_hdu", "(", "cache", "=", "True", ")", ":", "return", "fetch_hdu", "(", "'galactic_center/gc_msx_e.fits'", ",", "cache", "=", "cache", ")" ]
fetch the msx example dataset hdu .
train
false
44,858
def tos(): _custom_view('tos') response.title = T('Terms of Service') return dict()
[ "def", "tos", "(", ")", ":", "_custom_view", "(", "'tos'", ")", "response", ".", "title", "=", "T", "(", "'Terms of Service'", ")", "return", "dict", "(", ")" ]
custom view .
train
false
44,859
def process_items(r, keys, timeout, limit=0, log_every=1000, wait=0.1): limit = (limit or float(u'inf')) processed = 0 while (processed < limit): ret = r.blpop(keys, timeout) if (ret is None): time.sleep(wait) continue (source, data) = ret try: item = json.loads(data) except Exception: logger.exception(u'Failed to load item:\n%r', pprint.pformat(data)) continue try: name = (item.get(u'name') or item.get(u'title')) url = (item.get(u'url') or item.get(u'link')) logger.debug(u'[%s] Processing item: %s <%s>', source, name, url) except KeyError: logger.exception(u'[%s] Failed to process item:\n%r', source, pprint.pformat(item)) continue processed += 1 if ((processed % log_every) == 0): logger.info(u'Processed %s items', processed)
[ "def", "process_items", "(", "r", ",", "keys", ",", "timeout", ",", "limit", "=", "0", ",", "log_every", "=", "1000", ",", "wait", "=", "0.1", ")", ":", "limit", "=", "(", "limit", "or", "float", "(", "u'inf'", ")", ")", "processed", "=", "0", "while", "(", "processed", "<", "limit", ")", ":", "ret", "=", "r", ".", "blpop", "(", "keys", ",", "timeout", ")", "if", "(", "ret", "is", "None", ")", ":", "time", ".", "sleep", "(", "wait", ")", "continue", "(", "source", ",", "data", ")", "=", "ret", "try", ":", "item", "=", "json", ".", "loads", "(", "data", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "u'Failed to load item:\\n%r'", ",", "pprint", ".", "pformat", "(", "data", ")", ")", "continue", "try", ":", "name", "=", "(", "item", ".", "get", "(", "u'name'", ")", "or", "item", ".", "get", "(", "u'title'", ")", ")", "url", "=", "(", "item", ".", "get", "(", "u'url'", ")", "or", "item", ".", "get", "(", "u'link'", ")", ")", "logger", ".", "debug", "(", "u'[%s] Processing item: %s <%s>'", ",", "source", ",", "name", ",", "url", ")", "except", "KeyError", ":", "logger", ".", "exception", "(", "u'[%s] Failed to process item:\\n%r'", ",", "source", ",", "pprint", ".", "pformat", "(", "item", ")", ")", "continue", "processed", "+=", "1", "if", "(", "(", "processed", "%", "log_every", ")", "==", "0", ")", ":", "logger", ".", "info", "(", "u'Processed %s items'", ",", "processed", ")" ]
process items from a redis queue .
train
false
44,861
def getClosestDistanceIndexToLine(point, loop): smallestDistance = 9.876543219876543e+17 closestDistanceIndex = None for pointIndex in xrange(len(loop)): segmentBegin = loop[pointIndex] segmentEnd = loop[((pointIndex + 1) % len(loop))] distance = getDistanceToPlaneSegment(segmentBegin, segmentEnd, point) if (distance < smallestDistance): smallestDistance = distance closestDistanceIndex = DistanceIndex(distance, pointIndex) return closestDistanceIndex
[ "def", "getClosestDistanceIndexToLine", "(", "point", ",", "loop", ")", ":", "smallestDistance", "=", "9.876543219876543e+17", "closestDistanceIndex", "=", "None", "for", "pointIndex", "in", "xrange", "(", "len", "(", "loop", ")", ")", ":", "segmentBegin", "=", "loop", "[", "pointIndex", "]", "segmentEnd", "=", "loop", "[", "(", "(", "pointIndex", "+", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "distance", "=", "getDistanceToPlaneSegment", "(", "segmentBegin", ",", "segmentEnd", ",", "point", ")", "if", "(", "distance", "<", "smallestDistance", ")", ":", "smallestDistance", "=", "distance", "closestDistanceIndex", "=", "DistanceIndex", "(", "distance", ",", "pointIndex", ")", "return", "closestDistanceIndex" ]
get the distance squared to the closest segment of the loop and index of that segment .
train
false
44,862
@celery.task @dog_stats_api.timed('status.service.celery.pong') def delayed_ping(value, delay): if (value == 'ping'): result = 'pong' else: result = 'got: {0}'.format(value) time.sleep(delay) return result
[ "@", "celery", ".", "task", "@", "dog_stats_api", ".", "timed", "(", "'status.service.celery.pong'", ")", "def", "delayed_ping", "(", "value", ",", "delay", ")", ":", "if", "(", "value", "==", "'ping'", ")", ":", "result", "=", "'pong'", "else", ":", "result", "=", "'got: {0}'", ".", "format", "(", "value", ")", "time", ".", "sleep", "(", "delay", ")", "return", "result" ]
a simple tasks that replies to a message after a especified amount of seconds .
train
false
44,864
def get_enabled(): return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return a list of enabled services .
train
false
44,866
def parse_cropbox(cropbox): if isinstance(cropbox, six.text_type): return tuple([int(x.strip()) for x in cropbox.split(',')]) else: return tuple(cropbox)
[ "def", "parse_cropbox", "(", "cropbox", ")", ":", "if", "isinstance", "(", "cropbox", ",", "six", ".", "text_type", ")", ":", "return", "tuple", "(", "[", "int", "(", "x", ".", "strip", "(", ")", ")", "for", "x", "in", "cropbox", ".", "split", "(", "','", ")", "]", ")", "else", ":", "return", "tuple", "(", "cropbox", ")" ]
returns x .
train
true
44,867
def test_ast_bad_try(): cant_compile(u'(try 1 bla)') cant_compile(u'(try 1 bla bla)') cant_compile(u'(try (do) (else 1) (else 2))') cant_compile(u'(try 1 (else 1))')
[ "def", "test_ast_bad_try", "(", ")", ":", "cant_compile", "(", "u'(try 1 bla)'", ")", "cant_compile", "(", "u'(try 1 bla bla)'", ")", "cant_compile", "(", "u'(try (do) (else 1) (else 2))'", ")", "cant_compile", "(", "u'(try 1 (else 1))'", ")" ]
make sure ast cant compile invalid try .
train
false
44,868
def add_hook(name, pass_function=False): def outer(f): f.__hook_name__ = name @functools.wraps(f) def inner(*args, **kwargs): manager = _HOOKS.setdefault(name, HookManager(name)) function = None if pass_function: function = f manager.run_pre(name, args, kwargs, f=function) rv = f(*args, **kwargs) manager.run_post(name, rv, args, kwargs, f=function) return rv return inner return outer
[ "def", "add_hook", "(", "name", ",", "pass_function", "=", "False", ")", ":", "def", "outer", "(", "f", ")", ":", "f", ".", "__hook_name__", "=", "name", "@", "functools", ".", "wraps", "(", "f", ")", "def", "inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "manager", "=", "_HOOKS", ".", "setdefault", "(", "name", ",", "HookManager", "(", "name", ")", ")", "function", "=", "None", "if", "pass_function", ":", "function", "=", "f", "manager", ".", "run_pre", "(", "name", ",", "args", ",", "kwargs", ",", "f", "=", "function", ")", "rv", "=", "f", "(", "*", "args", ",", "**", "kwargs", ")", "manager", ".", "run_post", "(", "name", ",", "rv", ",", "args", ",", "kwargs", ",", "f", "=", "function", ")", "return", "rv", "return", "inner", "return", "outer" ]
execute optional pre and post methods around the decorated function .
train
false
44,869
def get_read_data(data): if config['mix_case']: seq = sequence_case(data) qual = data['quality_scores'] else: seq = data['bases'] qual = data['quality_scores'] return (seq, qual)
[ "def", "get_read_data", "(", "data", ")", ":", "if", "config", "[", "'mix_case'", "]", ":", "seq", "=", "sequence_case", "(", "data", ")", "qual", "=", "data", "[", "'quality_scores'", "]", "else", ":", "seq", "=", "data", "[", "'bases'", "]", "qual", "=", "data", "[", "'quality_scores'", "]", "return", "(", "seq", ",", "qual", ")" ]
given the data for one read it returns 2 strs with the fasta seq and fasta qual .
train
false
44,870
def year_month_to_month_number(year, month, day=None): return ((((year - start_year) * 12) + (month - 1)) - start_month_0_indexed)
[ "def", "year_month_to_month_number", "(", "year", ",", "month", ",", "day", "=", "None", ")", ":", "return", "(", "(", "(", "(", "year", "-", "start_year", ")", "*", "12", ")", "+", "(", "month", "-", "1", ")", ")", "-", "start_month_0_indexed", ")" ]
time periods are integers representing months in years .
train
false
44,871
def rad(d): return ((d * pi) / 180)
[ "def", "rad", "(", "d", ")", ":", "return", "(", "(", "d", "*", "pi", ")", "/", "180", ")" ]
return the radian value for the given degrees .
train
false
44,872
@curry def set_attribute(name, value): def decorator(f): setattr(f, name, value) return f return decorator
[ "@", "curry", "def", "set_attribute", "(", "name", ",", "value", ")", ":", "def", "decorator", "(", "f", ")", ":", "setattr", "(", "f", ",", "name", ",", "value", ")", "return", "f", "return", "decorator" ]
set the value of an attribute .
train
true
44,873
def identity(x): return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
returns content unchanged .
train
false
44,876
def AutoProxy(token, serializer, manager=None, authkey=None, exposed=None, incref=True): _Client = listener_client[serializer][1] if (exposed is None): conn = _Client(token.address, authkey=authkey) try: exposed = dispatch(conn, None, 'get_methods', (token,)) finally: conn.close() if ((authkey is None) and (manager is not None)): authkey = manager._authkey if (authkey is None): authkey = process.current_process().authkey ProxyType = MakeProxyType(('AutoProxy[%s]' % token.typeid), exposed) proxy = ProxyType(token, serializer, manager=manager, authkey=authkey, incref=incref) proxy._isauto = True return proxy
[ "def", "AutoProxy", "(", "token", ",", "serializer", ",", "manager", "=", "None", ",", "authkey", "=", "None", ",", "exposed", "=", "None", ",", "incref", "=", "True", ")", ":", "_Client", "=", "listener_client", "[", "serializer", "]", "[", "1", "]", "if", "(", "exposed", "is", "None", ")", ":", "conn", "=", "_Client", "(", "token", ".", "address", ",", "authkey", "=", "authkey", ")", "try", ":", "exposed", "=", "dispatch", "(", "conn", ",", "None", ",", "'get_methods'", ",", "(", "token", ",", ")", ")", "finally", ":", "conn", ".", "close", "(", ")", "if", "(", "(", "authkey", "is", "None", ")", "and", "(", "manager", "is", "not", "None", ")", ")", ":", "authkey", "=", "manager", ".", "_authkey", "if", "(", "authkey", "is", "None", ")", ":", "authkey", "=", "process", ".", "current_process", "(", ")", ".", "authkey", "ProxyType", "=", "MakeProxyType", "(", "(", "'AutoProxy[%s]'", "%", "token", ".", "typeid", ")", ",", "exposed", ")", "proxy", "=", "ProxyType", "(", "token", ",", "serializer", ",", "manager", "=", "manager", ",", "authkey", "=", "authkey", ",", "incref", "=", "incref", ")", "proxy", ".", "_isauto", "=", "True", "return", "proxy" ]
return an auto-proxy for token .
train
false
44,877
def mock_inputs(inputs): def inner(test_func): def wrapped(*args): class mock_getpass: @staticmethod def getpass(prompt='Password: ', stream=None): if (not PY3): assert isinstance(prompt, binary_type) return inputs[u'password'] def mock_input(prompt): prompt = str(prompt) assert (str(u'__proxy__') not in prompt) response = u'' for (key, val) in inputs.items(): if (force_str(key) in prompt.lower()): response = val break return response old_getpass = createsuperuser.getpass old_input = createsuperuser.input createsuperuser.getpass = mock_getpass createsuperuser.input = mock_input try: test_func(*args) finally: createsuperuser.getpass = old_getpass createsuperuser.input = old_input return wrapped return inner
[ "def", "mock_inputs", "(", "inputs", ")", ":", "def", "inner", "(", "test_func", ")", ":", "def", "wrapped", "(", "*", "args", ")", ":", "class", "mock_getpass", ":", "@", "staticmethod", "def", "getpass", "(", "prompt", "=", "'Password: '", ",", "stream", "=", "None", ")", ":", "if", "(", "not", "PY3", ")", ":", "assert", "isinstance", "(", "prompt", ",", "binary_type", ")", "return", "inputs", "[", "u'password'", "]", "def", "mock_input", "(", "prompt", ")", ":", "prompt", "=", "str", "(", "prompt", ")", "assert", "(", "str", "(", "u'__proxy__'", ")", "not", "in", "prompt", ")", "response", "=", "u''", "for", "(", "key", ",", "val", ")", "in", "inputs", ".", "items", "(", ")", ":", "if", "(", "force_str", "(", "key", ")", "in", "prompt", ".", "lower", "(", ")", ")", ":", "response", "=", "val", "break", "return", "response", "old_getpass", "=", "createsuperuser", ".", "getpass", "old_input", "=", "createsuperuser", ".", "input", "createsuperuser", ".", "getpass", "=", "mock_getpass", "createsuperuser", ".", "input", "=", "mock_input", "try", ":", "test_func", "(", "*", "args", ")", "finally", ":", "createsuperuser", ".", "getpass", "=", "old_getpass", "createsuperuser", ".", "input", "=", "old_input", "return", "wrapped", "return", "inner" ]
decorator to temporarily replace input/getpass to allow interactive createsuperuser .
train
false
44,878
def get_setting(varname): gl = globals() if (varname not in gl.keys()): raise ValueError(('Unknown setting %s' % varname)) return gl[varname]
[ "def", "get_setting", "(", "varname", ")", ":", "gl", "=", "globals", "(", ")", "if", "(", "varname", "not", "in", "gl", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "(", "'Unknown setting %s'", "%", "varname", ")", ")", "return", "gl", "[", "varname", "]" ]
returns the value of a configuration variable .
train
false