id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
32,304
def sudo_from_args(command, log_command_filter=identity): return Effect(Sudo.from_args(command, log_command_filter=log_command_filter))
[ "def", "sudo_from_args", "(", "command", ",", "log_command_filter", "=", "identity", ")", ":", "return", "Effect", "(", "Sudo", ".", "from_args", "(", "command", ",", "log_command_filter", "=", "log_command_filter", ")", ")" ]
run a command on a remote host with sudo .
train
false
32,305
def current_page(context): page = context['__CACTUS_CURRENT_PAGE__'] return page.final_url
[ "def", "current_page", "(", "context", ")", ":", "page", "=", "context", "[", "'__CACTUS_CURRENT_PAGE__'", "]", "return", "page", ".", "final_url" ]
returns the current url .
train
false
32,306
def colorclose(color, hsva_expected): hsva_actual = color.getHsvF() return all(((abs((a - b)) <= (2 ** (-16))) for (a, b) in zip(hsva_actual, hsva_expected)))
[ "def", "colorclose", "(", "color", ",", "hsva_expected", ")", ":", "hsva_actual", "=", "color", ".", "getHsvF", "(", ")", "return", "all", "(", "(", "(", "abs", "(", "(", "a", "-", "b", ")", ")", "<=", "(", "2", "**", "(", "-", "16", ")", ")", ...
compares hsv values which are stored as 16-bit integers .
train
false
32,308
def remove_tool_dependency(app, tool_dependency): context = app.install_model.context dependency_install_dir = tool_dependency.installation_directory(app) (removed, error_message) = remove_tool_dependency_installation_directory(dependency_install_dir) if removed: tool_dependency.status = app.install_model.ToolDependency.installation_status.UNINSTALLED tool_dependency.error_message = None context.add(tool_dependency) context.flush() return (removed, error_message)
[ "def", "remove_tool_dependency", "(", "app", ",", "tool_dependency", ")", ":", "context", "=", "app", ".", "install_model", ".", "context", "dependency_install_dir", "=", "tool_dependency", ".", "installation_directory", "(", "app", ")", "(", "removed", ",", "erro...
the received tool_dependency must be in an error state .
train
false
32,309
def ListHunts(context=None): items = context.SendIteratorRequest('ListHunts', api_pb2.ApiListHuntsArgs()) return utils.MapItemsIterator((lambda data: Hunt(data=data, context=context)), items)
[ "def", "ListHunts", "(", "context", "=", "None", ")", ":", "items", "=", "context", ".", "SendIteratorRequest", "(", "'ListHunts'", ",", "api_pb2", ".", "ApiListHuntsArgs", "(", ")", ")", "return", "utils", ".", "MapItemsIterator", "(", "(", "lambda", "data"...
list all grr hunts .
train
true
32,310
def lr_op(left, right): if (len(left) > 0): lr_gate = left[(len(left) - 1)] lr_gate_is_unitary = is_scalar_matrix((Dagger(lr_gate), lr_gate), _get_min_qubits(lr_gate), True) if ((len(left) > 0) and lr_gate_is_unitary): new_left = left[0:(len(left) - 1)] new_right = (right + (Dagger(lr_gate),)) return (new_left, new_right) return None
[ "def", "lr_op", "(", "left", ",", "right", ")", ":", "if", "(", "len", "(", "left", ")", ">", "0", ")", ":", "lr_gate", "=", "left", "[", "(", "len", "(", "left", ")", "-", "1", ")", "]", "lr_gate_is_unitary", "=", "is_scalar_matrix", "(", "(", ...
perform a lr operation .
train
false
32,312
def valid_status(status): if (not status): status = 200 status = str(status) parts = status.split(' ', 1) if (len(parts) == 1): (code,) = parts reason = None else: (code, reason) = parts reason = reason.strip() try: code = int(code) except ValueError: raise ValueError(('Illegal response status from server (%s is non-numeric).' % repr(code))) if ((code < 100) or (code > 599)): raise ValueError(('Illegal response status from server (%s is out of range).' % repr(code))) if (code not in response_codes): (default_reason, message) = ('', '') else: (default_reason, message) = response_codes[code] if (reason is None): reason = default_reason return (code, reason, message)
[ "def", "valid_status", "(", "status", ")", ":", "if", "(", "not", "status", ")", ":", "status", "=", "200", "status", "=", "str", "(", "status", ")", "parts", "=", "status", ".", "split", "(", "' '", ",", "1", ")", "if", "(", "len", "(", "parts",...
return legal http status code .
train
false
32,316
def _explode_shorthand_ip_string(ip_str): if (not _is_shorthand_ip(ip_str)): return ip_str new_ip = [] hextet = ip_str.split('::') if ('.' in ip_str.split(':')[(-1)]): fill_to = 7 else: fill_to = 8 if (len(hextet) > 1): sep = (len(hextet[0].split(':')) + len(hextet[1].split(':'))) new_ip = hextet[0].split(':') for _ in xrange((fill_to - sep)): new_ip.append('0000') new_ip += hextet[1].split(':') else: new_ip = ip_str.split(':') ret_ip = [] for hextet in new_ip: ret_ip.append((('0' * (4 - len(hextet))) + hextet).lower()) return ':'.join(ret_ip)
[ "def", "_explode_shorthand_ip_string", "(", "ip_str", ")", ":", "if", "(", "not", "_is_shorthand_ip", "(", "ip_str", ")", ")", ":", "return", "ip_str", "new_ip", "=", "[", "]", "hextet", "=", "ip_str", ".", "split", "(", "'::'", ")", "if", "(", "'.'", ...
expand a shortened ipv6 address .
train
false
32,319
def gallery_async(request): media_type = request.GET.get('type', 'image') term = request.GET.get('q') media_locale = request.GET.get('locale', settings.WIKI_DEFAULT_LANGUAGE) if (media_type == 'image'): media_qs = Image.objects elif (media_type == 'video'): media_qs = Video.objects else: raise Http404 media_qs = media_qs.filter(locale=media_locale) if term: media_qs = media_qs.filter((Q(title__icontains=term) | Q(description__icontains=term))) media = paginate(request, media_qs, per_page=ITEMS_PER_PAGE) return render(request, 'gallery/includes/media_list.html', {'media_list': media})
[ "def", "gallery_async", "(", "request", ")", ":", "media_type", "=", "request", ".", "GET", ".", "get", "(", "'type'", ",", "'image'", ")", "term", "=", "request", ".", "GET", ".", "get", "(", "'q'", ")", "media_locale", "=", "request", ".", "GET", "...
ajax endpoint to media gallery .
train
false
32,320
def reach(h): i = h.find('.') if (i >= 0): b = h[(i + 1):] i = b.find('.') if (is_HDN(h) and ((i >= 0) or (b == 'local'))): return ('.' + b) return h
[ "def", "reach", "(", "h", ")", ":", "i", "=", "h", ".", "find", "(", "'.'", ")", "if", "(", "i", ">=", "0", ")", ":", "b", "=", "h", "[", "(", "i", "+", "1", ")", ":", "]", "i", "=", "b", ".", "find", "(", "'.'", ")", "if", "(", "is...
return reach of host h .
train
true
32,321
def path2stdout(sourcepath, title='', colors=None, markup='html', header=None, footer=None, linenumbers=0, form=None): sourcestring = open(sourcepath).read() Parser(sourcestring, colors=colors, title=sourcepath, markup=markup, header=header, footer=footer, linenumbers=linenumbers).format(form)
[ "def", "path2stdout", "(", "sourcepath", ",", "title", "=", "''", ",", "colors", "=", "None", ",", "markup", "=", "'html'", ",", "header", "=", "None", ",", "footer", "=", "None", ",", "linenumbers", "=", "0", ",", "form", "=", "None", ")", ":", "s...
converts code to colorized html .
train
false
32,322
def android_check_data_dir(): ext_dir = android_ext_dir() data_dir = android_data_dir() old_electrum_dir = (ext_dir + '/electrum') if ((not os.path.exists(data_dir)) and os.path.exists(old_electrum_dir)): import shutil new_headers_path = android_headers_path() old_headers_path = (old_electrum_dir + '/blockchain_headers') if ((not os.path.exists(new_headers_path)) and os.path.exists(old_headers_path)): print_error('Moving headers file to', new_headers_path) shutil.move(old_headers_path, new_headers_path) print_error('Moving data to', data_dir) shutil.move(old_electrum_dir, data_dir) return data_dir
[ "def", "android_check_data_dir", "(", ")", ":", "ext_dir", "=", "android_ext_dir", "(", ")", "data_dir", "=", "android_data_dir", "(", ")", "old_electrum_dir", "=", "(", "ext_dir", "+", "'/electrum'", ")", "if", "(", "(", "not", "os", ".", "path", ".", "ex...
if needed .
train
false
32,324
@task(aliases=['beat']) def celery_beat(ctx, level='debug', schedule=None): os.environ['DJANGO_SETTINGS_MODULE'] = 'api.base.settings' cmd = 'celery beat -A framework.celery_tasks -l {0} --pidfile='.format(level) if schedule: cmd = (cmd + ' --schedule={}'.format(schedule)) ctx.run(bin_prefix(cmd), pty=True)
[ "@", "task", "(", "aliases", "=", "[", "'beat'", "]", ")", "def", "celery_beat", "(", "ctx", ",", "level", "=", "'debug'", ",", "schedule", "=", "None", ")", ":", "os", ".", "environ", "[", "'DJANGO_SETTINGS_MODULE'", "]", "=", "'api.base.settings'", "cm...
run the celery process .
train
false
32,325
def fill_plot(): x = np.linspace(((-2) * np.pi), (2 * np.pi), 1000, endpoint=True) y = np.sin(x) plt.plot(x, y, color='blue', alpha=1.0) plt.fill_between(x, 0, y, (y > 0), color='blue', alpha=0.25) plt.fill_between(x, 0, y, (y < 0), color='red', alpha=0.25) plt.show() return
[ "def", "fill_plot", "(", ")", ":", "x", "=", "np", ".", "linspace", "(", "(", "(", "-", "2", ")", "*", "np", ".", "pi", ")", ",", "(", "2", "*", "np", ".", "pi", ")", ",", "1000", ",", "endpoint", "=", "True", ")", "y", "=", "np", ".", ...
fill plot .
train
false
32,326
def run_with_retry(args): if (args[0] != MONIT): logging.error('Cannot execute command {0}, as it is not a monit command.'.format(args)) return False retries_left = NUM_RETRIES while retries_left: return_status = subprocess.call(args) if (return_status == 0): logging.info('Monit command {0} returned successfully!'.format(args)) return True retries_left -= 1 logging.warning('Monit command {0} returned with status {1}, {2} retries left.'.format(args, return_status, retries_left)) time.sleep(SMALL_WAIT) return False
[ "def", "run_with_retry", "(", "args", ")", ":", "if", "(", "args", "[", "0", "]", "!=", "MONIT", ")", ":", "logging", ".", "error", "(", "'Cannot execute command {0}, as it is not a monit command.'", ".", "format", "(", "args", ")", ")", "return", "False", "...
runs the given monit command .
train
false
32,327
def recreate_indexes(es=None, indexes=None): if (es is None): es = get_es() if (indexes is None): indexes = all_write_indexes() for index in indexes: delete_index(index) es.indices.create(index=index, body={'mappings': get_mappings(index), 'settings': {'analysis': get_analysis()}}) es.cluster.health(wait_for_status='yellow')
[ "def", "recreate_indexes", "(", "es", "=", "None", ",", "indexes", "=", "None", ")", ":", "if", "(", "es", "is", "None", ")", ":", "es", "=", "get_es", "(", ")", "if", "(", "indexes", "is", "None", ")", ":", "indexes", "=", "all_write_indexes", "("...
deletes indexes and recreates them .
train
false
32,328
def get_cifar10(withlabel=True, ndim=3, scale=1.0): raw = _retrieve_cifar('cifar-10') train = _preprocess_cifar(raw['train_x'], raw['train_y'], withlabel, ndim, scale) test = _preprocess_cifar(raw['test_x'], raw['test_y'], withlabel, ndim, scale) return (train, test)
[ "def", "get_cifar10", "(", "withlabel", "=", "True", ",", "ndim", "=", "3", ",", "scale", "=", "1.0", ")", ":", "raw", "=", "_retrieve_cifar", "(", "'cifar-10'", ")", "train", "=", "_preprocess_cifar", "(", "raw", "[", "'train_x'", "]", ",", "raw", "["...
gets the cifar-10 dataset .
train
false
32,329
def get_profile_user_fieldname(profile_model=None, user_model=None): Profile = (profile_model or get_profile_model()) User = (user_model or get_user_model()) for field in Profile._meta.fields: if (field.rel and (field.rel.to == User)): return field.name raise ImproperlyConfigured((u'Value for ACCOUNTS_PROFILE_MODEL does not contain a ForeignKey field for auth.User: %s' % Profile.__name__))
[ "def", "get_profile_user_fieldname", "(", "profile_model", "=", "None", ",", "user_model", "=", "None", ")", ":", "Profile", "=", "(", "profile_model", "or", "get_profile_model", "(", ")", ")", "User", "=", "(", "user_model", "or", "get_user_model", "(", ")", ...
returns the name of the first field on the profile model that points to the auth .
train
true
32,330
def is_resource_enabled(resource): if (sys._getframe().f_back.f_globals.get('__name__') == '__main__'): return True result = ((use_resources is not None) and ((resource in use_resources) or ('*' in use_resources))) if (not result): _unavail[resource] = None return result
[ "def", "is_resource_enabled", "(", "resource", ")", ":", "if", "(", "sys", ".", "_getframe", "(", ")", ".", "f_back", ".", "f_globals", ".", "get", "(", "'__name__'", ")", "==", "'__main__'", ")", ":", "return", "True", "result", "=", "(", "(", "use_re...
test whether a resource is enabled .
train
false
32,331
def trim_mean(a, proportiontocut, axis=0): a = np.asarray(a) if (a.size == 0): return np.nan if (axis is None): a = a.ravel() axis = 0 nobs = a.shape[axis] lowercut = int((proportiontocut * nobs)) uppercut = (nobs - lowercut) if (lowercut > uppercut): raise ValueError('Proportion too big.') atmp = np.partition(a, (lowercut, (uppercut - 1)), axis) sl = ([slice(None)] * atmp.ndim) sl[axis] = slice(lowercut, uppercut) return np.mean(atmp[sl], axis=axis)
[ "def", "trim_mean", "(", "a", ",", "proportiontocut", ",", "axis", "=", "0", ")", ":", "a", "=", "np", ".", "asarray", "(", "a", ")", "if", "(", "a", ".", "size", "==", "0", ")", ":", "return", "np", ".", "nan", "if", "(", "axis", "is", "None...
return mean of array after trimming distribution from both tails .
train
false
32,333
def load_etc_dir(options, tags): etcdir = os.path.join(options.cdir, 'etc') sys.path.append(etcdir) modules = {} for name in list_config_modules(etcdir): path = os.path.join(etcdir, name) module = load_config_module(name, options, tags) modules[path] = (module, os.path.getmtime(path)) return modules
[ "def", "load_etc_dir", "(", "options", ",", "tags", ")", ":", "etcdir", "=", "os", ".", "path", ".", "join", "(", "options", ".", "cdir", ",", "'etc'", ")", "sys", ".", "path", ".", "append", "(", "etcdir", ")", "modules", "=", "{", "}", "for", "...
loads any python module from tcollectors own etc directory .
train
false
32,335
def _serialize_allocations_for_consumer(allocations): return _allocations_dict(allocations, (lambda x: x.resource_provider.uuid))
[ "def", "_serialize_allocations_for_consumer", "(", "allocations", ")", ":", "return", "_allocations_dict", "(", "allocations", ",", "(", "lambda", "x", ":", "x", ".", "resource_provider", ".", "uuid", ")", ")" ]
turn a list of allocations into a dict by resource provider uuid .
train
false
32,337
def update_package(package, local=False, npm='npm'): if local: run(('%(npm)s update -l %(package)s' % locals())) else: run_as_root(('HOME=/root %(npm)s update -g %(package)s' % locals()))
[ "def", "update_package", "(", "package", ",", "local", "=", "False", ",", "npm", "=", "'npm'", ")", ":", "if", "local", ":", "run", "(", "(", "'%(npm)s update -l %(package)s'", "%", "locals", "(", ")", ")", ")", "else", ":", "run_as_root", "(", "(", "'...
update a node .
train
false
32,339
def _get_all_by_resource_types(context, session, filters, marker=None, limit=None, sort_key=None, sort_dir=None): resource_types = filters['resource_types'] resource_type_list = resource_types.split(',') db_recs = session.query(models.MetadefResourceType).join(models.MetadefResourceType.associations).filter(models.MetadefResourceType.name.in_(resource_type_list)).values(models.MetadefResourceType.name, models.MetadefNamespaceResourceType.namespace_id) namespace_id_list = [] for (name, namespace_id) in db_recs: namespace_id_list.append(namespace_id) if (len(namespace_id_list) is 0): return [] filters2 = filters filters2.update({'id_list': namespace_id_list}) return _get_all(context, session, filters2, marker, limit, sort_key, sort_dir)
[ "def", "_get_all_by_resource_types", "(", "context", ",", "session", ",", "filters", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "sort_key", "=", "None", ",", "sort_dir", "=", "None", ")", ":", "resource_types", "=", "filters", "[", "'resou...
get all visible namespaces for the specified resource_types .
train
false
32,340
@contextlib.contextmanager def safe_query_string(request): qs = request.META['QUERY_STRING'] try: request.META['QUERY_STRING'] = iri_to_uri(qs) (yield) finally: request.META['QUERY_STRING'] = qs
[ "@", "contextlib", ".", "contextmanager", "def", "safe_query_string", "(", "request", ")", ":", "qs", "=", "request", ".", "META", "[", "'QUERY_STRING'", "]", "try", ":", "request", ".", "META", "[", "'QUERY_STRING'", "]", "=", "iri_to_uri", "(", "qs", ")"...
turn the query_string into a unicode- and ascii-safe string .
train
false
32,341
def _create_formatters(cp): flist = cp.get('formatters', 'keys') if (not len(flist)): return {} flist = string.split(flist, ',') flist = _strip_spaces(flist) formatters = {} for form in flist: sectname = ('formatter_%s' % form) opts = cp.options(sectname) if ('format' in opts): fs = cp.get(sectname, 'format', 1) else: fs = None if ('datefmt' in opts): dfs = cp.get(sectname, 'datefmt', 1) else: dfs = None c = logging.Formatter if ('class' in opts): class_name = cp.get(sectname, 'class') if class_name: c = _resolve(class_name) f = c(fs, dfs) formatters[form] = f return formatters
[ "def", "_create_formatters", "(", "cp", ")", ":", "flist", "=", "cp", ".", "get", "(", "'formatters'", ",", "'keys'", ")", "if", "(", "not", "len", "(", "flist", ")", ")", ":", "return", "{", "}", "flist", "=", "string", ".", "split", "(", "flist",...
create and return formatters .
train
false
32,342
def snapshot_profile(): with _profile_lock: merge_profile() return copy.deepcopy(_cumulative_profile)
[ "def", "snapshot_profile", "(", ")", ":", "with", "_profile_lock", ":", "merge_profile", "(", ")", "return", "copy", ".", "deepcopy", "(", "_cumulative_profile", ")" ]
returns the cumulative execution profile until this call .
train
false
32,343
def make_get_access_token_call(rpc, scopes, service_account_id=None): request = app_identity_service_pb.GetAccessTokenRequest() if (not scopes): raise InvalidScope('No scopes specified.') if isinstance(scopes, basestring): request.add_scope(scopes) else: for scope in scopes: request.add_scope(scope) if service_account_id: request.set_service_account_id(service_account_id) response = app_identity_service_pb.GetAccessTokenResponse() def get_access_token_result(rpc): 'Check success, handle exceptions, and return converted RPC result.\n\n This method waits for the RPC if it has not yet finished, and calls the\n post-call hooks on the first invocation.\n\n Args:\n rpc: A UserRPC object.\n\n Returns:\n Pair, Access token (string) and expiration time (seconds since the epoch).\n ' assert (rpc.service == _APP_IDENTITY_SERVICE_NAME), repr(rpc.service) assert (rpc.method == _GET_ACCESS_TOKEN_METHOD_NAME), repr(rpc.method) try: rpc.check_success() except apiproxy_errors.ApplicationError as err: raise _to_app_identity_error(err) return (response.access_token(), response.expiration_time()) rpc.make_call(_GET_ACCESS_TOKEN_METHOD_NAME, request, response, get_access_token_result)
[ "def", "make_get_access_token_call", "(", "rpc", ",", "scopes", ",", "service_account_id", "=", "None", ")", ":", "request", "=", "app_identity_service_pb", ".", "GetAccessTokenRequest", "(", ")", "if", "(", "not", "scopes", ")", ":", "raise", "InvalidScope", "(...
oauth2 access token to act on behalf of the application .
train
false
32,344
def test_mod_compile(): x = tensor.vector() y = tensor.vector() shape = x.shape out = tensor.switch(tensor.eq((3 % x.shape[0]), 0), y, y[:(-1)]) f = theano.function([x, y], out)
[ "def", "test_mod_compile", "(", ")", ":", "x", "=", "tensor", ".", "vector", "(", ")", "y", "=", "tensor", ".", "vector", "(", ")", "shape", "=", "x", ".", "shape", "out", "=", "tensor", ".", "switch", "(", "tensor", ".", "eq", "(", "(", "3", "...
this test generate an elemwise of composite as: elemwise{ composite{ composite{ composite{ composite{mod .
train
false
32,345
@should_profile_memory def stop_memory_profiling(): cancel_thread(SAVE_THREAD_PTR) dump_objects()
[ "@", "should_profile_memory", "def", "stop_memory_profiling", "(", ")", ":", "cancel_thread", "(", "SAVE_THREAD_PTR", ")", "dump_objects", "(", ")" ]
we cancel the save thread and dump objects for the last time .
train
false
32,346
@receiver(models.signals.post_save, sender=PriceCurrency, dispatch_uid='save_price_currency') @receiver(models.signals.post_delete, sender=PriceCurrency, dispatch_uid='delete_price_currency') def update_price_currency(sender, instance, **kw): if kw.get('raw'): return try: ids = list(instance.tier.addonpremium_set.values_list('addon_id', flat=True)) except Price.DoesNotExist: return if ids: log.info('Indexing {0} add-ons due to PriceCurrency changes'.format(len(ids))) from mkt.webapps.tasks import index_webapps index_webapps.delay(ids)
[ "@", "receiver", "(", "models", ".", "signals", ".", "post_save", ",", "sender", "=", "PriceCurrency", ",", "dispatch_uid", "=", "'save_price_currency'", ")", "@", "receiver", "(", "models", ".", "signals", ".", "post_delete", ",", "sender", "=", "PriceCurrenc...
ensure that when pricecurrencies are updated .
train
false
32,347
def test_encoded_stream(httpbin): with open(BIN_FILE_PATH, 'rb') as f: env = TestEnvironment(stdin=f, stdin_isatty=False) r = http('--pretty=none', '--stream', '--verbose', 'GET', (httpbin.url + '/get'), env=env) assert (BINARY_SUPPRESSED_NOTICE.decode() in r)
[ "def", "test_encoded_stream", "(", "httpbin", ")", ":", "with", "open", "(", "BIN_FILE_PATH", ",", "'rb'", ")", "as", "f", ":", "env", "=", "TestEnvironment", "(", "stdin", "=", "f", ",", "stdin_isatty", "=", "False", ")", "r", "=", "http", "(", "'--pr...
test that --stream works with non-prettified redirected terminal output .
train
false
32,348
def pdf(x, mu, sigma): return ((1 / (sigma * sy.sqrt((2 * sy.pi)))) * sy.exp(((- ((x - mu) ** 2)) / (2 * (sigma ** 2)))))
[ "def", "pdf", "(", "x", ",", "mu", ",", "sigma", ")", ":", "return", "(", "(", "1", "/", "(", "sigma", "*", "sy", ".", "sqrt", "(", "(", "2", "*", "sy", ".", "pi", ")", ")", ")", ")", "*", "sy", ".", "exp", "(", "(", "(", "-", "(", "(...
probability density function computing p input is the mean .
train
false
32,349
def make_timestamp_range(start, end): ts_range = {} if start: ts_range['$gte'] = start if end: ts_range['$lt'] = end return ts_range
[ "def", "make_timestamp_range", "(", "start", ",", "end", ")", ":", "ts_range", "=", "{", "}", "if", "start", ":", "ts_range", "[", "'$gte'", "]", "=", "start", "if", "end", ":", "ts_range", "[", "'$lt'", "]", "=", "end", "return", "ts_range" ]
given two possible datetimes .
train
false
32,350
def global_names_dict_generator(evaluator, scope, position): in_func = False while (scope is not None): if (not ((scope.type == 'classdef') and in_func)): for names_dict in scope.names_dicts(True): (yield (names_dict, position)) if hasattr(scope, 'resets_positions'): position = None if (scope.type == 'funcdef'): in_func = True position = None scope = evaluator.wrap(scope.get_parent_scope()) for names_dict in evaluator.BUILTINS.names_dicts(True): (yield (names_dict, None))
[ "def", "global_names_dict_generator", "(", "evaluator", ",", "scope", ",", "position", ")", ":", "in_func", "=", "False", "while", "(", "scope", "is", "not", "None", ")", ":", "if", "(", "not", "(", "(", "scope", ".", "type", "==", "'classdef'", ")", "...
for global name lookups .
train
false
32,352
def grant_privilege(database, privilege, username, **client_args): client = _client(**client_args) client.grant_privilege(privilege, database, username) return True
[ "def", "grant_privilege", "(", "database", ",", "privilege", ",", "username", ",", "**", "client_args", ")", ":", "client", "=", "_client", "(", "**", "client_args", ")", "client", ".", "grant_privilege", "(", "privilege", ",", "database", ",", "username", "...
grant a privilege on a database to a user .
train
true
32,354
def solve_lyapunov(a, q): return solve_sylvester(a, a.conj().transpose(), q)
[ "def", "solve_lyapunov", "(", "a", ",", "q", ")", ":", "return", "solve_sylvester", "(", "a", ",", "a", ".", "conj", "(", ")", ".", "transpose", "(", ")", ",", "q", ")" ]
solves the continuous lyapunov equation :math:ax + xa^h = q .
train
false
32,355
def buildConfiguration(config_dict, dirpath='.'): (scheme, h, path, p, q, f) = urlparse(dirpath) if (scheme in ('', 'file')): sys.path.insert(0, path) cache_dict = config_dict.get('cache', {}) cache = _parseConfigCache(cache_dict, dirpath) config = Configuration(cache, dirpath) for (name, layer_dict) in config_dict.get('layers', {}).items(): config.layers[name] = _parseConfigLayer(layer_dict, config, dirpath) if ('index' in config_dict): index_href = urljoin(dirpath, config_dict['index']) index_body = urlopen(index_href).read() index_type = guess_type(index_href) config.index = (index_type[0], index_body) if ('logging' in config_dict): level = config_dict['logging'].upper() if hasattr(logging, level): logging.basicConfig(level=getattr(logging, level)) return config
[ "def", "buildConfiguration", "(", "config_dict", ",", "dirpath", "=", "'.'", ")", ":", "(", "scheme", ",", "h", ",", "path", ",", "p", ",", "q", ",", "f", ")", "=", "urlparse", "(", "dirpath", ")", "if", "(", "scheme", "in", "(", "''", ",", "'fil...
build a configuration dictionary into a configuration object .
train
false
32,356
@contextmanager def skip_signal(signal, **kwargs): signal.disconnect(**kwargs) (yield) signal.connect(**kwargs)
[ "@", "contextmanager", "def", "skip_signal", "(", "signal", ",", "**", "kwargs", ")", ":", "signal", ".", "disconnect", "(", "**", "kwargs", ")", "(", "yield", ")", "signal", ".", "connect", "(", "**", "kwargs", ")" ]
contextmanager to skip a signal by disconnecting it .
train
false
32,357
@retry_on_failure def test_inet_ntop(): if (not is_cli): return AssertError(socket.error, socket.inet_ntop, socket.AF_INET, 'garbage dkfjdkfjdkfj')
[ "@", "retry_on_failure", "def", "test_inet_ntop", "(", ")", ":", "if", "(", "not", "is_cli", ")", ":", "return", "AssertError", "(", "socket", ".", "error", ",", "socket", ".", "inet_ntop", ",", "socket", ".", "AF_INET", ",", "'garbage dkfjdkfjdkfj'", ")" ]
tests socket .
train
false
32,358
def apply_on_element(f, args, kwargs, n): if isinstance(n, int): structure = args[n] is_arg = True elif isinstance(n, str): structure = kwargs[n] is_arg = False def f_reduced(x): if hasattr(x, '__iter__'): return list(map(f_reduced, x)) else: if is_arg: args[n] = x else: kwargs[n] = x return f(*args, **kwargs) return list(map(f_reduced, structure))
[ "def", "apply_on_element", "(", "f", ",", "args", ",", "kwargs", ",", "n", ")", ":", "if", "isinstance", "(", "n", ",", "int", ")", ":", "structure", "=", "args", "[", "n", "]", "is_arg", "=", "True", "elif", "isinstance", "(", "n", ",", "str", "...
returns a structure with the same dimension as the specified argument .
train
false
32,359
def addLevel(level, levelName): _levelNames[level] = levelName _levelNames[levelName] = level
[ "def", "addLevel", "(", "level", ",", "levelName", ")", ":", "_levelNames", "[", "level", "]", "=", "levelName", "_levelNames", "[", "levelName", "]", "=", "level" ]
associate levelname with level .
train
false
32,362
def write_cols(worksheet): def sorter(value): return column_index_from_string(value[0]) el = Element('cols') obj = None for (idx, col) in sorted(worksheet.column_dimensions.items(), key=sorter): if (dict(col) == {}): continue idx = column_index_from_string(idx) obj = Element('col', dict(col)) obj.set('min', ('%d' % (col.min or idx))) obj.set('max', ('%d' % (col.max or idx))) el.append(obj) if (obj is not None): return el
[ "def", "write_cols", "(", "worksheet", ")", ":", "def", "sorter", "(", "value", ")", ":", "return", "column_index_from_string", "(", "value", "[", "0", "]", ")", "el", "=", "Element", "(", "'cols'", ")", "obj", "=", "None", "for", "(", "idx", ",", "c...
write worksheet columns to xml .
train
false
32,363
@docfiller def rank_filter(input, rank, size=None, footprint=None, output=None, mode='reflect', cval=0.0, origin=0): return _rank_filter(input, rank, size, footprint, output, mode, cval, origin, 'rank')
[ "@", "docfiller", "def", "rank_filter", "(", "input", ",", "rank", ",", "size", "=", "None", ",", "footprint", "=", "None", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ",", "origin", "=", "0", ")", ":", "retu...
calculates a multi-dimensional rank filter .
train
false
32,364
def naive_to_utc(ts): return pd.Timestamp(ts.to_pydatetime(warn=False), tz='UTC')
[ "def", "naive_to_utc", "(", "ts", ")", ":", "return", "pd", ".", "Timestamp", "(", "ts", ".", "to_pydatetime", "(", "warn", "=", "False", ")", ",", "tz", "=", "'UTC'", ")" ]
converts a utc tz-naive timestamp to a tz-aware timestamp .
train
false
32,365
def webtest_maybe_follow(response, **kw): remaining_redirects = 100 while ((300 <= response.status_int < 400) and remaining_redirects): response = response.follow(**kw) remaining_redirects -= 1 assert (remaining_redirects > 0), 'redirects chain looks infinite' return response
[ "def", "webtest_maybe_follow", "(", "response", ",", "**", "kw", ")", ":", "remaining_redirects", "=", "100", "while", "(", "(", "300", "<=", "response", ".", "status_int", "<", "400", ")", "and", "remaining_redirects", ")", ":", "response", "=", "response",...
follow all redirects .
train
false
32,366
def batch_size(batch): size = 0 for mutation in batch: size += len(mutation['key']) if ('values' in mutation): for value in mutation['values'].values(): size += len(value) return size
[ "def", "batch_size", "(", "batch", ")", ":", "size", "=", "0", "for", "mutation", "in", "batch", ":", "size", "+=", "len", "(", "mutation", "[", "'key'", "]", ")", "if", "(", "'values'", "in", "mutation", ")", ":", "for", "value", "in", "mutation", ...
calculates the size of a batch .
train
false
32,367
@require_POST def ipn(request, item_check_callable=None): flag = None ipn_obj = None form = PayPalIPNForm(request.POST) if form.is_valid(): try: ipn_obj = form.save(commit=False) except Exception as e: flag = ('Exception while processing. (%s)' % e) else: flag = ('Invalid form. (%s)' % form.errors) if (ipn_obj is None): ipn_obj = PayPalIPN() ipn_obj.initialize(request) if (flag is not None): ipn_obj.set_flag(flag) elif (request.is_secure() and ('secret' in request.GET)): ipn_obj.verify_secret(form, request.GET['secret']) else: ipn_obj.verify(item_check_callable) ipn_obj.save() return HttpResponse('OKAY')
[ "@", "require_POST", "def", "ipn", "(", "request", ",", "item_check_callable", "=", "None", ")", ":", "flag", "=", "None", "ipn_obj", "=", "None", "form", "=", "PayPalIPNForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ...
paypal ipn endpoint .
train
false
32,369
def deprecated_method(replacement): def outer(fun): msg = ('%s() is deprecated; use %s() instead' % (fun.__name__, replacement)) if (fun.__doc__ is None): fun.__doc__ = msg @functools.wraps(fun) def inner(self, *args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return getattr(self, replacement)(*args, **kwargs) return inner return outer
[ "def", "deprecated_method", "(", "replacement", ")", ":", "def", "outer", "(", "fun", ")", ":", "msg", "=", "(", "'%s() is deprecated; use %s() instead'", "%", "(", "fun", ".", "__name__", ",", "replacement", ")", ")", "if", "(", "fun", ".", "__doc__", "is...
a decorator which can be used to mark a method as deprecated replcement is the method name which will be called instead .
train
false
32,370
def _newton(n, x_initial, maxit=5): mu = sqrt(((2.0 * n) + 1.0)) t = (x_initial / mu) theta = arccos(t) for i in range(maxit): (u, ud) = _pbcf(n, theta) dtheta = (u / (((sqrt(2.0) * mu) * sin(theta)) * ud)) theta = (theta + dtheta) if (max(abs(dtheta)) < 1e-14): break x = (mu * cos(theta)) if ((n % 2) == 1): x[0] = 0.0 w = (exp((- (x ** 2))) / (2.0 * (ud ** 2))) return (x, w)
[ "def", "_newton", "(", "n", ",", "x_initial", ",", "maxit", "=", "5", ")", ":", "mu", "=", "sqrt", "(", "(", "(", "2.0", "*", "n", ")", "+", "1.0", ")", ")", "t", "=", "(", "x_initial", "/", "mu", ")", "theta", "=", "arccos", "(", "t", ")",...
newton iteration for polishing the asymptotic approximation to the zeros of the hermite polynomials .
train
false
32,372
def call_auth(auth_name, context, **kwargs): assert ('user' in context), 'Test methods must put a user name in the context dict' assert ('model' in context), 'Test methods must put a model in the context dict' return logic.check_access(auth_name, context, data_dict=kwargs)
[ "def", "call_auth", "(", "auth_name", ",", "context", ",", "**", "kwargs", ")", ":", "assert", "(", "'user'", "in", "context", ")", ",", "'Test methods must put a user name in the context dict'", "assert", "(", "'model'", "in", "context", ")", ",", "'Test methods ...
call the named ckan .
train
false
32,374
def filterControlChars(value): return filterStringValue(value, PRINTABLE_CHAR_REGEX, ' ')
[ "def", "filterControlChars", "(", "value", ")", ":", "return", "filterStringValue", "(", "value", ",", "PRINTABLE_CHAR_REGEX", ",", "' '", ")" ]
returns string value with control chars being supstituted with .
train
false
32,375
def safe_getattr(obj, name, *defargs): try: return getattr(obj, name, *defargs) except Exception: if defargs: return defargs[0] raise AttributeError(name)
[ "def", "safe_getattr", "(", "obj", ",", "name", ",", "*", "defargs", ")", ":", "try", ":", "return", "getattr", "(", "obj", ",", "name", ",", "*", "defargs", ")", "except", "Exception", ":", "if", "defargs", ":", "return", "defargs", "[", "0", "]", ...
a getattr() that turns all exceptions into attributeerrors .
train
false
32,377
def rcode(expr, assign_to=None, **settings): return RCodePrinter(settings).doprint(expr, assign_to)
[ "def", "rcode", "(", "expr", ",", "assign_to", "=", "None", ",", "**", "settings", ")", ":", "return", "RCodePrinter", "(", "settings", ")", ".", "doprint", "(", "expr", ",", "assign_to", ")" ]
converts an expr to a string of r code parameters expr : expr a sympy expression to be converted .
train
false
32,378
def transferPathsToSurroundingLoops(paths, surroundingLoops): for surroundingLoop in surroundingLoops: surroundingLoop.transferPaths(paths)
[ "def", "transferPathsToSurroundingLoops", "(", "paths", ",", "surroundingLoops", ")", ":", "for", "surroundingLoop", "in", "surroundingLoops", ":", "surroundingLoop", ".", "transferPaths", "(", "paths", ")" ]
transfer paths to surrounding loops .
train
false
32,380
def test_iht_fit_single_class(): iht = InstanceHardnessThreshold(ESTIMATOR, random_state=RND_SEED) y_single_class = np.zeros((X.shape[0],)) assert_warns(UserWarning, iht.fit, X, y_single_class)
[ "def", "test_iht_fit_single_class", "(", ")", ":", "iht", "=", "InstanceHardnessThreshold", "(", "ESTIMATOR", ",", "random_state", "=", "RND_SEED", ")", "y_single_class", "=", "np", ".", "zeros", "(", "(", "X", ".", "shape", "[", "0", "]", ",", ")", ")", ...
test either if an error when there is a single class .
train
false
32,381
def processElse(xmlElement): evaluate.processCondition(xmlElement)
[ "def", "processElse", "(", "xmlElement", ")", ":", "evaluate", ".", "processCondition", "(", "xmlElement", ")" ]
process the else statement .
train
false
32,382
def get_error_match(text): import re return re.match(' File "(.*)", line (\\d*)', text)
[ "def", "get_error_match", "(", "text", ")", ":", "import", "re", "return", "re", ".", "match", "(", "' File \"(.*)\", line (\\\\d*)'", ",", "text", ")" ]
return error match .
train
false
32,383
@np.deprecate(message='spltopp is deprecated in scipy 0.19.0, use PPoly.from_spline instead.') def spltopp(xk, cvals, k): return ppform.fromspline(xk, cvals, k)
[ "@", "np", ".", "deprecate", "(", "message", "=", "'spltopp is deprecated in scipy 0.19.0, use PPoly.from_spline instead.'", ")", "def", "spltopp", "(", "xk", ",", "cvals", ",", "k", ")", ":", "return", "ppform", ".", "fromspline", "(", "xk", ",", "cvals", ",", ...
return a piece-wise polynomial object from a fixed-spline tuple .
train
false
32,384
@retry_on_failure def test_SSLType_ssl(): s = socket.socket(socket.AF_INET) s.connect((SSL_URL, SSL_PORT)) ssl_s = real_ssl.sslwrap(s._sock, False) if is_cpython: pass s.close()
[ "@", "retry_on_failure", "def", "test_SSLType_ssl", "(", ")", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ")", "s", ".", "connect", "(", "(", "SSL_URL", ",", "SSL_PORT", ")", ")", "ssl_s", "=", "real_ssl", ".", "sslwrap", "("...
should be essentially the same as _ssl .
train
false
32,386
def validate_mutually_exclusive(parsed_args, *groups): args_dict = vars(parsed_args) all_args = set((arg for group in groups for arg in group)) if (not any(((k in all_args) for k in args_dict if (args_dict[k] is not None)))): return current_group = None for key in [k for k in args_dict if (args_dict[k] is not None)]: key_group = _get_group_for_key(key, groups) if (key_group is None): continue if (current_group is None): current_group = key_group elif (not (key_group == current_group)): raise ValueError(('The key "%s" cannot be specified when one of the following keys are also specified: %s' % (key, ', '.join(current_group))))
[ "def", "validate_mutually_exclusive", "(", "parsed_args", ",", "*", "groups", ")", ":", "args_dict", "=", "vars", "(", "parsed_args", ")", "all_args", "=", "set", "(", "(", "arg", "for", "group", "in", "groups", "for", "arg", "in", "group", ")", ")", "if...
validate mututally exclusive groups in the parsed args .
train
false
32,387
def is_classmethod(instancemethod, klass): return (inspect.ismethod(instancemethod) and (instancemethod.__self__ is klass))
[ "def", "is_classmethod", "(", "instancemethod", ",", "klass", ")", ":", "return", "(", "inspect", ".", "ismethod", "(", "instancemethod", ")", "and", "(", "instancemethod", ".", "__self__", "is", "klass", ")", ")" ]
determine if an instancemethod is a classmethod .
train
false
32,388
def task_failed_deps(args): dag = get_dag(args) task = dag.get_task(task_id=args.task_id) ti = TaskInstance(task, args.execution_date) dep_context = DepContext(deps=SCHEDULER_DEPS) failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context)) if failed_deps: print('Task instance dependencies not met:') for dep in failed_deps: print('{}: {}'.format(dep.dep_name, dep.reason)) else: print('Task instance dependencies are all met.')
[ "def", "task_failed_deps", "(", "args", ")", ":", "dag", "=", "get_dag", "(", "args", ")", "task", "=", "dag", ".", "get_task", "(", "task_id", "=", "args", ".", "task_id", ")", "ti", "=", "TaskInstance", "(", "task", ",", "args", ".", "execution_date"...
returns the unmet dependencies for a task instance from the perspective of the scheduler .
train
true
32,389
def _findOptionValueInSeceditFile(option): try: _d = uuid.uuid4().hex _tfile = '{0}\\{1}'.format(__salt__['config.get']('cachedir'), 'salt-secedit-dump-{0}.txt'.format(_d)) _ret = __salt__['cmd.run']('secedit /export /cfg {0}'.format(_tfile)) if _ret: _reader = codecs.open(_tfile, 'r', encoding='utf-16') _secdata = _reader.readlines() _reader.close() _ret = __salt__['file.remove'](_tfile) for _line in _secdata: if _line.startswith(option): return (True, _line.split('=')[1].strip()) return (True, 'Not Defined') except Exception as e: log.debug('error occurred while trying to get secedit data') return (False, None)
[ "def", "_findOptionValueInSeceditFile", "(", "option", ")", ":", "try", ":", "_d", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "_tfile", "=", "'{0}\\\\{1}'", ".", "format", "(", "__salt__", "[", "'config.get'", "]", "(", "'cachedir'", ")", ",", "'sa...
helper function to dump/parse a secedit /export file for a particular option .
train
false
32,390
def evcam(camera, freq=1.0, maxrate=None, loglevel=0, logfile=None, pidfile=None, timer=None, app=None): app = app_or_default(app) if pidfile: platforms.create_pidlock(pidfile) app.log.setup_logging_subsystem(loglevel, logfile) print(u'-> evcam: Taking snapshots with {0} (every {1} secs.)'.format(camera, freq)) state = app.events.State() cam = instantiate(camera, state, app=app, freq=freq, maxrate=maxrate, timer=timer) cam.install() conn = app.connection_for_read() recv = app.events.Receiver(conn, handlers={u'*': state.event}) try: recv.capture(limit=None) except KeyboardInterrupt: raise SystemExit finally: cam.cancel() conn.close()
[ "def", "evcam", "(", "camera", ",", "freq", "=", "1.0", ",", "maxrate", "=", "None", ",", "loglevel", "=", "0", ",", "logfile", "=", "None", ",", "pidfile", "=", "None", ",", "timer", "=", "None", ",", "app", "=", "None", ")", ":", "app", "=", ...
start snapshot recorder .
train
false
32,391
def mul_xin(p, i, n): R = p.ring q = R(0) for (k, v) in p.items(): k1 = list(k) k1[i] += n q[tuple(k1)] = v return q
[ "def", "mul_xin", "(", "p", ",", "i", ",", "n", ")", ":", "R", "=", "p", ".", "ring", "q", "=", "R", "(", "0", ")", "for", "(", "k", ",", "v", ")", "in", "p", ".", "items", "(", ")", ":", "k1", "=", "list", "(", "k", ")", "k1", "[", ...
return p*x_i**n .
train
false
32,392
def _load_user_django(path, debug): abspath = os.path.abspath(path) if (not debug): template = template_cache.get(abspath, None) else: template = None if (not template): (directory, file_name) = os.path.split(abspath) new_settings = {'TEMPLATE_DIRS': (directory,), 'TEMPLATE_DEBUG': debug, 'DEBUG': debug} old_settings = _swap_settings(new_settings) try: template = django.template.loader.get_template(file_name) finally: _swap_settings(old_settings) if (not debug): template_cache[abspath] = template def wrap_render(context, orig_render=template.render): URLNode = django.template.defaulttags.URLNode save_urlnode_render = URLNode.render old_settings = _swap_settings(new_settings) try: URLNode.render = _urlnode_render_replacement return orig_render(context) finally: _swap_settings(old_settings) URLNode.render = save_urlnode_render template.render = wrap_render return template
[ "def", "_load_user_django", "(", "path", ",", "debug", ")", ":", "abspath", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "if", "(", "not", "debug", ")", ":", "template", "=", "template_cache", ".", "get", "(", "abspath", ",", "None", ")"...
load the given template using the django found in third_party .
train
false
32,393
def audio_left_right(audioclip, left=1, right=1, merge=False): funleft = ((lambda t: left) if np.isscalar(left) else left) funright = ((lambda t: right) if np.isscalar(right) else right)
[ "def", "audio_left_right", "(", "audioclip", ",", "left", "=", "1", ",", "right", "=", "1", ",", "merge", "=", "False", ")", ":", "funleft", "=", "(", "(", "lambda", "t", ":", "left", ")", "if", "np", ".", "isscalar", "(", "left", ")", "else", "l...
not yet finished for a stereo audioclip .
train
false
32,394
def getRoundedToPlaces(decimalPlaces, number): decimalPlacesRounded = max(1, int(round(decimalPlaces))) return round(number, decimalPlacesRounded)
[ "def", "getRoundedToPlaces", "(", "decimalPlaces", ",", "number", ")", ":", "decimalPlacesRounded", "=", "max", "(", "1", ",", "int", "(", "round", "(", "decimalPlaces", ")", ")", ")", "return", "round", "(", "number", ",", "decimalPlacesRounded", ")" ]
get number rounded to a number of decimal places .
train
false
32,396
@command('reverse\\s*(\\d{1,4})\\s*-\\s*(\\d{1,4})\\s*') def reverse_songs_range(lower, upper): (lower, upper) = (int(lower), int(upper)) if (lower > upper): (lower, upper) = (upper, lower) g.model.songs[(lower - 1):upper] = reversed(g.model.songs[(lower - 1):upper]) g.message = (((((c.y + 'Reversed range: ') + str(lower)) + '-') + str(upper)) + c.w) g.content = content.generate_songlist_display()
[ "@", "command", "(", "'reverse\\\\s*(\\\\d{1,4})\\\\s*-\\\\s*(\\\\d{1,4})\\\\s*'", ")", "def", "reverse_songs_range", "(", "lower", ",", "upper", ")", ":", "(", "lower", ",", "upper", ")", "=", "(", "int", "(", "lower", ")", ",", "int", "(", "upper", ")", ")...
reverse the songs within a specified range .
train
false
32,397
@frappe.whitelist() def rename_doc(doctype, old, new, force=False, merge=False, ignore_permissions=False): if (not frappe.db.exists(doctype, old)): return force = cint(force) merge = cint(merge) meta = frappe.get_meta(doctype) old_doc = frappe.get_doc(doctype, old) out = (old_doc.run_method(u'before_rename', old, new, merge) or {}) new = ((out.get(u'new') or new) if isinstance(out, dict) else (out or new)) if (doctype != u'DocType'): new = validate_rename(doctype, new, meta, merge, force, ignore_permissions) if (not merge): rename_parent_and_child(doctype, old, new, meta) link_fields = get_link_fields(doctype) update_link_field_values(link_fields, old, new, doctype) rename_dynamic_links(doctype, old, new) if (doctype == u'DocType'): rename_doctype(doctype, old, new, force) update_attachments(doctype, old, new) if merge: frappe.delete_doc(doctype, old) new_doc = frappe.get_doc(doctype, new) new_doc._local = getattr(old_doc, u'_local', None) new_doc.run_method(u'after_rename', old, new, merge) rename_versions(doctype, old, new) if (not merge): rename_password(doctype, old, new) frappe.db.sql(u"update tabDefaultValue set defvalue=%s where parenttype='User Permission'\n DCTB DCTB and defkey=%s and defvalue=%s", (new, doctype, old)) frappe.clear_cache() if merge: new_doc.add_comment(u'Edit', _(u'merged {0} into {1}').format(frappe.bold(old), frappe.bold(new))) else: new_doc.add_comment(u'Edit', _(u'renamed from {0} to {1}').format(frappe.bold(old), frappe.bold(new))) return new
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "rename_doc", "(", "doctype", ",", "old", ",", "new", ",", "force", "=", "False", ",", "merge", "=", "False", ",", "ignore_permissions", "=", "False", ")", ":", "if", "(", "not", "frappe", ".", "db", ...
rename a document .
train
false
32,398
def filter_properties_target(namespaces_iter, resource_types, properties_target): def filter_namespace(namespace): for asn in namespace.get('resource_type_associations'): if ((asn.get('name') in resource_types) and (asn.get('properties_target') == properties_target)): return True return False return filter(filter_namespace, namespaces_iter)
[ "def", "filter_properties_target", "(", "namespaces_iter", ",", "resource_types", ",", "properties_target", ")", ":", "def", "filter_namespace", "(", "namespace", ")", ":", "for", "asn", "in", "namespace", ".", "get", "(", "'resource_type_associations'", ")", ":", ...
filter metadata namespaces based on the given resource types and properties target .
train
true
32,399
def frame_codeinfo(fframe, back=0): try: if (not fframe): return ('<unknown>', '') for i in range(back): fframe = fframe.f_back try: fname = getsourcefile(fframe) except TypeError: fname = '<builtin>' lineno = (fframe.f_lineno or '') return (fname, lineno) except Exception: return ('<unknown>', '')
[ "def", "frame_codeinfo", "(", "fframe", ",", "back", "=", "0", ")", ":", "try", ":", "if", "(", "not", "fframe", ")", ":", "return", "(", "'<unknown>'", ",", "''", ")", "for", "i", "in", "range", "(", "back", ")", ":", "fframe", "=", "fframe", "....
return a pair for a previous frame .
train
false
32,400
def hdn_counts(sequence, graph): hdns = {} for kmer in kmers(sequence): d = graph.kmer_degree(kmer) if (d > 2): hdns[d] = (hdns.get(d, 0) + 1) return hdns
[ "def", "hdn_counts", "(", "sequence", ",", "graph", ")", ":", "hdns", "=", "{", "}", "for", "kmer", "in", "kmers", "(", "sequence", ")", ":", "d", "=", "graph", ".", "kmer_degree", "(", "kmer", ")", "if", "(", "d", ">", "2", ")", ":", "hdns", "...
get the degree distribution of nodes with degree more than 2 .
train
false
32,401
def relative_script(lines): activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this" activate_at = None for (idx, line) in reversed(list(enumerate(lines))): if (line.split()[:3] == ['from', '__future__', 'import']): activate_at = (idx + 1) break if (activate_at is None): activate_at = 1 return ((lines[:activate_at] + ['', activate, '']) + lines[activate_at:])
[ "def", "relative_script", "(", "lines", ")", ":", "activate", "=", "\"import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this\"", "a...
return a script thatll work in a relocatable environment .
train
true
32,403
def connect_kinesis(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): from boto.kinesis.layer1 import KinesisConnection return KinesisConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs)
[ "def", "connect_kinesis", "(", "aws_access_key_id", "=", "None", ",", "aws_secret_access_key", "=", "None", ",", "**", "kwargs", ")", ":", "from", "boto", ".", "kinesis", ".", "layer1", "import", "KinesisConnection", "return", "KinesisConnection", "(", "aws_access...
connect to amazon kinesis :type aws_access_key_id: string .
train
false
32,404
def google_text_emphasis(style): emphasis = [] if ('text-decoration' in style): emphasis.append(style['text-decoration']) if ('font-style' in style): emphasis.append(style['font-style']) if ('font-weight' in style): emphasis.append(style['font-weight']) return emphasis
[ "def", "google_text_emphasis", "(", "style", ")", ":", "emphasis", "=", "[", "]", "if", "(", "'text-decoration'", "in", "style", ")", ":", "emphasis", ".", "append", "(", "style", "[", "'text-decoration'", "]", ")", "if", "(", "'font-style'", "in", "style"...
return a list of all emphasis modifiers of the element .
train
true
32,405
def configure_snmp(community, snmp_port=161, snmp_trapport=161): _xml = '<RIBCL VERSION="2.2">\n <LOGIN USER_LOGIN="x" PASSWORD="y">\n <RIB_INFO mode="write">\n <MOD_GLOBAL_SETTINGS>\n <SNMP_ACCESS_ENABLED VALUE="Yes"/>\n <SNMP_PORT VALUE="{0}"/>\n <SNMP_TRAP_PORT VALUE="{1}"/>\n </MOD_GLOBAL_SETTINGS>\n\n <MOD_SNMP_IM_SETTINGS>\n <SNMP_ADDRESS_1 VALUE=""/>\n <SNMP_ADDRESS_1_ROCOMMUNITY VALUE="{2}"/>\n <SNMP_ADDRESS_1_TRAPCOMMUNITY VERSION="" VALUE=""/>\n <RIB_TRAPS VALUE="Y"/>\n <OS_TRAPS VALUE="Y"/>\n <SNMP_PASSTHROUGH_STATUS VALUE="N"/>\n </MOD_SNMP_IM_SETTINGS>\n </RIB_INFO>\n </LOGIN>\n </RIBCL>'.format(snmp_port, snmp_trapport, community) return __execute_cmd('Configure_SNMP', _xml)
[ "def", "configure_snmp", "(", "community", ",", "snmp_port", "=", "161", ",", "snmp_trapport", "=", "161", ")", ":", "_xml", "=", "'<RIBCL VERSION=\"2.2\">\\n <LOGIN USER_LOGIN=\"x\" PASSWORD=\"y\">\\n <RIB_INFO mode=\"write\">\\n <M...
configure snmp cli example: .
train
true
32,406
def tock(phenny, input): u = urllib.urlopen('http://tycho.usno.navy.mil/cgi-bin/timer.pl') info = u.info() u.close() phenny.say((('"' + info['Date']) + '" - tycho.usno.navy.mil'))
[ "def", "tock", "(", "phenny", ",", "input", ")", ":", "u", "=", "urllib", ".", "urlopen", "(", "'http://tycho.usno.navy.mil/cgi-bin/timer.pl'", ")", "info", "=", "u", ".", "info", "(", ")", "u", ".", "close", "(", ")", "phenny", ".", "say", "(", "(", ...
shows the time from the usnos atomic clock .
train
false
32,410
def gemset_empty(ruby, gemset, runas=None): return _rvm_do(ruby, ['rvm', '--force', 'gemset', 'empty', gemset], runas=runas)
[ "def", "gemset_empty", "(", "ruby", ",", "gemset", ",", "runas", "=", "None", ")", ":", "return", "_rvm_do", "(", "ruby", ",", "[", "'rvm'", ",", "'--force'", ",", "'gemset'", ",", "'empty'", ",", "gemset", "]", ",", "runas", "=", "runas", ")" ]
remove all gems from a gemset .
train
false
32,411
def load_exception(package): try: return pickle.loads(package) except pickle.PicklingError as ex: return NestedException(('failed to unpickle remote exception -- %r' % (ex,)))
[ "def", "load_exception", "(", "package", ")", ":", "try", ":", "return", "pickle", ".", "loads", "(", "package", ")", "except", "pickle", ".", "PicklingError", "as", "ex", ":", "return", "NestedException", "(", "(", "'failed to unpickle remote exception -- %r'", ...
returns an exception object .
train
false
32,412
def complete_alias(text): global rline_mpstate return rline_mpstate.aliases.keys()
[ "def", "complete_alias", "(", "text", ")", ":", "global", "rline_mpstate", "return", "rline_mpstate", ".", "aliases", ".", "keys", "(", ")" ]
return list of aliases .
train
false
32,413
def evaluation_error(y_real, y_pred, max_rating, min_rating): mae = mean_absolute_error(y_real, y_pred) nmae = normalized_mean_absolute_error(y_real, y_pred, max_rating, min_rating) rmse = root_mean_square_error(y_real, y_pred) return (mae, nmae, rmse)
[ "def", "evaluation_error", "(", "y_real", ",", "y_pred", ",", "max_rating", ",", "min_rating", ")", ":", "mae", "=", "mean_absolute_error", "(", "y_real", ",", "y_pred", ")", "nmae", "=", "normalized_mean_absolute_error", "(", "y_real", ",", "y_pred", ",", "ma...
it computes the nmae .
train
false
32,415
def analyse_par2(name): head = None vol = block = 0 if (name and (not REJECT_PAR2_RE.search(name))): m = PROBABLY_PAR2_RE.search(name) if m: head = m.group(1) vol = m.group(2) block = m.group(3) elif (name.lower().find('.par2') > 0): head = os.path.splitext(name)[0].strip() else: head = None return (head, vol, block)
[ "def", "analyse_par2", "(", "name", ")", ":", "head", "=", "None", "vol", "=", "block", "=", "0", "if", "(", "name", "and", "(", "not", "REJECT_PAR2_RE", ".", "search", "(", "name", ")", ")", ")", ":", "m", "=", "PROBABLY_PAR2_RE", ".", "search", "...
check if file is a par2-file and determine vol/block return head .
train
false
32,416
def check_context_log(logical_line, physical_line, filename): if ('nova/tests' in filename): return if pep8.noqa(physical_line): return if log_remove_context.match(logical_line): (yield (0, "N353: Nova is using oslo.context's RequestContext which means the context object is in scope when doing logging using oslo.log, so no need to pass it askwarg."))
[ "def", "check_context_log", "(", "logical_line", ",", "physical_line", ",", "filename", ")", ":", "if", "(", "'nova/tests'", "in", "filename", ")", ":", "return", "if", "pep8", ".", "noqa", "(", "physical_line", ")", ":", "return", "if", "log_remove_context", ...
check whether context is being passed to the logs not correct: log .
train
false
32,418
def _find_by_name(tree_data, name, is_dir, start_at): try: item = tree_data[start_at] if (item and (item[2] == name) and (S_ISDIR(item[1]) == is_dir)): tree_data[start_at] = None return item except IndexError: pass for (index, item) in enumerate(tree_data): if (item and (item[2] == name) and (S_ISDIR(item[1]) == is_dir)): tree_data[index] = None return item return None
[ "def", "_find_by_name", "(", "tree_data", ",", "name", ",", "is_dir", ",", "start_at", ")", ":", "try", ":", "item", "=", "tree_data", "[", "start_at", "]", "if", "(", "item", "and", "(", "item", "[", "2", "]", "==", "name", ")", "and", "(", "S_ISD...
return data entry matching the given name and tree mode or none .
train
true
32,419
def test_bc_bad_ratio(): ratio = (-1.0) bc = BalanceCascade(ratio=ratio) assert_raises(ValueError, bc.fit, X, Y) ratio = 100.0 bc = BalanceCascade(ratio=ratio) assert_raises(ValueError, bc.fit, X, Y) ratio = 'rnd' bc = BalanceCascade(ratio=ratio) assert_raises(ValueError, bc.fit, X, Y) ratio = [0.5, 0.5] bc = BalanceCascade(ratio=ratio) assert_raises(ValueError, bc.fit, X, Y)
[ "def", "test_bc_bad_ratio", "(", ")", ":", "ratio", "=", "(", "-", "1.0", ")", "bc", "=", "BalanceCascade", "(", "ratio", "=", "ratio", ")", "assert_raises", "(", "ValueError", ",", "bc", ".", "fit", ",", "X", ",", "Y", ")", "ratio", "=", "100.0", ...
test either if an error is raised with a wrong decimal value for the ratio .
train
false
32,420
def patch_testcase(): def enter_atomics_wrapper(wrapped_func): '\n Wrapper for TestCase._enter_atomics\n ' wrapped_func = wrapped_func.__func__ def _wrapper(*args, **kwargs): '\n Method that performs atomic-entering accounting.\n ' CommitOnSuccessManager.ENABLED = False OuterAtomic.ALLOW_NESTED = True if (not hasattr(OuterAtomic, 'atomic_for_testcase_calls')): OuterAtomic.atomic_for_testcase_calls = 0 OuterAtomic.atomic_for_testcase_calls += 1 return wrapped_func(*args, **kwargs) return classmethod(_wrapper) def rollback_atomics_wrapper(wrapped_func): '\n Wrapper for TestCase._rollback_atomics\n ' wrapped_func = wrapped_func.__func__ def _wrapper(*args, **kwargs): '\n Method that performs atomic-rollback accounting.\n ' CommitOnSuccessManager.ENABLED = True OuterAtomic.ALLOW_NESTED = False OuterAtomic.atomic_for_testcase_calls -= 1 return wrapped_func(*args, **kwargs) return classmethod(_wrapper) TestCase._enter_atomics = enter_atomics_wrapper(TestCase._enter_atomics) TestCase._rollback_atomics = rollback_atomics_wrapper(TestCase._rollback_atomics)
[ "def", "patch_testcase", "(", ")", ":", "def", "enter_atomics_wrapper", "(", "wrapped_func", ")", ":", "wrapped_func", "=", "wrapped_func", ".", "__func__", "def", "_wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "CommitOnSuccessManager", ".", "ENA...
disable commit_on_success decorators for tests in testcase subclasses .
train
false
32,422
def _add_directives(block, directives, replace): for directive in directives: _add_directive(block, directive, replace) if (block and ('\n' not in block[(-1)])): block.append(nginxparser.UnspacedList('\n'))
[ "def", "_add_directives", "(", "block", ",", "directives", ",", "replace", ")", ":", "for", "directive", "in", "directives", ":", "_add_directive", "(", "block", ",", "directive", ",", "replace", ")", "if", "(", "block", "and", "(", "'\\n'", "not", "in", ...
adds or replaces directives in a config block .
train
false
32,424
def is_chordal(G): if G.is_directed(): raise nx.NetworkXError('Directed graphs not supported') if G.is_multigraph(): raise nx.NetworkXError('Multiply connected graphs not supported.') if (len(_find_chordality_breaker(G)) == 0): return True else: return False
[ "def", "is_chordal", "(", "G", ")", ":", "if", "G", ".", "is_directed", "(", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'Directed graphs not supported'", ")", "if", "G", ".", "is_multigraph", "(", ")", ":", "raise", "nx", ".", "NetworkXError", ...
checks whether g is a chordal graph .
train
false
32,425
def versionFromCommitNo(commitNo): return ('0.0.0-dev%d' % commitNo)
[ "def", "versionFromCommitNo", "(", "commitNo", ")", ":", "return", "(", "'0.0.0-dev%d'", "%", "commitNo", ")" ]
generate a version string from a numerical commit no .
train
false
32,426
def print_sample(ex_source, ex_reference, ex_prediction, src_dict, tgt_dict): sample_output = u'\n Source Sentence: {source}\n Reference Translation: {reference}\n Predicted Translation: {prediction}\n '.format(source=' '.join([src_dict[k].decode('utf-8') for k in ex_source]), reference=' '.join([tgt_dict[k].decode('utf-8') for k in ex_reference]), prediction=' '.join([tgt_dict[k].decode('utf-8') for k in ex_prediction])) print sample_output.encode('utf-8')
[ "def", "print_sample", "(", "ex_source", ",", "ex_reference", ",", "ex_prediction", ",", "src_dict", ",", "tgt_dict", ")", ":", "sample_output", "=", "u'\\n Source Sentence: {source}\\n Reference Translation: {reference}\\n Predicted Translation: {prediction}\\n '", "."...
print some example predictions .
train
false
32,427
def project_rfa_opts(): T = current.T return {1: T('RFA1: Governance-Organisational, Institutional, Policy and Decision Making Framework'), 2: T('RFA2: Knowledge, Information, Public Awareness and Education'), 3: T('RFA3: Analysis and Evaluation of Hazards, Vulnerabilities and Elements at Risk'), 4: T('RFA4: Planning for Effective Preparedness, Response and Recovery'), 5: T('RFA5: Effective, Integrated and People-Focused Early Warning Systems'), 6: T('RFA6: Reduction of Underlying Risk Factors')}
[ "def", "project_rfa_opts", "(", ")", ":", "T", "=", "current", ".", "T", "return", "{", "1", ":", "T", "(", "'RFA1: Governance-Organisational, Institutional, Policy and Decision Making Framework'", ")", ",", "2", ":", "T", "(", "'RFA2: Knowledge, Information, Public Awa...
provide the options for the rfa filter rfa: applies to pacific countries only .
train
false
32,428
def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): storage_client = storage.Client() source_bucket = storage_client.get_bucket(bucket_name) source_blob = source_bucket.blob(blob_name) destination_bucket = storage_client.get_bucket(new_bucket_name) new_blob = source_bucket.copy_blob(source_blob, destination_bucket, new_blob_name) print 'Blob {} in bucket {} copied to blob {} in bucket {}.'.format(source_blob.name, source_bucket.name, new_blob.name, destination_bucket.name)
[ "def", "copy_blob", "(", "bucket_name", ",", "blob_name", ",", "new_bucket_name", ",", "new_blob_name", ")", ":", "storage_client", "=", "storage", ".", "Client", "(", ")", "source_bucket", "=", "storage_client", ".", "get_bucket", "(", "bucket_name", ")", "sour...
copies a blob from one bucket to another with a new name .
train
false
32,429
def _tgrep_segmented_pattern_action(_s, _l, tokens): node_label = tokens[0] reln_preds = tokens[1:] def pattern_segment_pred(n, m=None, l=None): u'This predicate function ignores its node argument.' if ((l is None) or (node_label not in l)): raise TgrepException(u'node_label ={0} not bound in pattern'.format(node_label)) node = l[node_label] return all((pred(node, m, l) for pred in reln_preds)) return pattern_segment_pred
[ "def", "_tgrep_segmented_pattern_action", "(", "_s", ",", "_l", ",", "tokens", ")", ":", "node_label", "=", "tokens", "[", "0", "]", "reln_preds", "=", "tokens", "[", "1", ":", "]", "def", "pattern_segment_pred", "(", "n", ",", "m", "=", "None", ",", "...
builds a lambda function representing a segmented pattern .
train
false
32,430
def GetWSAActionInput(operation): attr = operation.input.action if (attr is not None): return attr portType = operation.getPortType() targetNamespace = portType.getTargetNamespace() ptName = portType.name msgName = operation.input.name if (not msgName): msgName = (operation.name + 'Request') if targetNamespace.endswith('/'): return ('%s%s/%s' % (targetNamespace, ptName, msgName)) return ('%s/%s/%s' % (targetNamespace, ptName, msgName))
[ "def", "GetWSAActionInput", "(", "operation", ")", ":", "attr", "=", "operation", ".", "input", ".", "action", "if", "(", "attr", "is", "not", "None", ")", ":", "return", "attr", "portType", "=", "operation", ".", "getPortType", "(", ")", "targetNamespace"...
find wsa:action attribute .
train
true
32,431
@check_is_trading @export_as_api @ExecutionContext.enforce_phase(EXECUTION_PHASE.HANDLE_BAR, EXECUTION_PHASE.SCHEDULED) def order_value(id_or_ins, cash_amount, style=None): order_book_id = assure_order_book_id(id_or_ins) bar_dict = ExecutionContext.get_current_bar_dict() price = bar_dict[order_book_id].close round_lot = int(get_data_proxy().instrument(order_book_id).round_lot) amount = (((cash_amount // price) // round_lot) * round_lot) position = get_simu_exchange().account.portfolio.positions[order_book_id] if (amount < 0): if (abs(amount) > position.sellable): amount = (- position.sellable) return order_shares(order_book_id, amount, style)
[ "@", "check_is_trading", "@", "export_as_api", "@", "ExecutionContext", ".", "enforce_phase", "(", "EXECUTION_PHASE", ".", "HANDLE_BAR", ",", "EXECUTION_PHASE", ".", "SCHEDULED", ")", "def", "order_value", "(", "id_or_ins", ",", "cash_amount", ",", "style", "=", "...
place an order by specified value amount rather than specific number of shares/lots .
train
false
32,432
def populate_default_writers(writer_populator): writer_populator.register(u'html', HTMLReportWriter) writer_populator.register(u'pdf', PDFReportWriter) writer_populator.register(u'json', JSONReportWriter) writer_populator.register(u'pprint', PprintReportWriter) writer_populator.register(u'html', HTMLReportWriter) if openpyxl: writer_populator.register(u'excel', ExcelReportWriter)
[ "def", "populate_default_writers", "(", "writer_populator", ")", ":", "writer_populator", ".", "register", "(", "u'html'", ",", "HTMLReportWriter", ")", "writer_populator", ".", "register", "(", "u'pdf'", ",", "PDFReportWriter", ")", "writer_populator", ".", "register...
populate the default report writers :type writer_populator: reportwriterpopulator .
train
false
32,434
def salustowicz_2d(data): return (((((exp((- data[0])) * (data[0] ** 3)) * cos(data[0])) * sin(data[0])) * ((cos(data[0]) * (sin(data[0]) ** 2)) - 1)) * (data[1] - 5))
[ "def", "salustowicz_2d", "(", "data", ")", ":", "return", "(", "(", "(", "(", "(", "exp", "(", "(", "-", "data", "[", "0", "]", ")", ")", "*", "(", "data", "[", "0", "]", "**", "3", ")", ")", "*", "cos", "(", "data", "[", "0", "]", ")", ...
salustowicz benchmark function .
train
false
32,435
def validate_and_normalize_address(address): new_address = address.lstrip('[').rstrip(']') if (address.startswith('[') and address.endswith(']')): return validate_and_normalize_ip(new_address) new_address = new_address.lower() if is_valid_ipv4(new_address): return new_address elif is_valid_ipv6(new_address): return expand_ipv6(new_address) elif is_valid_hostname(new_address): return new_address else: raise ValueError(('Invalid address %s' % address))
[ "def", "validate_and_normalize_address", "(", "address", ")", ":", "new_address", "=", "address", ".", "lstrip", "(", "'['", ")", ".", "rstrip", "(", "']'", ")", "if", "(", "address", ".", "startswith", "(", "'['", ")", "and", "address", ".", "endswith", ...
return normalized address if the address is a valid ip or hostname .
train
false