id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
37,312
def _drop_protected_attrs(model_class, values): for attr in model_class.__protected_attributes__: if (attr in values): del values[attr]
[ "def", "_drop_protected_attrs", "(", "model_class", ",", "values", ")", ":", "for", "attr", "in", "model_class", ".", "__protected_attributes__", ":", "if", "(", "attr", "in", "values", ")", ":", "del", "values", "[", "attr", "]" ]
remove protected attributes from the values dictionary using the model's __protected_attributes__ field .
train
false
37,313
def callMultipleInThread(tupleList):
    """Run a list of calls sequentially in a single reactor thread.

    *tupleList* is handed to ``_runMultiple`` via the reactor's thread pool;
    presumably each entry is a (callable, args, kwargs) tuple matching
    _runMultiple's contract -- confirm against its definition.
    """
    # Imported lazily so merely importing this module does not pull in
    # (and implicitly select) a twisted reactor.
    from twisted.internet import reactor
    reactor.callInThread(_runMultiple, tupleList)
[ "def", "callMultipleInThread", "(", "tupleList", ")", ":", "from", "twisted", ".", "internet", "import", "reactor", "reactor", ".", "callInThread", "(", "_runMultiple", ",", "tupleList", ")" ]
run a list of functions in the same thread .
train
false
37,314
def widget_activity(request):
    """Return JSON data for the admin activity widget."""
    activity = dynamic_activity_data(request)
    payload = json.dumps(activity)
    return HttpResponse(payload, content_type=u'application/json')
[ "def", "widget_activity", "(", "request", ")", ":", "activity_data", "=", "dynamic_activity_data", "(", "request", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "activity_data", ")", ",", "content_type", "=", "u'application/json'", ")" ]
return json data for the admin activity widget .
train
false
37,315
def map_wms(request, mapid):
    """Publish local map layers as a group layer in the local OWS.

    PUT publishes the map's layer group and returns its name; GET returns
    the current layer-group name.  Both answer JSON; any other method gets
    a 405 with the allowed methods.
    """
    map_obj = _resolve_map(request, mapid, 'base.view_resourcebase',
                           _PERMISSION_MSG_VIEW)
    if request.method == 'PUT':
        try:
            layerGroupName = map_obj.publish_layer_group()
            response = dict(
                layerGroupName=layerGroupName,
                ows=getattr(ogc_server_settings, 'ows', ''))
            return HttpResponse(json.dumps(response),
                                content_type='application/json')
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception so those still propagate.
            return HttpResponseServerError()
    if request.method == 'GET':
        response = dict(
            layerGroupName=getattr(map_obj.layer_group, 'name', ''),
            ows=getattr(ogc_server_settings, 'ows', ''))
        return HttpResponse(json.dumps(response),
                            content_type='application/json')
    return HttpResponseNotAllowed(['PUT', 'GET'])
[ "def", "map_wms", "(", "request", ",", "mapid", ")", ":", "map_obj", "=", "_resolve_map", "(", "request", ",", "mapid", ",", "'base.view_resourcebase'", ",", "_PERMISSION_MSG_VIEW", ")", "if", "(", "request", ".", "method", "==", "'PUT'", ")", ":", "try", ":", "layerGroupName", "=", "map_obj", ".", "publish_layer_group", "(", ")", "response", "=", "dict", "(", "layerGroupName", "=", "layerGroupName", ",", "ows", "=", "getattr", "(", "ogc_server_settings", ",", "'ows'", ",", "''", ")", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "response", ")", ",", "content_type", "=", "'application/json'", ")", "except", ":", "return", "HttpResponseServerError", "(", ")", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "response", "=", "dict", "(", "layerGroupName", "=", "getattr", "(", "map_obj", ".", "layer_group", ",", "'name'", ",", "''", ")", ",", "ows", "=", "getattr", "(", "ogc_server_settings", ",", "'ows'", ",", "''", ")", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "response", ")", ",", "content_type", "=", "'application/json'", ")", "return", "HttpResponseNotAllowed", "(", "[", "'PUT'", ",", "'GET'", "]", ")" ]
publish local map layers as group layer in local ows .
train
false
37,316
def _get_dvs(service_instance, dvs_name):
    """Return a reference to the named distributed virtual switch.

    Returns None when no switch called *dvs_name* exists.
    """
    # Cheap membership check first; only walk the inventory if it exists.
    if dvs_name not in list_dvs(service_instance):
        return None
    inventory = get_inventory(service_instance)
    container = inventory.viewManager.CreateContainerView(
        inventory.rootFolder, [vim.DistributedVirtualSwitch], True)
    for candidate in container.view:
        if candidate.name == dvs_name:
            return candidate
    return None
[ "def", "_get_dvs", "(", "service_instance", ",", "dvs_name", ")", ":", "switches", "=", "list_dvs", "(", "service_instance", ")", "if", "(", "dvs_name", "in", "switches", ")", ":", "inventory", "=", "get_inventory", "(", "service_instance", ")", "container", "=", "inventory", ".", "viewManager", ".", "CreateContainerView", "(", "inventory", ".", "rootFolder", ",", "[", "vim", ".", "DistributedVirtualSwitch", "]", ",", "True", ")", "for", "item", "in", "container", ".", "view", ":", "if", "(", "item", ".", "name", "==", "dvs_name", ")", ":", "return", "item", "return", "None" ]
return a reference to a distributed virtual switch object .
train
true
37,317
@register.filter
def label_tag(field, suffix=None):
    """Template filter: render *field*'s label tag with an optional suffix.

    Objects without a ``label_tag`` attribute produce the empty string.
    """
    if hasattr(field, 'label_tag'):
        return field.label_tag(label_suffix=suffix)
    return ''
[ "@", "register", ".", "filter", "def", "label_tag", "(", "field", ",", "suffix", "=", "None", ")", ":", "if", "(", "not", "hasattr", "(", "field", ",", "'label_tag'", ")", ")", ":", "return", "''", "return", "field", ".", "label_tag", "(", "label_suffix", "=", "suffix", ")" ]
returns the label_tag for a field .
train
false
37,318
def get_eol_chars_from_os_name(os_name):
    """Return the EOL characters registered for *os_name*.

    Returns None when the name is not found in EOL_CHARS (matching the
    original implicit fall-through).
    """
    return next(
        (chars for (chars, name) in EOL_CHARS if name == os_name),
        None,
    )
[ "def", "get_eol_chars_from_os_name", "(", "os_name", ")", ":", "for", "(", "eol_chars", ",", "name", ")", "in", "EOL_CHARS", ":", "if", "(", "name", "==", "os_name", ")", ":", "return", "eol_chars" ]
return eol characters from os name .
train
false
37,319
def infer_unit(value):
    """Split a trailing measurement unit off *value*.

    When *value* ends with one of apcaccess's known units, return
    (value without the unit, inferred unit name); otherwise (value, None).
    """
    from apcaccess.status import ALL_UNITS
    for unit in ALL_UNITS:
        if not value.endswith(unit):
            continue
        stripped = value[:-len(unit)]
        return stripped, INFERRED_UNITS.get(unit, unit.strip())
    return value, None
[ "def", "infer_unit", "(", "value", ")", ":", "from", "apcaccess", ".", "status", "import", "ALL_UNITS", "for", "unit", "in", "ALL_UNITS", ":", "if", "value", ".", "endswith", "(", "unit", ")", ":", "return", "(", "value", "[", ":", "(", "-", "len", "(", "unit", ")", ")", "]", ",", "INFERRED_UNITS", ".", "get", "(", "unit", ",", "unit", ".", "strip", "(", ")", ")", ")", "return", "(", "value", ",", "None", ")" ]
if the value ends with any of the units from all_units , strip the unit and return the remaining value with the inferred unit ; otherwise return the value with no unit .
train
false
37,321
def _select_iterables(elements): return itertools.chain(*[c._select_iterable for c in elements])
[ "def", "_select_iterables", "(", "elements", ")", ":", "return", "itertools", ".", "chain", "(", "*", "[", "c", ".", "_select_iterable", "for", "c", "in", "elements", "]", ")" ]
expand tables into individual columns in the given list of column expressions .
train
false
37,325
def find_command(cmd, paths=None, pathext=None):
    """Try to find a command in the PATH and return its full path.

    paths defaults to the entries of $PATH; pathext is the os.pathsep-joined
    list of executable extensions to try (default from get_pathext()).
    Raises BadCommand when the command cannot be found.
    """
    if (paths is None):
        paths = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(paths, string_types):
        # Accept a single path string as a convenience.
        paths = [paths]
    if (pathext is None):
        pathext = get_pathext()
    pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)]
    # If cmd already carries an executable extension, don't append another.
    if (os.path.splitext(cmd)[1].lower() in pathext):
        pathext = ['']
    for path in paths:
        cmd_path = os.path.join(path, cmd)
        for ext in pathext:
            cmd_path_ext = (cmd_path + ext)
            if os.path.isfile(cmd_path_ext):
                return cmd_path_ext
        # Extension-less match is tried only after all extensions.
        if os.path.isfile(cmd_path):
            return cmd_path
    raise BadCommand(('Cannot find command %r' % cmd))
[ "def", "find_command", "(", "cmd", ",", "paths", "=", "None", ",", "pathext", "=", "None", ")", ":", "if", "(", "paths", "is", "None", ")", ":", "paths", "=", "os", ".", "environ", ".", "get", "(", "'PATH'", ",", "''", ")", ".", "split", "(", "os", ".", "pathsep", ")", "if", "isinstance", "(", "paths", ",", "string_types", ")", ":", "paths", "=", "[", "paths", "]", "if", "(", "pathext", "is", "None", ")", ":", "pathext", "=", "get_pathext", "(", ")", "pathext", "=", "[", "ext", "for", "ext", "in", "pathext", ".", "lower", "(", ")", ".", "split", "(", "os", ".", "pathsep", ")", "if", "len", "(", "ext", ")", "]", "if", "(", "os", ".", "path", ".", "splitext", "(", "cmd", ")", "[", "1", "]", ".", "lower", "(", ")", "in", "pathext", ")", ":", "pathext", "=", "[", "''", "]", "for", "path", "in", "paths", ":", "cmd_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "cmd", ")", "for", "ext", "in", "pathext", ":", "cmd_path_ext", "=", "(", "cmd_path", "+", "ext", ")", "if", "os", ".", "path", ".", "isfile", "(", "cmd_path_ext", ")", ":", "return", "cmd_path_ext", "if", "os", ".", "path", ".", "isfile", "(", "cmd_path", ")", ":", "return", "cmd_path", "raise", "BadCommand", "(", "(", "'Cannot find command %r'", "%", "cmd", ")", ")" ]
try to find a command in the path .
train
true
37,327
@task(name='geonode.tasks.update.create_document_thumbnail', queue='update')
def create_document_thumbnail(object_id):
    """Celery task: render and save a thumbnail for the Document *object_id*.

    Returns silently when the document no longer exists (it may have been
    deleted before the queued task ran).
    """
    try:
        document = Document.objects.get(id=object_id)
    except Document.DoesNotExist:
        return
    image = document._render_thumbnail()
    filename = ('document-%s-thumb.png' % document.uuid)
    document.save_thumbnail(filename, image)
[ "@", "task", "(", "name", "=", "'geonode.tasks.update.create_document_thumbnail'", ",", "queue", "=", "'update'", ")", "def", "create_document_thumbnail", "(", "object_id", ")", ":", "try", ":", "document", "=", "Document", ".", "objects", ".", "get", "(", "id", "=", "object_id", ")", "except", "Document", ".", "DoesNotExist", ":", "return", "image", "=", "document", ".", "_render_thumbnail", "(", ")", "filename", "=", "(", "'document-%s-thumb.png'", "%", "document", ".", "uuid", ")", "document", ".", "save_thumbnail", "(", "filename", ",", "image", ")" ]
runs the create_thumbnail logic on a document .
train
false
37,329
@gen.coroutine
def Unshare(client, obj_store, user_id, device_id, request):
    """Unshare photos from the episodes in the specified viewpoint.

    Verifies the request's activity id, queues UnshareOperation.Execute as
    an operation for this user/device, logs a summary (episode and photo
    counts), and resolves to an empty dict.
    """
    (yield Activity.VerifyActivityId(client, user_id, device_id, request['activity']['activity_id']))
    (yield gen.Task(Operation.CreateAndExecute, client, user_id, device_id, 'UnshareOperation.Execute', request))
    logging.info(('UNSHARE: user: %d, device: %d, viewpoint: %s, %d episodes, %d photos' % (user_id, device_id, request['viewpoint_id'], len(request['episodes']), sum([len(ep_dict['photo_ids']) for ep_dict in request['episodes']]))))
    raise gen.Return({})
[ "@", "gen", ".", "coroutine", "def", "Unshare", "(", "client", ",", "obj_store", ",", "user_id", ",", "device_id", ",", "request", ")", ":", "(", "yield", "Activity", ".", "VerifyActivityId", "(", "client", ",", "user_id", ",", "device_id", ",", "request", "[", "'activity'", "]", "[", "'activity_id'", "]", ")", ")", "(", "yield", "gen", ".", "Task", "(", "Operation", ".", "CreateAndExecute", ",", "client", ",", "user_id", ",", "device_id", ",", "'UnshareOperation.Execute'", ",", "request", ")", ")", "logging", ".", "info", "(", "(", "'UNSHARE: user: %d, device: %d, viewpoint: %s, %d episodes, %d photos'", "%", "(", "user_id", ",", "device_id", ",", "request", "[", "'viewpoint_id'", "]", ",", "len", "(", "request", "[", "'episodes'", "]", ")", ",", "sum", "(", "[", "len", "(", "ep_dict", "[", "'photo_ids'", "]", ")", "for", "ep_dict", "in", "request", "[", "'episodes'", "]", "]", ")", ")", ")", ")", "raise", "gen", ".", "Return", "(", "{", "}", ")" ]
unshares photos from the episodes in the specified viewpoint .
train
false
37,331
@not_implemented_for('undirected')
def kosaraju_strongly_connected_components(G, source=None):
    """Generate sets of nodes, one per strongly connected component of G.

    Kosaraju's algorithm: take a DFS postorder of the reversed graph, then
    run DFS on the original graph in reverse postorder; each preorder tree
    discovered (minus already-seen nodes) is one component.
    """
    # Only the postorder is computed on the reversed graph; G is restored
    # before the component sweep below.
    with nx.utils.reversed(G):
        post = list(nx.dfs_postorder_nodes(G, source=source))
    seen = set()
    while post:
        r = post.pop()
        if (r in seen):
            continue
        c = nx.dfs_preorder_nodes(G, r)
        new = {v for v in c if (v not in seen)}
        (yield new)
        seen.update(new)
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "kosaraju_strongly_connected_components", "(", "G", ",", "source", "=", "None", ")", ":", "with", "nx", ".", "utils", ".", "reversed", "(", "G", ")", ":", "post", "=", "list", "(", "nx", ".", "dfs_postorder_nodes", "(", "G", ",", "source", "=", "source", ")", ")", "seen", "=", "set", "(", ")", "while", "post", ":", "r", "=", "post", ".", "pop", "(", ")", "if", "(", "r", "in", "seen", ")", ":", "continue", "c", "=", "nx", ".", "dfs_preorder_nodes", "(", "G", ",", "r", ")", "new", "=", "{", "v", "for", "v", "in", "c", "if", "(", "v", "not", "in", "seen", ")", "}", "(", "yield", "new", ")", "seen", ".", "update", "(", "new", ")" ]
generate nodes in strongly connected components of graph .
train
false
37,332
def _CheckType(value, check_type, name, allow_none=True): if ((value is None) and allow_none): return if (not isinstance(value, check_type)): raise TypeError(("%s type doesn't match %s." % (name, check_type)))
[ "def", "_CheckType", "(", "value", ",", "check_type", ",", "name", ",", "allow_none", "=", "True", ")", ":", "if", "(", "(", "value", "is", "None", ")", "and", "allow_none", ")", ":", "return", "if", "(", "not", "isinstance", "(", "value", ",", "check_type", ")", ")", ":", "raise", "TypeError", "(", "(", "\"%s type doesn't match %s.\"", "%", "(", "name", ",", "check_type", ")", ")", ")" ]
check that the type of an object is acceptable .
train
true
37,334
@contextmanager
def _start_worker_thread(app, concurrency=1, pool=u'solo', loglevel=WORKER_LOGLEVEL, logfile=None, WorkController=TestWorkController, **kwargs):
    """Start a Celery worker in a background thread and yield it.

    Configures *app* for worker use, sanity-checks that the ping task is
    registered, starts a WorkController in a daemonless thread, and on exit
    signals termination through celery.worker.state and joins the thread
    (10 s timeout).
    """
    setup_app_for_worker(app, loglevel, logfile)
    assert (u'celery.ping' in app.tasks)
    # Touch the default channel so the broker connection/queue exists
    # before the worker starts consuming.
    with app.connection() as conn:
        conn.default_channel.queue_declare
    worker = WorkController(app=app, concurrency=concurrency, hostname=anon_nodename(), pool=pool, loglevel=loglevel, logfile=logfile, ready_callback=None, without_heartbeat=True, without_mingle=True, without_gossip=True, **kwargs)
    t = threading.Thread(target=worker.start)
    t.start()
    worker.ensure_started()
    # Allow result.get() etc. from within the test process.
    _set_task_join_will_block(False)
    (yield worker)
    from celery.worker import state
    # 0 asks the worker loop to stop; reset to None afterwards so later
    # workers in the same process are unaffected.
    state.should_terminate = 0
    t.join(10)
    state.should_terminate = None
[ "@", "contextmanager", "def", "_start_worker_thread", "(", "app", ",", "concurrency", "=", "1", ",", "pool", "=", "u'solo'", ",", "loglevel", "=", "WORKER_LOGLEVEL", ",", "logfile", "=", "None", ",", "WorkController", "=", "TestWorkController", ",", "**", "kwargs", ")", ":", "setup_app_for_worker", "(", "app", ",", "loglevel", ",", "logfile", ")", "assert", "(", "u'celery.ping'", "in", "app", ".", "tasks", ")", "with", "app", ".", "connection", "(", ")", "as", "conn", ":", "conn", ".", "default_channel", ".", "queue_declare", "worker", "=", "WorkController", "(", "app", "=", "app", ",", "concurrency", "=", "concurrency", ",", "hostname", "=", "anon_nodename", "(", ")", ",", "pool", "=", "pool", ",", "loglevel", "=", "loglevel", ",", "logfile", "=", "logfile", ",", "ready_callback", "=", "None", ",", "without_heartbeat", "=", "True", ",", "without_mingle", "=", "True", ",", "without_gossip", "=", "True", ",", "**", "kwargs", ")", "t", "=", "threading", ".", "Thread", "(", "target", "=", "worker", ".", "start", ")", "t", ".", "start", "(", ")", "worker", ".", "ensure_started", "(", ")", "_set_task_join_will_block", "(", "False", ")", "(", "yield", "worker", ")", "from", "celery", ".", "worker", "import", "state", "state", ".", "should_terminate", "=", "0", "t", ".", "join", "(", "10", ")", "state", ".", "should_terminate", "=", "None" ]
start celery worker in a thread .
train
false
37,335
def remove_app(name, site):
    """Remove an IIS application from *site*.

    Returns True when the application is absent afterwards (including when
    it was never there), False when removal failed.
    """
    if name not in list_apps(site):
        _LOG.debug('Application already absent: %s', name)
        return True
    command = "Remove-WebApplication -Name '{0}' -Site '{1}'".format(name, site)
    cmd_ret = _srvmgr(command)
    if cmd_ret['retcode'] == 0:
        # Re-list to confirm the removal actually took effect.
        if name not in list_apps(site):
            _LOG.debug('Application removed successfully: %s', name)
            return True
    _LOG.error('Unable to remove application: %s', name)
    return False
[ "def", "remove_app", "(", "name", ",", "site", ")", ":", "pscmd", "=", "list", "(", ")", "current_apps", "=", "list_apps", "(", "site", ")", "if", "(", "name", "not", "in", "current_apps", ")", ":", "_LOG", ".", "debug", "(", "'Application already absent: %s'", ",", "name", ")", "return", "True", "pscmd", ".", "append", "(", "\"Remove-WebApplication -Name '{0}' -Site '{1}'\"", ".", "format", "(", "name", ",", "site", ")", ")", "cmd_ret", "=", "_srvmgr", "(", "str", "(", ")", ".", "join", "(", "pscmd", ")", ")", "if", "(", "cmd_ret", "[", "'retcode'", "]", "==", "0", ")", ":", "new_apps", "=", "list_apps", "(", "site", ")", "if", "(", "name", "not", "in", "new_apps", ")", ":", "_LOG", ".", "debug", "(", "'Application removed successfully: %s'", ",", "name", ")", "return", "True", "_LOG", ".", "error", "(", "'Unable to remove application: %s'", ",", "name", ")", "return", "False" ]
remove an iis application .
train
false
37,337
def _gid():
    """Grain: the minion's group id (None on Windows, which has no gid)."""
    return None if salt.utils.is_windows() else os.getgid()
[ "def", "_gid", "(", ")", ":", "if", "salt", ".", "utils", ".", "is_windows", "(", ")", ":", "return", "None", "return", "os", ".", "getgid", "(", ")" ]
grain for the minion group id .
train
false
37,338
def calculate_distance_between_colors(color1, color2):
    """Return the component-wise average of two RGB color tuples.

    NOTE(review): despite the name this computes the midpoint of the two
    colors, not a distance; kept as-is for API compatibility.
    """
    return tuple((color1[i] + color2[i]) / 2 for i in range(3))
[ "def", "calculate_distance_between_colors", "(", "color1", ",", "color2", ")", ":", "return", "(", "(", "(", "color1", "[", "0", "]", "+", "color2", "[", "0", "]", ")", "/", "2", ")", ",", "(", "(", "color1", "[", "1", "]", "+", "color2", "[", "1", "]", ")", "/", "2", ")", ",", "(", "(", "color1", "[", "2", "]", "+", "color2", "[", "2", "]", ")", "/", "2", ")", ")" ]
takes 2 color tuples and returns the average between them .
train
false
37,339
def quota_allocated_get_all_by_project(context, project_id):
    """Retrieve all allocated quotas for *project_id*.

    Thin delegate to the configured database backend (IMPL).
    """
    return IMPL.quota_allocated_get_all_by_project(context, project_id)
[ "def", "quota_allocated_get_all_by_project", "(", "context", ",", "project_id", ")", ":", "return", "IMPL", ".", "quota_allocated_get_all_by_project", "(", "context", ",", "project_id", ")" ]
retrieve all allocated quotas associated with a given project .
train
false
37,340
def file_digest(project_tree, f):
    """Return a FileDigest for a known-existing file *f* in *project_tree*.

    The digest is built from the file's path and its current content.
    """
    return FileDigest.create(f.path, project_tree.content(f.path))
[ "def", "file_digest", "(", "project_tree", ",", "f", ")", ":", "return", "FileDigest", ".", "create", "(", "f", ".", "path", ",", "project_tree", ".", "content", "(", "f", ".", "path", ")", ")" ]
return a filedigest for a known-existing file .
train
false
37,341
def test_run_json_load(mocker, mock_user_config, template_name, context, replay_test_dir, replay_file):
    """replay.load should resolve the replay file name, json.load exactly
    that file, never touch the user config, and return the stored context."""
    spy_get_replay_file = mocker.spy(replay, 'get_file_name')
    # side_effect=json.load keeps real behavior while recording calls.
    mock_json_load = mocker.patch('json.load', side_effect=json.load)
    loaded_context = replay.load(replay_test_dir, template_name)
    assert (not mock_user_config.called)
    spy_get_replay_file.assert_called_once_with(replay_test_dir, template_name)
    assert (mock_json_load.call_count == 1)
    # Confirm json.load was fed a handle on the expected replay file.
    ((infile_handler,), kwargs) = mock_json_load.call_args
    assert (infile_handler.name == replay_file)
    assert (loaded_context == context)
[ "def", "test_run_json_load", "(", "mocker", ",", "mock_user_config", ",", "template_name", ",", "context", ",", "replay_test_dir", ",", "replay_file", ")", ":", "spy_get_replay_file", "=", "mocker", ".", "spy", "(", "replay", ",", "'get_file_name'", ")", "mock_json_load", "=", "mocker", ".", "patch", "(", "'json.load'", ",", "side_effect", "=", "json", ".", "load", ")", "loaded_context", "=", "replay", ".", "load", "(", "replay_test_dir", ",", "template_name", ")", "assert", "(", "not", "mock_user_config", ".", "called", ")", "spy_get_replay_file", ".", "assert_called_once_with", "(", "replay_test_dir", ",", "template_name", ")", "assert", "(", "mock_json_load", ".", "call_count", "==", "1", ")", "(", "(", "infile_handler", ",", ")", ",", "kwargs", ")", "=", "mock_json_load", ".", "call_args", "assert", "(", "infile_handler", ".", "name", "==", "replay_file", ")", "assert", "(", "loaded_context", "==", "context", ")" ]
test that replay .
train
false
37,342
def set_login_cookie(remove=False, remember_me=False):
    """Set (or, with remove=True, expire) the web-interface login cookie.

    The cookie value is SHA-1(salt + client IP + COOKIE_SECRET); the salt is
    stored in a companion cookie so the value can be re-derived when
    validating.  remember_me extends both cookies to two weeks.

    NOTE(review): the salt is randint(1, 1000) and the hash is SHA-1 --
    both weak for an auth token; consider the `secrets` module and a
    stronger construction (would require updating the validator too).
    """
    salt = randint(1, 1000)
    cherrypy.response.cookie['login_cookie'] = hashlib.sha1(((str(salt) + cherrypy.request.remote.ip) + COOKIE_SECRET)).hexdigest()
    cherrypy.response.cookie['login_cookie']['path'] = '/'
    cherrypy.response.cookie['login_salt'] = salt
    cherrypy.response.cookie['login_salt']['path'] = '/'
    if remember_me:
        # Two weeks, in seconds.
        cherrypy.response.cookie['login_cookie']['max-age'] = ((3600 * 24) * 14)
        cherrypy.response.cookie['login_salt']['max-age'] = ((3600 * 24) * 14)
    if remove:
        cherrypy.response.cookie['login_cookie']['expires'] = 0
        cherrypy.response.cookie['login_salt']['expires'] = 0
    else:
        notifier.send_notification(T('User logged in'), T('User logged in to the web interface'), 'new_login')
[ "def", "set_login_cookie", "(", "remove", "=", "False", ",", "remember_me", "=", "False", ")", ":", "salt", "=", "randint", "(", "1", ",", "1000", ")", "cherrypy", ".", "response", ".", "cookie", "[", "'login_cookie'", "]", "=", "hashlib", ".", "sha1", "(", "(", "(", "str", "(", "salt", ")", "+", "cherrypy", ".", "request", ".", "remote", ".", "ip", ")", "+", "COOKIE_SECRET", ")", ")", ".", "hexdigest", "(", ")", "cherrypy", ".", "response", ".", "cookie", "[", "'login_cookie'", "]", "[", "'path'", "]", "=", "'/'", "cherrypy", ".", "response", ".", "cookie", "[", "'login_salt'", "]", "=", "salt", "cherrypy", ".", "response", ".", "cookie", "[", "'login_salt'", "]", "[", "'path'", "]", "=", "'/'", "if", "remember_me", ":", "cherrypy", ".", "response", ".", "cookie", "[", "'login_cookie'", "]", "[", "'max-age'", "]", "=", "(", "(", "3600", "*", "24", ")", "*", "14", ")", "cherrypy", ".", "response", ".", "cookie", "[", "'login_salt'", "]", "[", "'max-age'", "]", "=", "(", "(", "3600", "*", "24", ")", "*", "14", ")", "if", "remove", ":", "cherrypy", ".", "response", ".", "cookie", "[", "'login_cookie'", "]", "[", "'expires'", "]", "=", "0", "cherrypy", ".", "response", ".", "cookie", "[", "'login_salt'", "]", "[", "'expires'", "]", "=", "0", "else", ":", "notifier", ".", "send_notification", "(", "T", "(", "'User logged in'", ")", ",", "T", "(", "'User logged in to the web interface'", ")", ",", "'new_login'", ")" ]
we try to set a cookie as unique as possible to the current user .
train
false
37,343
def exit_success(jid, ext_source=None):
    """Check whether job *jid* executed and exited successfully.

    Returns a dict mapping each targeted minion to True when it produced a
    truthy 'return' value, False otherwise (including minions that never
    answered).
    """
    data = list_job(jid, ext_source=ext_source)
    result = data['Result']
    ret = dict()
    for minion in data['Minions']:
        entry = result.get(minion)
        ret[minion] = bool(entry and ('return' in entry) and entry['return'])
    return ret
[ "def", "exit_success", "(", "jid", ",", "ext_source", "=", "None", ")", ":", "ret", "=", "dict", "(", ")", "data", "=", "list_job", "(", "jid", ",", "ext_source", "=", "ext_source", ")", "minions", "=", "data", "[", "'Minions'", "]", "result", "=", "data", "[", "'Result'", "]", "for", "minion", "in", "minions", ":", "if", "(", "(", "minion", "in", "result", ")", "and", "(", "'return'", "in", "result", "[", "minion", "]", ")", ")", ":", "ret", "[", "minion", "]", "=", "(", "True", "if", "result", "[", "minion", "]", "[", "'return'", "]", "else", "False", ")", "else", ":", "ret", "[", "minion", "]", "=", "False", "return", "ret" ]
check if a job has been executed and exit successfully jid the jid to look up .
train
true
37,344
def _escape_filename(filename): if (' ' not in filename): return filename if (filename.startswith('"') and filename.endswith('"')): return filename else: return ('"%s"' % filename)
[ "def", "_escape_filename", "(", "filename", ")", ":", "if", "(", "' '", "not", "in", "filename", ")", ":", "return", "filename", "if", "(", "filename", ".", "startswith", "(", "'\"'", ")", "and", "filename", ".", "endswith", "(", "'\"'", ")", ")", ":", "return", "filename", "else", ":", "return", "(", "'\"%s\"'", "%", "filename", ")" ]
escape filenames with spaces by adding quotes .
train
false
37,346
@docfiller
def savemat(file_name, mdict, appendmat=True, format='5', long_field_names=False, do_compression=False, oned_as='row'):
    """Save a dictionary of names and arrays into a MATLAB-style .mat file.

    file_name may be a path string (``.mat`` is appended when *appendmat*
    is true and missing) or a writeable file-like object.  *format* selects
    the v4 or v5 MAT-file writer; long_field_names and do_compression are
    v5-only options.
    """
    file_is_string = isinstance(file_name, string_types)
    if file_is_string:
        if (appendmat and (file_name[(-4):] != '.mat')):
            file_name = (file_name + '.mat')
        file_stream = open(file_name, 'wb')
    else:
        if (not hasattr(file_name, 'write')):
            raise IOError('Writer needs file name or writeable file-like object')
        file_stream = file_name
    if (format == '4'):
        # The v4 format has no long-field-name support at all.
        if long_field_names:
            raise ValueError('Long field names are not available for version 4 files')
        MW = MatFile4Writer(file_stream, oned_as)
    elif (format == '5'):
        MW = MatFile5Writer(file_stream, do_compression=do_compression, unicode_strings=True, long_field_names=long_field_names, oned_as=oned_as)
    else:
        raise ValueError("Format should be '4' or '5'")
    MW.put_variables(mdict)
    # Only close streams we opened ourselves; caller-provided objects are
    # left open.
    if file_is_string:
        file_stream.close()
[ "@", "docfiller", "def", "savemat", "(", "file_name", ",", "mdict", ",", "appendmat", "=", "True", ",", "format", "=", "'5'", ",", "long_field_names", "=", "False", ",", "do_compression", "=", "False", ",", "oned_as", "=", "'row'", ")", ":", "file_is_string", "=", "isinstance", "(", "file_name", ",", "string_types", ")", "if", "file_is_string", ":", "if", "(", "appendmat", "and", "(", "file_name", "[", "(", "-", "4", ")", ":", "]", "!=", "'.mat'", ")", ")", ":", "file_name", "=", "(", "file_name", "+", "'.mat'", ")", "file_stream", "=", "open", "(", "file_name", ",", "'wb'", ")", "else", ":", "if", "(", "not", "hasattr", "(", "file_name", ",", "'write'", ")", ")", ":", "raise", "IOError", "(", "'Writer needs file name or writeable file-like object'", ")", "file_stream", "=", "file_name", "if", "(", "format", "==", "'4'", ")", ":", "if", "long_field_names", ":", "raise", "ValueError", "(", "'Long field names are not available for version 4 files'", ")", "MW", "=", "MatFile4Writer", "(", "file_stream", ",", "oned_as", ")", "elif", "(", "format", "==", "'5'", ")", ":", "MW", "=", "MatFile5Writer", "(", "file_stream", ",", "do_compression", "=", "do_compression", ",", "unicode_strings", "=", "True", ",", "long_field_names", "=", "long_field_names", ",", "oned_as", "=", "oned_as", ")", "else", ":", "raise", "ValueError", "(", "\"Format should be '4' or '5'\"", ")", "MW", ".", "put_variables", "(", "mdict", ")", "if", "file_is_string", ":", "file_stream", ".", "close", "(", ")" ]
save a dictionary of names and arrays into a matlab-style .
train
false
37,347
def compiler(path):
    """Byte-compile every ``.py`` file under *path*.

    Generator: after compiling each file it yields the number of files
    still remaining, counting down to 0 (same contract as before).

    The original used ``os.path.walk``, which was removed in Python 3;
    ``os.walk`` is the portable replacement.
    """
    all_files = []
    for directory, _dirs, names in os.walk(path):
        all_files.extend(
            os.path.join(directory, n) for n in names if n.endswith('.py'))
    remaining = len(all_files)
    for source in all_files:
        remaining -= 1
        py_compile.compile(source)
        yield remaining
[ "def", "compiler", "(", "path", ")", ":", "def", "justlist", "(", "arg", ",", "directory", ",", "names", ")", ":", "pynames", "=", "[", "os", ".", "path", ".", "join", "(", "directory", ",", "n", ")", "for", "n", "in", "names", "if", "n", ".", "endswith", "(", "'.py'", ")", "]", "arg", ".", "extend", "(", "pynames", ")", "all", "=", "[", "]", "os", ".", "path", ".", "walk", "(", "path", ",", "justlist", ",", "all", ")", "remaining", "=", "len", "(", "all", ")", "i", "=", "zip", "(", "all", ",", "range", "(", "(", "remaining", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ")", "for", "(", "f", ",", "remaining", ")", "in", "i", ":", "py_compile", ".", "compile", "(", "f", ")", "(", "yield", "remaining", ")" ]
a generator for compiling files to .
train
false
37,348
def verify_rsa_sha1(request, rsa_public_key):
    """Verify the request's RSASSA-PKCS #1 v1.5 (RSA-SHA1) OAuth signature.

    Rebuilds the signature base string from the request's method, URI and
    normalized parameters, base64-decodes the transmitted signature, and
    checks it against *rsa_public_key*.  Returns a boolean.
    """
    norm_params = normalize_parameters(request.params)
    uri = normalize_base_string_uri(request.uri)
    message = construct_base_string(request.http_method, uri, norm_params).encode(u'utf-8')
    # The OAuth signature travels base64-encoded.
    sig = binascii.a2b_base64(request.signature.encode(u'utf-8'))
    alg = _jwt_rs1_signing_algorithm()
    key = _prepare_key_plus(alg, rsa_public_key)
    return alg.verify(message, key, sig)
[ "def", "verify_rsa_sha1", "(", "request", ",", "rsa_public_key", ")", ":", "norm_params", "=", "normalize_parameters", "(", "request", ".", "params", ")", "uri", "=", "normalize_base_string_uri", "(", "request", ".", "uri", ")", "message", "=", "construct_base_string", "(", "request", ".", "http_method", ",", "uri", ",", "norm_params", ")", ".", "encode", "(", "u'utf-8'", ")", "sig", "=", "binascii", ".", "a2b_base64", "(", "request", ".", "signature", ".", "encode", "(", "u'utf-8'", ")", ")", "alg", "=", "_jwt_rs1_signing_algorithm", "(", ")", "key", "=", "_prepare_key_plus", "(", "alg", ",", "rsa_public_key", ")", "return", "alg", ".", "verify", "(", "message", ",", "key", ",", "sig", ")" ]
verify a rsassa-pkcs #1 v1 .
train
false
37,349
def close_cover(hass, entity_id=None):
    """Close all covers, or only *entity_id* when one is given."""
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    else:
        # No payload targets every cover.
        data = None
    hass.services.call(DOMAIN, SERVICE_CLOSE_COVER, data)
[ "def", "close_cover", "(", "hass", ",", "entity_id", "=", "None", ")", ":", "data", "=", "(", "{", "ATTR_ENTITY_ID", ":", "entity_id", "}", "if", "entity_id", "else", "None", ")", "hass", ".", "services", ".", "call", "(", "DOMAIN", ",", "SERVICE_CLOSE_COVER", ",", "data", ")" ]
close all or specified cover .
train
false
37,350
def _app_or_default(app=None):
    """Return *app* unless it is None; otherwise the thread-local current
    app, falling back to the default app."""
    if app is not None:
        return app
    current = getattr(_tls, 'current_app', None)
    return current or default_app
[ "def", "_app_or_default", "(", "app", "=", "None", ")", ":", "if", "(", "app", "is", "None", ")", ":", "return", "(", "getattr", "(", "_tls", ",", "'current_app'", ",", "None", ")", "or", "default_app", ")", "return", "app" ]
returns the app provided or the default app if none .
train
false
37,353
def vm_absent(name, archive=False):
    """Salt state: ensure vm *name* (hostname) is absent on the compute node.

    name : string
        hostname of the vm (matched case-insensitively via lower()).
    archive : boolean
        archive the vm on delete instead of discarding it.

    Returns a standard state return dict (name/changes/result/comment).
    """
    name = name.lower()
    ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
    if (name not in __salt__['vmadm.list'](order='hostname')):
        ret['result'] = True
        ret['comment'] = 'vm {0} is absent'.format(name)
    else:
        if (not __opts__['test']):
            if archive:
                # Flag the vm so vmadm archives it when deleted.
                __salt__['vmadm.update'](vm=name, key='hostname', archive_on_delete=True)
            ret['result'] = __salt__['vmadm.delete'](name, key='hostname')
        else:
            # Test mode: report the delete as if it succeeded.
            ret['result'] = True
        # vmadm.delete may return an error dict instead of a boolean.
        if ((not isinstance(ret['result'], bool)) and ret['result'].get('Error')):
            ret['result'] = False
            ret['comment'] = 'failed to delete vm {0}'.format(name)
        else:
            ret['comment'] = 'vm {0} deleted'.format(name)
            ret['changes'][name] = None
    return ret
[ "def", "vm_absent", "(", "name", ",", "archive", "=", "False", ")", ":", "name", "=", "name", ".", "lower", "(", ")", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", ",", "'comment'", ":", "''", "}", "if", "(", "name", "not", "in", "__salt__", "[", "'vmadm.list'", "]", "(", "order", "=", "'hostname'", ")", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'vm {0} is absent'", ".", "format", "(", "name", ")", "else", ":", "if", "(", "not", "__opts__", "[", "'test'", "]", ")", ":", "if", "archive", ":", "__salt__", "[", "'vmadm.update'", "]", "(", "vm", "=", "name", ",", "key", "=", "'hostname'", ",", "archive_on_delete", "=", "True", ")", "ret", "[", "'result'", "]", "=", "__salt__", "[", "'vmadm.delete'", "]", "(", "name", ",", "key", "=", "'hostname'", ")", "else", ":", "ret", "[", "'result'", "]", "=", "True", "if", "(", "(", "not", "isinstance", "(", "ret", "[", "'result'", "]", ",", "bool", ")", ")", "and", "ret", "[", "'result'", "]", ".", "get", "(", "'Error'", ")", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'failed to delete vm {0}'", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "'vm {0} deleted'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "[", "name", "]", "=", "None", "return", "ret" ]
ensure vm is absent on the computenode name : string hostname of vm archive : boolean toggle archiving of vm on removal .
train
true
37,354
def getTopAddBiconicOutput(bottomRadians, height, outputs, radius, sides, start, tipRadius, topRadians): radiusMaximum = max(radius.real, radius.imag) topRadiusMaximum = (radiusMaximum - (height * math.tan(bottomRadians))) trunkEndZ = (start.z + height) trunkTopOverBottom = (topRadiusMaximum / radiusMaximum) topRadiusComplex = (trunkTopOverBottom * radius) cylinder.addCylinderOutputByEndStart(trunkEndZ, radius, outputs, sides, start, trunkTopOverBottom) tipOverTop = (tipRadius / topRadiusMaximum) if (tipOverTop >= 1.0): return trunkEndZ capStart = Vector3(start.x, start.y, trunkEndZ) capEndZ = (trunkEndZ + ((topRadiusMaximum - tipRadius) / math.tan(topRadians))) cylinder.addCylinderOutputByEndStart(capEndZ, topRadiusComplex, outputs, sides, capStart, tipOverTop) return capEndZ
[ "def", "getTopAddBiconicOutput", "(", "bottomRadians", ",", "height", ",", "outputs", ",", "radius", ",", "sides", ",", "start", ",", "tipRadius", ",", "topRadians", ")", ":", "radiusMaximum", "=", "max", "(", "radius", ".", "real", ",", "radius", ".", "imag", ")", "topRadiusMaximum", "=", "(", "radiusMaximum", "-", "(", "height", "*", "math", ".", "tan", "(", "bottomRadians", ")", ")", ")", "trunkEndZ", "=", "(", "start", ".", "z", "+", "height", ")", "trunkTopOverBottom", "=", "(", "topRadiusMaximum", "/", "radiusMaximum", ")", "topRadiusComplex", "=", "(", "trunkTopOverBottom", "*", "radius", ")", "cylinder", ".", "addCylinderOutputByEndStart", "(", "trunkEndZ", ",", "radius", ",", "outputs", ",", "sides", ",", "start", ",", "trunkTopOverBottom", ")", "tipOverTop", "=", "(", "tipRadius", "/", "topRadiusMaximum", ")", "if", "(", "tipOverTop", ">=", "1.0", ")", ":", "return", "trunkEndZ", "capStart", "=", "Vector3", "(", "start", ".", "x", ",", "start", ".", "y", ",", "trunkEndZ", ")", "capEndZ", "=", "(", "trunkEndZ", "+", "(", "(", "topRadiusMaximum", "-", "tipRadius", ")", "/", "math", ".", "tan", "(", "topRadians", ")", ")", ")", "cylinder", ".", "addCylinderOutputByEndStart", "(", "capEndZ", ",", "topRadiusComplex", ",", "outputs", ",", "sides", ",", "capStart", ",", "tipOverTop", ")", "return", "capEndZ" ]
get top and add biconic cylinder to outputs .
train
false
37,355
def update_firewall_rule(firewall_rule, protocol=None, action=None, name=None, description=None, ip_version=None, source_ip_address=None, destination_ip_address=None, source_port=None, destination_port=None, shared=None, enabled=None, profile=None): conn = _auth(profile) return conn.update_firewall_rule(firewall_rule, protocol, action, name, description, ip_version, source_ip_address, destination_ip_address, source_port, destination_port, shared, enabled)
[ "def", "update_firewall_rule", "(", "firewall_rule", ",", "protocol", "=", "None", ",", "action", "=", "None", ",", "name", "=", "None", ",", "description", "=", "None", ",", "ip_version", "=", "None", ",", "source_ip_address", "=", "None", ",", "destination_ip_address", "=", "None", ",", "source_port", "=", "None", ",", "destination_port", "=", "None", ",", "shared", "=", "None", ",", "enabled", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "update_firewall_rule", "(", "firewall_rule", ",", "protocol", ",", "action", ",", "name", ",", "description", ",", "ip_version", ",", "source_ip_address", ",", "destination_ip_address", ",", "source_port", ",", "destination_port", ",", "shared", ",", "enabled", ")" ]
update a firewall rule cli example: .
train
true
37,356
def _numpy_eye(n): if (not np): raise ImportError return np.matrix(np.eye(n, dtype='complex'))
[ "def", "_numpy_eye", "(", "n", ")", ":", "if", "(", "not", "np", ")", ":", "raise", "ImportError", "return", "np", ".", "matrix", "(", "np", ".", "eye", "(", "n", ",", "dtype", "=", "'complex'", ")", ")" ]
numpy version of complex eye .
train
false
37,357
def scan_hostlist(hostlist, threads=5): task = threadpool.map_async(scan_host, hostlist) while True: print counter['Total'], 'hosts done' task.wait(1) if (task.ready() or hasattr(threadpool, 'done')): return threadpool.close() threadpool.join()
[ "def", "scan_hostlist", "(", "hostlist", ",", "threads", "=", "5", ")", ":", "task", "=", "threadpool", ".", "map_async", "(", "scan_host", ",", "hostlist", ")", "while", "True", ":", "print", "counter", "[", "'Total'", "]", ",", "'hosts done'", "task", ".", "wait", "(", "1", ")", "if", "(", "task", ".", "ready", "(", ")", "or", "hasattr", "(", "threadpool", ",", "'done'", ")", ")", ":", "return", "threadpool", ".", "close", "(", ")", "threadpool", ".", "join", "(", ")" ]
iterates through hostlist and scans them arguments: hostlist -- iterable with ip addresses threads -- if specified .
train
false
37,358
def postprocess_element(elements, processed): if (elements in processed): return processed.append(elements) for (k, v) in elements.items(): if isinstance(v, Struct): if (v != elements): try: postprocess_element(v, processed) except RuntimeError as e: warnings.warn(unicode(e), RuntimeWarning) if v.refers_to: if isinstance(v.refers_to, dict): extend_element(v, v.refers_to) v.refers_to = None else: elements[k] = v.refers_to if v.array: elements[k] = [v] if isinstance(v, list): for n in v: if isinstance(n, (Struct, list)): postprocess_element(n, processed)
[ "def", "postprocess_element", "(", "elements", ",", "processed", ")", ":", "if", "(", "elements", "in", "processed", ")", ":", "return", "processed", ".", "append", "(", "elements", ")", "for", "(", "k", ",", "v", ")", "in", "elements", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "Struct", ")", ":", "if", "(", "v", "!=", "elements", ")", ":", "try", ":", "postprocess_element", "(", "v", ",", "processed", ")", "except", "RuntimeError", "as", "e", ":", "warnings", ".", "warn", "(", "unicode", "(", "e", ")", ",", "RuntimeWarning", ")", "if", "v", ".", "refers_to", ":", "if", "isinstance", "(", "v", ".", "refers_to", ",", "dict", ")", ":", "extend_element", "(", "v", ",", "v", ".", "refers_to", ")", "v", ".", "refers_to", "=", "None", "else", ":", "elements", "[", "k", "]", "=", "v", ".", "refers_to", "if", "v", ".", "array", ":", "elements", "[", "k", "]", "=", "[", "v", "]", "if", "isinstance", "(", "v", ",", "list", ")", ":", "for", "n", "in", "v", ":", "if", "isinstance", "(", "n", ",", "(", "Struct", ",", "list", ")", ")", ":", "postprocess_element", "(", "n", ",", "processed", ")" ]
fix unresolved references .
train
false
37,359
def extract_intro(filename): (docstring, _) = get_docstring_and_rest(filename) paragraphs = docstring.lstrip().split('\n\n') if (len(paragraphs) > 1): first_paragraph = re.sub('\n', ' ', paragraphs[1]) first_paragraph = ((first_paragraph[:95] + '...') if (len(first_paragraph) > 95) else first_paragraph) else: raise ValueError('Example docstring should have a header for the example title and at least a paragraph explaining what the example is about. Please check the example file:\n {}\n'.format(filename)) return first_paragraph
[ "def", "extract_intro", "(", "filename", ")", ":", "(", "docstring", ",", "_", ")", "=", "get_docstring_and_rest", "(", "filename", ")", "paragraphs", "=", "docstring", ".", "lstrip", "(", ")", ".", "split", "(", "'\\n\\n'", ")", "if", "(", "len", "(", "paragraphs", ")", ">", "1", ")", ":", "first_paragraph", "=", "re", ".", "sub", "(", "'\\n'", ",", "' '", ",", "paragraphs", "[", "1", "]", ")", "first_paragraph", "=", "(", "(", "first_paragraph", "[", ":", "95", "]", "+", "'...'", ")", "if", "(", "len", "(", "first_paragraph", ")", ">", "95", ")", "else", "first_paragraph", ")", "else", ":", "raise", "ValueError", "(", "'Example docstring should have a header for the example title and at least a paragraph explaining what the example is about. Please check the example file:\\n {}\\n'", ".", "format", "(", "filename", ")", ")", "return", "first_paragraph" ]
extract the first paragraph of module-level docstring .
train
true
37,361
def list_resource_groups(conn=None, call=None): if (call == 'action'): raise SaltCloudSystemExit('The list_hosted_services function must be called with -f or --function') global resconn if (not resconn): resconn = get_conn(ResourceManagementClient) ret = {} region = get_location() bank = 'cloud/metadata/azurearm/{0}'.format(region) groups = cache.cache(bank, 'resource_groups', resconn.resource_groups.list, loop_fun=object_to_dict, expire=config.get_cloud_config_value('expire_group_cache', get_configured_provider(), __opts__, search_global=False, default=86400)) for group in groups: ret[group['name']] = group return ret
[ "def", "list_resource_groups", "(", "conn", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_hosted_services function must be called with -f or --function'", ")", "global", "resconn", "if", "(", "not", "resconn", ")", ":", "resconn", "=", "get_conn", "(", "ResourceManagementClient", ")", "ret", "=", "{", "}", "region", "=", "get_location", "(", ")", "bank", "=", "'cloud/metadata/azurearm/{0}'", ".", "format", "(", "region", ")", "groups", "=", "cache", ".", "cache", "(", "bank", ",", "'resource_groups'", ",", "resconn", ".", "resource_groups", ".", "list", ",", "loop_fun", "=", "object_to_dict", ",", "expire", "=", "config", ".", "get_cloud_config_value", "(", "'expire_group_cache'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "86400", ")", ")", "for", "group", "in", "groups", ":", "ret", "[", "group", "[", "'name'", "]", "]", "=", "group", "return", "ret" ]
list resource groups associated with the account .
train
false
37,362
def servicegroup_delete(sg_name, **connection_args): ret = True sg = _servicegroup_get(sg_name, **connection_args) if (sg is None): return False nitro = _connect(**connection_args) if (nitro is None): return False try: NSServiceGroup.delete(nitro, sg) except NSNitroError as error: log.debug('netscaler module error - NSServiceGroup.delete() failed: {0}'.format(error)) ret = False _disconnect(nitro) return ret
[ "def", "servicegroup_delete", "(", "sg_name", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "sg", "=", "_servicegroup_get", "(", "sg_name", ",", "**", "connection_args", ")", "if", "(", "sg", "is", "None", ")", ":", "return", "False", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "try", ":", "NSServiceGroup", ".", "delete", "(", "nitro", ",", "sg", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSServiceGroup.delete() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
delete a new service group cli example: .
train
true
37,364
def test_cursor_keys(): superConsole.SendKeys('outputRedirectStart{(}True{)}{ENTER}') testRegex = '' superConsole.SendKeys("print 'up'{ENTER}") testRegex += 'up' superConsole.SendKeys("print 'down'{ENTER}") testRegex += 'down' superConsole.SendKeys('{UP}{UP}{ENTER}') testRegex += 'up' superConsole.SendKeys('{DOWN}{ENTER}') testRegex += 'down' superConsole.SendKeys("print 'up'{ENTER}{UP}{ENTER}") testRegex += 'upup' superConsole.SendKeys("print 'awy{LEFT}{LEFT}{RIGHT}a{RIGHT}'{ENTER}") testRegex += 'away' superConsole.SendKeys("print 'bad'{ESC}print 'good'{ENTER}") testRegex += 'good' superConsole.SendKeys("rint 'hom'{HOME}p{END}{LEFT}e{ENTER}") testRegex += 'home' superConsole.SendKeys('outputRedirectStop{(}{)}{ENTER}') verifyResults(getTestOutput()[0], testRegex)
[ "def", "test_cursor_keys", "(", ")", ":", "superConsole", ".", "SendKeys", "(", "'outputRedirectStart{(}True{)}{ENTER}'", ")", "testRegex", "=", "''", "superConsole", ".", "SendKeys", "(", "\"print 'up'{ENTER}\"", ")", "testRegex", "+=", "'up'", "superConsole", ".", "SendKeys", "(", "\"print 'down'{ENTER}\"", ")", "testRegex", "+=", "'down'", "superConsole", ".", "SendKeys", "(", "'{UP}{UP}{ENTER}'", ")", "testRegex", "+=", "'up'", "superConsole", ".", "SendKeys", "(", "'{DOWN}{ENTER}'", ")", "testRegex", "+=", "'down'", "superConsole", ".", "SendKeys", "(", "\"print 'up'{ENTER}{UP}{ENTER}\"", ")", "testRegex", "+=", "'upup'", "superConsole", ".", "SendKeys", "(", "\"print 'awy{LEFT}{LEFT}{RIGHT}a{RIGHT}'{ENTER}\"", ")", "testRegex", "+=", "'away'", "superConsole", ".", "SendKeys", "(", "\"print 'bad'{ESC}print 'good'{ENTER}\"", ")", "testRegex", "+=", "'good'", "superConsole", ".", "SendKeys", "(", "\"rint 'hom'{HOME}p{END}{LEFT}e{ENTER}\"", ")", "testRegex", "+=", "'home'", "superConsole", ".", "SendKeys", "(", "'outputRedirectStop{(}{)}{ENTER}'", ")", "verifyResults", "(", "getTestOutput", "(", ")", "[", "0", "]", ",", "testRegex", ")" ]
cursor keys .
train
false
37,365
def _create_preference_update_error(preference_key, preference_value, error): return PreferenceUpdateError(developer_message=u"Save failed for user preference '{key}' with value '{value}': {error}".format(key=preference_key, value=preference_value, error=error), user_message=_(u"Save failed for user preference '{key}' with value '{value}'.").format(key=preference_key, value=preference_value))
[ "def", "_create_preference_update_error", "(", "preference_key", ",", "preference_value", ",", "error", ")", ":", "return", "PreferenceUpdateError", "(", "developer_message", "=", "u\"Save failed for user preference '{key}' with value '{value}': {error}\"", ".", "format", "(", "key", "=", "preference_key", ",", "value", "=", "preference_value", ",", "error", "=", "error", ")", ",", "user_message", "=", "_", "(", "u\"Save failed for user preference '{key}' with value '{value}'.\"", ")", ".", "format", "(", "key", "=", "preference_key", ",", "value", "=", "preference_value", ")", ")" ]
creates a preferenceupdateerror with developer_message and user_message .
train
false
37,366
def group_by_period(queryset, column, period, **annotate): d = OrderedDict() for line in queryset.extra({'period_group': connection.ops.date_trunc_sql(period, column)}).values('period_group').annotate(**annotate).order_by('period_group').values(*(['period_group'] + list(annotate.keys()))): d[parse_date(line.pop('period_group'))] = line return d
[ "def", "group_by_period", "(", "queryset", ",", "column", ",", "period", ",", "**", "annotate", ")", ":", "d", "=", "OrderedDict", "(", ")", "for", "line", "in", "queryset", ".", "extra", "(", "{", "'period_group'", ":", "connection", ".", "ops", ".", "date_trunc_sql", "(", "period", ",", "column", ")", "}", ")", ".", "values", "(", "'period_group'", ")", ".", "annotate", "(", "**", "annotate", ")", ".", "order_by", "(", "'period_group'", ")", ".", "values", "(", "*", "(", "[", "'period_group'", "]", "+", "list", "(", "annotate", ".", "keys", "(", ")", ")", ")", ")", ":", "d", "[", "parse_date", "(", "line", ".", "pop", "(", "'period_group'", ")", ")", "]", "=", "line", "return", "d" ]
group and annotate given queryset by a given date period .
train
false
37,368
def AROONOSC(barDs, count, timeperiod=(- (2 ** 31))): return call_talib_with_hl(barDs, count, talib.AROONOSC, timeperiod)
[ "def", "AROONOSC", "(", "barDs", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_hl", "(", "barDs", ",", "count", ",", "talib", ".", "AROONOSC", ",", "timeperiod", ")" ]
aroon oscillator .
train
false
37,370
@memoized def getPos(position, pagesize): position = str(position).split() if (len(position) != 2): raise Exception('position not defined right way') (x, y) = [getSize(pos) for pos in position] return getCoords(x, y, None, None, pagesize)
[ "@", "memoized", "def", "getPos", "(", "position", ",", "pagesize", ")", ":", "position", "=", "str", "(", "position", ")", ".", "split", "(", ")", "if", "(", "len", "(", "position", ")", "!=", "2", ")", ":", "raise", "Exception", "(", "'position not defined right way'", ")", "(", "x", ",", "y", ")", "=", "[", "getSize", "(", "pos", ")", "for", "pos", "in", "position", "]", "return", "getCoords", "(", "x", ",", "y", ",", "None", ",", "None", ",", "pagesize", ")" ]
pair of coordinates .
train
true
37,371
def _validate_throttle(throttle): if (throttle is not None): if (not isinstance(throttle, dict)): raise TypeError('throttle must be a dictionary, provided value: {0}'.format(throttle))
[ "def", "_validate_throttle", "(", "throttle", ")", ":", "if", "(", "throttle", "is", "not", "None", ")", ":", "if", "(", "not", "isinstance", "(", "throttle", ",", "dict", ")", ")", ":", "raise", "TypeError", "(", "'throttle must be a dictionary, provided value: {0}'", ".", "format", "(", "throttle", ")", ")" ]
helper to verify that throttling parameters are valid .
train
false
37,373
def commonprefix(m): if (not m): return '' s1 = min(m) s2 = max(m) n = min(len(s1), len(s2)) for i in xrange(n): if (s1[i] != s2[i]): return s1[:i] return s1[:n]
[ "def", "commonprefix", "(", "m", ")", ":", "if", "(", "not", "m", ")", ":", "return", "''", "s1", "=", "min", "(", "m", ")", "s2", "=", "max", "(", "m", ")", "n", "=", "min", "(", "len", "(", "s1", ")", ",", "len", "(", "s2", ")", ")", "for", "i", "in", "xrange", "(", "n", ")", ":", "if", "(", "s1", "[", "i", "]", "!=", "s2", "[", "i", "]", ")", ":", "return", "s1", "[", ":", "i", "]", "return", "s1", "[", ":", "n", "]" ]
given a list of pathnames .
train
false
37,375
def _fit_slices(clf, x_chunk, y, slices, cv_splits): from sklearn.base import clone n_epochs = len(x_chunk) estimators = list() values = np.unique([val for sl in slices for val in sl]) for t_slice in slices: t_slice = np.array([np.where((ii == values))[0][0] for ii in t_slice]) X = x_chunk[..., t_slice] X = X.reshape(n_epochs, np.prod(X.shape[1:])) estimators_ = list() for (fold, (train, test)) in enumerate(cv_splits): clf_ = clone(clf) clf_.fit(X[train, :], y[train]) estimators_.append(clf_) estimators.append(estimators_) return estimators
[ "def", "_fit_slices", "(", "clf", ",", "x_chunk", ",", "y", ",", "slices", ",", "cv_splits", ")", ":", "from", "sklearn", ".", "base", "import", "clone", "n_epochs", "=", "len", "(", "x_chunk", ")", "estimators", "=", "list", "(", ")", "values", "=", "np", ".", "unique", "(", "[", "val", "for", "sl", "in", "slices", "for", "val", "in", "sl", "]", ")", "for", "t_slice", "in", "slices", ":", "t_slice", "=", "np", ".", "array", "(", "[", "np", ".", "where", "(", "(", "ii", "==", "values", ")", ")", "[", "0", "]", "[", "0", "]", "for", "ii", "in", "t_slice", "]", ")", "X", "=", "x_chunk", "[", "...", ",", "t_slice", "]", "X", "=", "X", ".", "reshape", "(", "n_epochs", ",", "np", ".", "prod", "(", "X", ".", "shape", "[", "1", ":", "]", ")", ")", "estimators_", "=", "list", "(", ")", "for", "(", "fold", ",", "(", "train", ",", "test", ")", ")", "in", "enumerate", "(", "cv_splits", ")", ":", "clf_", "=", "clone", "(", "clf", ")", "clf_", ".", "fit", "(", "X", "[", "train", ",", ":", "]", ",", "y", "[", "train", "]", ")", "estimators_", ".", "append", "(", "clf_", ")", "estimators", ".", "append", "(", "estimators_", ")", "return", "estimators" ]
aux function of generalizationacrosstime .
train
false
37,377
@contextmanager def cache_server(return_failed=False, cache_root=None): queue = Queue() process = Process(target=_cache_server_process, args=(queue, return_failed, cache_root)) process.start() try: port = queue.get() (yield TestCacheServer(u'http://localhost:{0}'.format(port), cache_root)) finally: process.terminate()
[ "@", "contextmanager", "def", "cache_server", "(", "return_failed", "=", "False", ",", "cache_root", "=", "None", ")", ":", "queue", "=", "Queue", "(", ")", "process", "=", "Process", "(", "target", "=", "_cache_server_process", ",", "args", "=", "(", "queue", ",", "return_failed", ",", "cache_root", ")", ")", "process", ".", "start", "(", ")", "try", ":", "port", "=", "queue", ".", "get", "(", ")", "(", "yield", "TestCacheServer", "(", "u'http://localhost:{0}'", ".", "format", "(", "port", ")", ",", "cache_root", ")", ")", "finally", ":", "process", ".", "terminate", "(", ")" ]
a context manager which launches a temporary cache server on a random port .
train
false
37,378
def _init_atexit(): import atexit import thread atexit.register(thread.do_terminate_threads)
[ "def", "_init_atexit", "(", ")", ":", "import", "atexit", "import", "thread", "atexit", ".", "register", "(", "thread", ".", "do_terminate_threads", ")" ]
setup an at-exit job to be sure our workers are shutdown correctly before the interpreter quits .
train
false
37,380
def _filter_ctx_units(units_qs, unit, how_many, gap=0): result = {'before': [], 'after': []} if (how_many and ((unit.index - gap) > 0)): before = units_qs.filter(store=unit.store_id, index__lt=unit.index).order_by('-index')[gap:(how_many + gap)] result['before'] = _build_units_list(before, reverse=True) result['before'].reverse() if how_many: after = units_qs.filter(store=unit.store_id, index__gt=unit.index)[gap:(how_many + gap)] result['after'] = _build_units_list(after) return result
[ "def", "_filter_ctx_units", "(", "units_qs", ",", "unit", ",", "how_many", ",", "gap", "=", "0", ")", ":", "result", "=", "{", "'before'", ":", "[", "]", ",", "'after'", ":", "[", "]", "}", "if", "(", "how_many", "and", "(", "(", "unit", ".", "index", "-", "gap", ")", ">", "0", ")", ")", ":", "before", "=", "units_qs", ".", "filter", "(", "store", "=", "unit", ".", "store_id", ",", "index__lt", "=", "unit", ".", "index", ")", ".", "order_by", "(", "'-index'", ")", "[", "gap", ":", "(", "how_many", "+", "gap", ")", "]", "result", "[", "'before'", "]", "=", "_build_units_list", "(", "before", ",", "reverse", "=", "True", ")", "result", "[", "'before'", "]", ".", "reverse", "(", ")", "if", "how_many", ":", "after", "=", "units_qs", ".", "filter", "(", "store", "=", "unit", ".", "store_id", ",", "index__gt", "=", "unit", ".", "index", ")", "[", "gap", ":", "(", "how_many", "+", "gap", ")", "]", "result", "[", "'after'", "]", "=", "_build_units_list", "(", "after", ")", "return", "result" ]
returns how_many*2 units that are before and after index .
train
false
37,381
def read_xmp_identifers(parent): for li in XPath(u'./rdf:Bag/rdf:li')(parent): is_resource = (li.attrib.get(expand(u'rdf:parseType'), None) == u'Resource') is_resource = (is_resource or ((len(li) == 1) and (li[0].tag == expand(u'rdf:Description')))) if (not is_resource): (yield (None, (li.text or u''))) value = XPath(u'descendant::rdf:value')(li) if (not value): continue value = (value[0].text or u'') scheme = XPath(u'descendant::xmpidq:Scheme')(li) if (not scheme): (yield (None, value)) else: (yield ((scheme[0].text or u''), value))
[ "def", "read_xmp_identifers", "(", "parent", ")", ":", "for", "li", "in", "XPath", "(", "u'./rdf:Bag/rdf:li'", ")", "(", "parent", ")", ":", "is_resource", "=", "(", "li", ".", "attrib", ".", "get", "(", "expand", "(", "u'rdf:parseType'", ")", ",", "None", ")", "==", "u'Resource'", ")", "is_resource", "=", "(", "is_resource", "or", "(", "(", "len", "(", "li", ")", "==", "1", ")", "and", "(", "li", "[", "0", "]", ".", "tag", "==", "expand", "(", "u'rdf:Description'", ")", ")", ")", ")", "if", "(", "not", "is_resource", ")", ":", "(", "yield", "(", "None", ",", "(", "li", ".", "text", "or", "u''", ")", ")", ")", "value", "=", "XPath", "(", "u'descendant::rdf:value'", ")", "(", "li", ")", "if", "(", "not", "value", ")", ":", "continue", "value", "=", "(", "value", "[", "0", "]", ".", "text", "or", "u''", ")", "scheme", "=", "XPath", "(", "u'descendant::xmpidq:Scheme'", ")", "(", "li", ")", "if", "(", "not", "scheme", ")", ":", "(", "yield", "(", "None", ",", "value", ")", ")", "else", ":", "(", "yield", "(", "(", "scheme", "[", "0", "]", ".", "text", "or", "u''", ")", ",", "value", ")", ")" ]
for example: <rdf:li rdf:parsetype="resource"><xmpidq:scheme>url</xmp:idq><rdf:value>URL or the longer form: <rdf:li><rdf:description><xmpidq:scheme>url</xmp:idq><rdf:value>URL .
train
false
37,382
def make_function_type(cfnptr): if (cfnptr.argtypes is None): raise TypeError(("ctypes function %r doesn't define its argument types; consider setting the `argtypes` attribute" % (cfnptr.__name__,))) cargs = [from_ctypes(a) for a in cfnptr.argtypes] cret = from_ctypes(cfnptr.restype) if ((sys.platform == 'win32') and (not (cfnptr._flags_ & ctypes._FUNCFLAG_CDECL))): cconv = 'x86_stdcallcc' else: cconv = None sig = templates.signature(cret, *cargs) return types.ExternalFunctionPointer(sig, cconv=cconv, get_pointer=get_pointer)
[ "def", "make_function_type", "(", "cfnptr", ")", ":", "if", "(", "cfnptr", ".", "argtypes", "is", "None", ")", ":", "raise", "TypeError", "(", "(", "\"ctypes function %r doesn't define its argument types; consider setting the `argtypes` attribute\"", "%", "(", "cfnptr", ".", "__name__", ",", ")", ")", ")", "cargs", "=", "[", "from_ctypes", "(", "a", ")", "for", "a", "in", "cfnptr", ".", "argtypes", "]", "cret", "=", "from_ctypes", "(", "cfnptr", ".", "restype", ")", "if", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "and", "(", "not", "(", "cfnptr", ".", "_flags_", "&", "ctypes", ".", "_FUNCFLAG_CDECL", ")", ")", ")", ":", "cconv", "=", "'x86_stdcallcc'", "else", ":", "cconv", "=", "None", "sig", "=", "templates", ".", "signature", "(", "cret", ",", "*", "cargs", ")", "return", "types", ".", "ExternalFunctionPointer", "(", "sig", ",", "cconv", "=", "cconv", ",", "get_pointer", "=", "get_pointer", ")" ]
return a numba type for the given ctypes function pointer .
train
false
37,383
def _is_legacy_mode(config): write_url = config.get('ckan.datastore.write_url') engine = db._get_engine({'connection_url': write_url}) connection = engine.connect() return ((not config.get('ckan.datastore.read_url')) or (not db._pg_version_is_at_least(connection, '9.0')))
[ "def", "_is_legacy_mode", "(", "config", ")", ":", "write_url", "=", "config", ".", "get", "(", "'ckan.datastore.write_url'", ")", "engine", "=", "db", ".", "_get_engine", "(", "{", "'connection_url'", ":", "write_url", "}", ")", "connection", "=", "engine", ".", "connect", "(", ")", "return", "(", "(", "not", "config", ".", "get", "(", "'ckan.datastore.read_url'", ")", ")", "or", "(", "not", "db", ".", "_pg_version_is_at_least", "(", "connection", ",", "'9.0'", ")", ")", ")" ]
decides if the datastore should run on legacy mode returns true if ckan .
train
false
37,384
def libvlc_media_player_previous_chapter(p_mi): f = (_Cfunctions.get('libvlc_media_player_previous_chapter', None) or _Cfunction('libvlc_media_player_previous_chapter', ((1,),), None, None, MediaPlayer)) return f(p_mi)
[ "def", "libvlc_media_player_previous_chapter", "(", "p_mi", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_previous_chapter'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_previous_chapter'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaPlayer", ")", ")", "return", "f", "(", "p_mi", ")" ]
set previous chapter .
train
false
37,388
def get_supported_types(): from datetime import date editable_types = (([int, float, complex, list, dict, tuple, date] + list(TEXT_TYPES)) + list(INT_TYPES)) try: from numpy import ndarray, matrix, generic editable_types += [ndarray, matrix, generic] except ImportError: pass try: from pandas import DataFrame, Series, DatetimeIndex editable_types += [DataFrame, Series, DatetimeIndex] except ImportError: pass picklable_types = editable_types[:] try: from spyder.pil_patch import Image editable_types.append(Image.Image) except ImportError: pass return dict(picklable=picklable_types, editable=editable_types)
[ "def", "get_supported_types", "(", ")", ":", "from", "datetime", "import", "date", "editable_types", "=", "(", "(", "[", "int", ",", "float", ",", "complex", ",", "list", ",", "dict", ",", "tuple", ",", "date", "]", "+", "list", "(", "TEXT_TYPES", ")", ")", "+", "list", "(", "INT_TYPES", ")", ")", "try", ":", "from", "numpy", "import", "ndarray", ",", "matrix", ",", "generic", "editable_types", "+=", "[", "ndarray", ",", "matrix", ",", "generic", "]", "except", "ImportError", ":", "pass", "try", ":", "from", "pandas", "import", "DataFrame", ",", "Series", ",", "DatetimeIndex", "editable_types", "+=", "[", "DataFrame", ",", "Series", ",", "DatetimeIndex", "]", "except", "ImportError", ":", "pass", "picklable_types", "=", "editable_types", "[", ":", "]", "try", ":", "from", "spyder", ".", "pil_patch", "import", "Image", "editable_types", ".", "append", "(", "Image", ".", "Image", ")", "except", "ImportError", ":", "pass", "return", "dict", "(", "picklable", "=", "picklable_types", ",", "editable", "=", "editable_types", ")" ]
return a dictionnary containing types lists supported by the namespace browser: dict see: get_remote_data function in spyder/widgets/variableexplorer/utils/monitor .
train
true
37,389
def default_app_action(parent, fn): action = cmd_action(parent, cmds.OpenDefaultApp, fn, hotkeys.PRIMARY_ACTION) action.setIcon(icons.default_app()) return action
[ "def", "default_app_action", "(", "parent", ",", "fn", ")", ":", "action", "=", "cmd_action", "(", "parent", ",", "cmds", ".", "OpenDefaultApp", ",", "fn", ",", "hotkeys", ".", "PRIMARY_ACTION", ")", "action", ".", "setIcon", "(", "icons", ".", "default_app", "(", ")", ")", "return", "action" ]
open paths with the os-default app -> qaction .
train
false
37,390
def set_language(request): next = request.POST.get('next', request.GET.get('next')) if ((next or (not request.is_ajax())) and (not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure()))): next = request.META.get('HTTP_REFERER') if next: next = urlunquote(next) if (not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure())): next = '/' response = (http.HttpResponseRedirect(next) if next else http.HttpResponse(status=204)) if (request.method == 'POST'): lang_code = request.POST.get(LANGUAGE_QUERY_PARAMETER) if (lang_code and check_for_language(lang_code)): if next: next_trans = translate_url(next, lang_code) if (next_trans != next): response = http.HttpResponseRedirect(next_trans) if hasattr(request, 'session'): request.session[LANGUAGE_SESSION_KEY] = lang_code else: response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code, max_age=settings.LANGUAGE_COOKIE_AGE, path=settings.LANGUAGE_COOKIE_PATH, domain=settings.LANGUAGE_COOKIE_DOMAIN) return response
[ "def", "set_language", "(", "request", ")", ":", "next", "=", "request", ".", "POST", ".", "get", "(", "'next'", ",", "request", ".", "GET", ".", "get", "(", "'next'", ")", ")", "if", "(", "(", "next", "or", "(", "not", "request", ".", "is_ajax", "(", ")", ")", ")", "and", "(", "not", "is_safe_url", "(", "url", "=", "next", ",", "allowed_hosts", "=", "{", "request", ".", "get_host", "(", ")", "}", ",", "require_https", "=", "request", ".", "is_secure", "(", ")", ")", ")", ")", ":", "next", "=", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ")", "if", "next", ":", "next", "=", "urlunquote", "(", "next", ")", "if", "(", "not", "is_safe_url", "(", "url", "=", "next", ",", "allowed_hosts", "=", "{", "request", ".", "get_host", "(", ")", "}", ",", "require_https", "=", "request", ".", "is_secure", "(", ")", ")", ")", ":", "next", "=", "'/'", "response", "=", "(", "http", ".", "HttpResponseRedirect", "(", "next", ")", "if", "next", "else", "http", ".", "HttpResponse", "(", "status", "=", "204", ")", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "lang_code", "=", "request", ".", "POST", ".", "get", "(", "LANGUAGE_QUERY_PARAMETER", ")", "if", "(", "lang_code", "and", "check_for_language", "(", "lang_code", ")", ")", ":", "if", "next", ":", "next_trans", "=", "translate_url", "(", "next", ",", "lang_code", ")", "if", "(", "next_trans", "!=", "next", ")", ":", "response", "=", "http", ".", "HttpResponseRedirect", "(", "next_trans", ")", "if", "hasattr", "(", "request", ",", "'session'", ")", ":", "request", ".", "session", "[", "LANGUAGE_SESSION_KEY", "]", "=", "lang_code", "else", ":", "response", ".", "set_cookie", "(", "settings", ".", "LANGUAGE_COOKIE_NAME", ",", "lang_code", ",", "max_age", "=", "settings", ".", "LANGUAGE_COOKIE_AGE", ",", "path", "=", "settings", ".", "LANGUAGE_COOKIE_PATH", ",", "domain", "=", "settings", ".", "LANGUAGE_COOKIE_DOMAIN", ")", "return", "response" ]
redirect to a given url while setting the chosen language in the session or cookie .
train
false
37,392
def _run_active(callback, games): global _last if (len(games) == 0): return False (active, completed) = ([], []) for info in games: game = nflgame.game.Game(info['eid']) if (game is None): continue if game.game_over(): completed.append(game) _completed.append(info['eid']) else: active.append(game) diffs = [] for game in (active + completed): for last_game in (_last or []): if (game.eid != last_game.eid): continue diffs.append((game - last_game)) _last = active callback(active, completed, diffs) return True
[ "def", "_run_active", "(", "callback", ",", "games", ")", ":", "global", "_last", "if", "(", "len", "(", "games", ")", "==", "0", ")", ":", "return", "False", "(", "active", ",", "completed", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", "info", "in", "games", ":", "game", "=", "nflgame", ".", "game", ".", "Game", "(", "info", "[", "'eid'", "]", ")", "if", "(", "game", "is", "None", ")", ":", "continue", "if", "game", ".", "game_over", "(", ")", ":", "completed", ".", "append", "(", "game", ")", "_completed", ".", "append", "(", "info", "[", "'eid'", "]", ")", "else", ":", "active", ".", "append", "(", "game", ")", "diffs", "=", "[", "]", "for", "game", "in", "(", "active", "+", "completed", ")", ":", "for", "last_game", "in", "(", "_last", "or", "[", "]", ")", ":", "if", "(", "game", ".", "eid", "!=", "last_game", ".", "eid", ")", ":", "continue", "diffs", ".", "append", "(", "(", "game", "-", "last_game", ")", ")", "_last", "=", "active", "callback", "(", "active", ",", "completed", ",", "diffs", ")", "return", "True" ]
the active mode traverses each of the active games and fetches info for each from nfl .
train
false
37,394
def detachRequestMsOriginating(): a = TpPd(pd=3) b = MessageType(mesType=5) c = DetachTypeAndSpareHalfOctets() packet = ((a / b) / c) return packet
[ "def", "detachRequestMsOriginating", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "5", ")", "c", "=", "DetachTypeAndSpareHalfOctets", "(", ")", "packet", "=", "(", "(", "a", "/", "b", ")", "/", "c", ")", "return", "packet" ]
detach request section 9 .
train
true
37,395
@pytest.mark.django_db def test_project_save_wrong_checker(project0): project0.checkstyle = 'foobar' with pytest.raises(ValidationError): project0.save()
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_project_save_wrong_checker", "(", "project0", ")", ":", "project0", ".", "checkstyle", "=", "'foobar'", "with", "pytest", ".", "raises", "(", "ValidationError", ")", ":", "project0", ".", "save", "(", ")" ]
test that an existing project cant be removed its code .
train
false
37,396
def __hash_new(name, string=''): try: return _hashlib.new(name, string) except ValueError: return __get_builtin_constructor(name)(string)
[ "def", "__hash_new", "(", "name", ",", "string", "=", "''", ")", ":", "try", ":", "return", "_hashlib", ".", "new", "(", "name", ",", "string", ")", "except", "ValueError", ":", "return", "__get_builtin_constructor", "(", "name", ")", "(", "string", ")" ]
new - return a new hashing object using the named algorithm; optionally initialized with data .
train
false
37,398
def conjugate_gauss_beams(wavelen, waist_in, waist_out, **kwargs): (wavelen, waist_in, waist_out) = map(sympify, (wavelen, waist_in, waist_out)) m = (waist_out / waist_in) z = waist2rayleigh(waist_in, wavelen) if (len(kwargs) != 1): raise ValueError('The function expects only one named argument') elif ('dist' in kwargs): raise NotImplementedError(filldedent('\n Currently only focal length is supported as a parameter')) elif ('f' in kwargs): f = sympify(kwargs['f']) s_in = (f * (1 - sqrt(((1 / (m ** 2)) - ((z ** 2) / (f ** 2)))))) s_out = gaussian_conj(s_in, z, f)[0] elif ('s_in' in kwargs): raise NotImplementedError(filldedent('\n Currently only focal length is supported as a parameter')) else: raise ValueError(filldedent('\n The functions expects the focal length as a named argument')) return (s_in, s_out, f)
[ "def", "conjugate_gauss_beams", "(", "wavelen", ",", "waist_in", ",", "waist_out", ",", "**", "kwargs", ")", ":", "(", "wavelen", ",", "waist_in", ",", "waist_out", ")", "=", "map", "(", "sympify", ",", "(", "wavelen", ",", "waist_in", ",", "waist_out", ")", ")", "m", "=", "(", "waist_out", "/", "waist_in", ")", "z", "=", "waist2rayleigh", "(", "waist_in", ",", "wavelen", ")", "if", "(", "len", "(", "kwargs", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "'The function expects only one named argument'", ")", "elif", "(", "'dist'", "in", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "filldedent", "(", "'\\n Currently only focal length is supported as a parameter'", ")", ")", "elif", "(", "'f'", "in", "kwargs", ")", ":", "f", "=", "sympify", "(", "kwargs", "[", "'f'", "]", ")", "s_in", "=", "(", "f", "*", "(", "1", "-", "sqrt", "(", "(", "(", "1", "/", "(", "m", "**", "2", ")", ")", "-", "(", "(", "z", "**", "2", ")", "/", "(", "f", "**", "2", ")", ")", ")", ")", ")", ")", "s_out", "=", "gaussian_conj", "(", "s_in", ",", "z", ",", "f", ")", "[", "0", "]", "elif", "(", "'s_in'", "in", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "filldedent", "(", "'\\n Currently only focal length is supported as a parameter'", ")", ")", "else", ":", "raise", "ValueError", "(", "filldedent", "(", "'\\n The functions expects the focal length as a named argument'", ")", ")", "return", "(", "s_in", ",", "s_out", ",", "f", ")" ]
find the optical setup conjugating the object/image waists .
train
false
37,399
@no_auto_transaction @must_be_valid_project @must_have_permission(ADMIN) @must_not_be_registration def project_manage_contributors(auth, node, **kwargs): contributors = request.json.get('contributors') try: node.manage_contributors(contributors, auth=auth, save=True) except (ValueError, NodeStateError) as error: raise HTTPError(http.BAD_REQUEST, data={'message_long': error.args[0]}) if (not node.is_contributor(auth.user)): status.push_status_message('You have removed yourself as a contributor from this project', kind='success', trust=False) if node.is_public: return {'redirectUrl': node.url} return {'redirectUrl': web_url_for('dashboard')} if (not node.has_permission(auth.user, ADMIN)): status.push_status_message('You have removed your administrative privileges for this project', kind='success', trust=False) return {}
[ "@", "no_auto_transaction", "@", "must_be_valid_project", "@", "must_have_permission", "(", "ADMIN", ")", "@", "must_not_be_registration", "def", "project_manage_contributors", "(", "auth", ",", "node", ",", "**", "kwargs", ")", ":", "contributors", "=", "request", ".", "json", ".", "get", "(", "'contributors'", ")", "try", ":", "node", ".", "manage_contributors", "(", "contributors", ",", "auth", "=", "auth", ",", "save", "=", "True", ")", "except", "(", "ValueError", ",", "NodeStateError", ")", "as", "error", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ",", "data", "=", "{", "'message_long'", ":", "error", ".", "args", "[", "0", "]", "}", ")", "if", "(", "not", "node", ".", "is_contributor", "(", "auth", ".", "user", ")", ")", ":", "status", ".", "push_status_message", "(", "'You have removed yourself as a contributor from this project'", ",", "kind", "=", "'success'", ",", "trust", "=", "False", ")", "if", "node", ".", "is_public", ":", "return", "{", "'redirectUrl'", ":", "node", ".", "url", "}", "return", "{", "'redirectUrl'", ":", "web_url_for", "(", "'dashboard'", ")", "}", "if", "(", "not", "node", ".", "has_permission", "(", "auth", ".", "user", ",", "ADMIN", ")", ")", ":", "status", ".", "push_status_message", "(", "'You have removed your administrative privileges for this project'", ",", "kind", "=", "'success'", ",", "trust", "=", "False", ")", "return", "{", "}" ]
reorder and remove contributors .
train
false
37,400
@db_api.retry_if_session_inactive() def get_reservations_for_resources(context, tenant_id, resources, expired=False): if (not resources): return now = utcnow() resv_query = context.session.query(quota_models.ResourceDelta.resource, quota_models.Reservation.expiration, sql.func.sum(quota_models.ResourceDelta.amount)).join(quota_models.Reservation) if expired: exp_expr = (quota_models.Reservation.expiration < now) else: exp_expr = (quota_models.Reservation.expiration >= now) resv_query = resv_query.filter(sa.and_((quota_models.Reservation.tenant_id == tenant_id), quota_models.ResourceDelta.resource.in_(resources), exp_expr)).group_by(quota_models.ResourceDelta.resource, quota_models.Reservation.expiration) return dict(((resource, total_reserved) for (resource, exp, total_reserved) in resv_query))
[ "@", "db_api", ".", "retry_if_session_inactive", "(", ")", "def", "get_reservations_for_resources", "(", "context", ",", "tenant_id", ",", "resources", ",", "expired", "=", "False", ")", ":", "if", "(", "not", "resources", ")", ":", "return", "now", "=", "utcnow", "(", ")", "resv_query", "=", "context", ".", "session", ".", "query", "(", "quota_models", ".", "ResourceDelta", ".", "resource", ",", "quota_models", ".", "Reservation", ".", "expiration", ",", "sql", ".", "func", ".", "sum", "(", "quota_models", ".", "ResourceDelta", ".", "amount", ")", ")", ".", "join", "(", "quota_models", ".", "Reservation", ")", "if", "expired", ":", "exp_expr", "=", "(", "quota_models", ".", "Reservation", ".", "expiration", "<", "now", ")", "else", ":", "exp_expr", "=", "(", "quota_models", ".", "Reservation", ".", "expiration", ">=", "now", ")", "resv_query", "=", "resv_query", ".", "filter", "(", "sa", ".", "and_", "(", "(", "quota_models", ".", "Reservation", ".", "tenant_id", "==", "tenant_id", ")", ",", "quota_models", ".", "ResourceDelta", ".", "resource", ".", "in_", "(", "resources", ")", ",", "exp_expr", ")", ")", ".", "group_by", "(", "quota_models", ".", "ResourceDelta", ".", "resource", ",", "quota_models", ".", "Reservation", ".", "expiration", ")", "return", "dict", "(", "(", "(", "resource", ",", "total_reserved", ")", "for", "(", "resource", ",", "exp", ",", "total_reserved", ")", "in", "resv_query", ")", ")" ]
retrieve total amount of reservations for specified resources .
train
false
37,401
@scopes.add_arg_scope def batch_norm(inputs, decay=0.999, center=True, scale=False, epsilon=0.001, moving_vars='moving_vars', activation=None, is_training=True, trainable=True, restore=True, scope=None, reuse=None): inputs_shape = inputs.get_shape() with tf.variable_scope(scope, 'BatchNorm', [inputs], reuse=reuse): axis = list(range((len(inputs_shape) - 1))) params_shape = inputs_shape[(-1):] (beta, gamma) = (None, None) if center: beta = variables.variable('beta', params_shape, initializer=tf.zeros_initializer(), trainable=trainable, restore=restore) if scale: gamma = variables.variable('gamma', params_shape, initializer=tf.ones_initializer(), trainable=trainable, restore=restore) moving_collections = [moving_vars, tf.GraphKeys.MOVING_AVERAGE_VARIABLES] moving_mean = variables.variable('moving_mean', params_shape, initializer=tf.zeros_initializer(), trainable=False, restore=restore, collections=moving_collections) moving_variance = variables.variable('moving_variance', params_shape, initializer=tf.ones_initializer(), trainable=False, restore=restore, collections=moving_collections) if is_training: (mean, variance) = tf.nn.moments(inputs, axis) update_moving_mean = moving_averages.assign_moving_average(moving_mean, mean, decay) tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_mean) update_moving_variance = moving_averages.assign_moving_average(moving_variance, variance, decay) tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_variance) else: mean = moving_mean variance = moving_variance outputs = tf.nn.batch_normalization(inputs, mean, variance, beta, gamma, epsilon) outputs.set_shape(inputs.get_shape()) if activation: outputs = activation(outputs) return outputs
[ "@", "scopes", ".", "add_arg_scope", "def", "batch_norm", "(", "inputs", ",", "decay", "=", "0.999", ",", "center", "=", "True", ",", "scale", "=", "False", ",", "epsilon", "=", "0.001", ",", "moving_vars", "=", "'moving_vars'", ",", "activation", "=", "None", ",", "is_training", "=", "True", ",", "trainable", "=", "True", ",", "restore", "=", "True", ",", "scope", "=", "None", ",", "reuse", "=", "None", ")", ":", "inputs_shape", "=", "inputs", ".", "get_shape", "(", ")", "with", "tf", ".", "variable_scope", "(", "scope", ",", "'BatchNorm'", ",", "[", "inputs", "]", ",", "reuse", "=", "reuse", ")", ":", "axis", "=", "list", "(", "range", "(", "(", "len", "(", "inputs_shape", ")", "-", "1", ")", ")", ")", "params_shape", "=", "inputs_shape", "[", "(", "-", "1", ")", ":", "]", "(", "beta", ",", "gamma", ")", "=", "(", "None", ",", "None", ")", "if", "center", ":", "beta", "=", "variables", ".", "variable", "(", "'beta'", ",", "params_shape", ",", "initializer", "=", "tf", ".", "zeros_initializer", "(", ")", ",", "trainable", "=", "trainable", ",", "restore", "=", "restore", ")", "if", "scale", ":", "gamma", "=", "variables", ".", "variable", "(", "'gamma'", ",", "params_shape", ",", "initializer", "=", "tf", ".", "ones_initializer", "(", ")", ",", "trainable", "=", "trainable", ",", "restore", "=", "restore", ")", "moving_collections", "=", "[", "moving_vars", ",", "tf", ".", "GraphKeys", ".", "MOVING_AVERAGE_VARIABLES", "]", "moving_mean", "=", "variables", ".", "variable", "(", "'moving_mean'", ",", "params_shape", ",", "initializer", "=", "tf", ".", "zeros_initializer", "(", ")", ",", "trainable", "=", "False", ",", "restore", "=", "restore", ",", "collections", "=", "moving_collections", ")", "moving_variance", "=", "variables", ".", "variable", "(", "'moving_variance'", ",", "params_shape", ",", "initializer", "=", "tf", ".", "ones_initializer", "(", ")", ",", "trainable", "=", "False", ",", "restore", "=", "restore", ",", "collections", "=", 
"moving_collections", ")", "if", "is_training", ":", "(", "mean", ",", "variance", ")", "=", "tf", ".", "nn", ".", "moments", "(", "inputs", ",", "axis", ")", "update_moving_mean", "=", "moving_averages", ".", "assign_moving_average", "(", "moving_mean", ",", "mean", ",", "decay", ")", "tf", ".", "add_to_collection", "(", "UPDATE_OPS_COLLECTION", ",", "update_moving_mean", ")", "update_moving_variance", "=", "moving_averages", ".", "assign_moving_average", "(", "moving_variance", ",", "variance", ",", "decay", ")", "tf", ".", "add_to_collection", "(", "UPDATE_OPS_COLLECTION", ",", "update_moving_variance", ")", "else", ":", "mean", "=", "moving_mean", "variance", "=", "moving_variance", "outputs", "=", "tf", ".", "nn", ".", "batch_normalization", "(", "inputs", ",", "mean", ",", "variance", ",", "beta", ",", "gamma", ",", "epsilon", ")", "outputs", ".", "set_shape", "(", "inputs", ".", "get_shape", "(", ")", ")", "if", "activation", ":", "outputs", "=", "activation", "(", "outputs", ")", "return", "outputs" ]
apply batch normalization to an existing layer .
train
true
37,402
def _dmi_parse(data, clean=True, fields=None): dmi = [] dmi_split = re.compile('(handle [0-9]x[0-9a-f]+[^\n]+)\n', (re.MULTILINE + re.IGNORECASE)) dmi_raw = iter(re.split(dmi_split, data)[1:]) for (handle, dmi_raw) in zip(dmi_raw, dmi_raw): (handle, htype) = [hline.split()[(-1)] for hline in handle.split(',')][0:2] dmi_raw = dmi_raw.split('\n') log.debug('Parsing handle {0}'.format(handle)) record = {'handle': handle, 'description': dmi_raw.pop(0).strip(), 'type': int(htype)} if (not len(dmi_raw)): if (not clean): dmi.append(record) continue dmi_data = _dmi_data(dmi_raw, clean, fields) if len(dmi_data): record['data'] = dmi_data dmi.append(record) elif (not clean): dmi.append(record) return dmi
[ "def", "_dmi_parse", "(", "data", ",", "clean", "=", "True", ",", "fields", "=", "None", ")", ":", "dmi", "=", "[", "]", "dmi_split", "=", "re", ".", "compile", "(", "'(handle [0-9]x[0-9a-f]+[^\\n]+)\\n'", ",", "(", "re", ".", "MULTILINE", "+", "re", ".", "IGNORECASE", ")", ")", "dmi_raw", "=", "iter", "(", "re", ".", "split", "(", "dmi_split", ",", "data", ")", "[", "1", ":", "]", ")", "for", "(", "handle", ",", "dmi_raw", ")", "in", "zip", "(", "dmi_raw", ",", "dmi_raw", ")", ":", "(", "handle", ",", "htype", ")", "=", "[", "hline", ".", "split", "(", ")", "[", "(", "-", "1", ")", "]", "for", "hline", "in", "handle", ".", "split", "(", "','", ")", "]", "[", "0", ":", "2", "]", "dmi_raw", "=", "dmi_raw", ".", "split", "(", "'\\n'", ")", "log", ".", "debug", "(", "'Parsing handle {0}'", ".", "format", "(", "handle", ")", ")", "record", "=", "{", "'handle'", ":", "handle", ",", "'description'", ":", "dmi_raw", ".", "pop", "(", "0", ")", ".", "strip", "(", ")", ",", "'type'", ":", "int", "(", "htype", ")", "}", "if", "(", "not", "len", "(", "dmi_raw", ")", ")", ":", "if", "(", "not", "clean", ")", ":", "dmi", ".", "append", "(", "record", ")", "continue", "dmi_data", "=", "_dmi_data", "(", "dmi_raw", ",", "clean", ",", "fields", ")", "if", "len", "(", "dmi_data", ")", ":", "record", "[", "'data'", "]", "=", "dmi_data", "dmi", ".", "append", "(", "record", ")", "elif", "(", "not", "clean", ")", ":", "dmi", ".", "append", "(", "record", ")", "return", "dmi" ]
structurize dmi records into a nice list optionally trash bogus entries and filter output .
train
true
37,403
def set_signals(sig_handler_dict): for (s, h) in sig_handler_dict.items(): signal.signal(s, h)
[ "def", "set_signals", "(", "sig_handler_dict", ")", ":", "for", "(", "s", ",", "h", ")", "in", "sig_handler_dict", ".", "items", "(", ")", ":", "signal", ".", "signal", "(", "s", ",", "h", ")" ]
set the signal with the handler from the input dict .
train
false
37,404
def get_more(collection_name, num_to_return, cursor_id): data = _ZERO_32 data += bson._make_c_string(collection_name) data += struct.pack('<i', num_to_return) data += struct.pack('<q', cursor_id) return __pack_message(2005, data)
[ "def", "get_more", "(", "collection_name", ",", "num_to_return", ",", "cursor_id", ")", ":", "data", "=", "_ZERO_32", "data", "+=", "bson", ".", "_make_c_string", "(", "collection_name", ")", "data", "+=", "struct", ".", "pack", "(", "'<i'", ",", "num_to_return", ")", "data", "+=", "struct", ".", "pack", "(", "'<q'", ",", "cursor_id", ")", "return", "__pack_message", "(", "2005", ",", "data", ")" ]
get a **getmore** message .
train
true
37,405
def iter_mode(n, obj='ndarray'): for mode in cap[obj][MODE]: for char in fmtdict[mode]: (yield randitems(n, obj, mode, char))
[ "def", "iter_mode", "(", "n", ",", "obj", "=", "'ndarray'", ")", ":", "for", "mode", "in", "cap", "[", "obj", "]", "[", "MODE", "]", ":", "for", "char", "in", "fmtdict", "[", "mode", "]", ":", "(", "yield", "randitems", "(", "n", ",", "obj", ",", "mode", ",", "char", ")", ")" ]
iterate through supported mode/char combinations .
train
false
37,406
def generate_in_background(generator, num_cached=10): import Queue queue = Queue.Queue(maxsize=num_cached) sentinel = object() def producer(): for item in generator: queue.put(item) queue.put(sentinel) import threading thread = threading.Thread(target=producer) thread.daemon = True thread.start() item = queue.get() while (item is not sentinel): (yield item) item = queue.get()
[ "def", "generate_in_background", "(", "generator", ",", "num_cached", "=", "10", ")", ":", "import", "Queue", "queue", "=", "Queue", ".", "Queue", "(", "maxsize", "=", "num_cached", ")", "sentinel", "=", "object", "(", ")", "def", "producer", "(", ")", ":", "for", "item", "in", "generator", ":", "queue", ".", "put", "(", "item", ")", "queue", ".", "put", "(", "sentinel", ")", "import", "threading", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "producer", ")", "thread", ".", "daemon", "=", "True", "thread", ".", "start", "(", ")", "item", "=", "queue", ".", "get", "(", ")", "while", "(", "item", "is", "not", "sentinel", ")", ":", "(", "yield", "item", ")", "item", "=", "queue", ".", "get", "(", ")" ]
runs a generator in a background thread .
train
false
37,407
def volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id): return IMPL.volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id)
[ "def", "volume_glance_metadata_copy_to_volume", "(", "context", ",", "volume_id", ",", "snapshot_id", ")", ":", "return", "IMPL", ".", "volume_glance_metadata_copy_to_volume", "(", "context", ",", "volume_id", ",", "snapshot_id", ")" ]
update glance metadata from a volume .
train
false
37,408
def print_jscode(expr, **settings): print(jscode(expr, **settings))
[ "def", "print_jscode", "(", "expr", ",", "**", "settings", ")", ":", "print", "(", "jscode", "(", "expr", ",", "**", "settings", ")", ")" ]
prints the javascript representation of the given expression .
train
false
37,409
def _plot_update_epochs_proj(params, bools=None): if (bools is not None): inds = np.where(bools)[0] params['info']['projs'] = [copy.deepcopy(params['projs'][ii]) for ii in inds] params['proj_bools'] = bools (params['projector'], _) = setup_proj(params['info'], add_eeg_ref=False, verbose=False) start = int((params['t_start'] / len(params['epochs'].times))) n_epochs = params['n_epochs'] end = (start + n_epochs) data = np.concatenate(params['epochs'][start:end].get_data(), axis=1) if (params['projector'] is not None): data = np.dot(params['projector'], data) types = params['types'] for (pick, ind) in enumerate(params['inds']): params['data'][pick] = (data[ind] / params['scalings'][types[pick]]) params['plot_fun']()
[ "def", "_plot_update_epochs_proj", "(", "params", ",", "bools", "=", "None", ")", ":", "if", "(", "bools", "is", "not", "None", ")", ":", "inds", "=", "np", ".", "where", "(", "bools", ")", "[", "0", "]", "params", "[", "'info'", "]", "[", "'projs'", "]", "=", "[", "copy", ".", "deepcopy", "(", "params", "[", "'projs'", "]", "[", "ii", "]", ")", "for", "ii", "in", "inds", "]", "params", "[", "'proj_bools'", "]", "=", "bools", "(", "params", "[", "'projector'", "]", ",", "_", ")", "=", "setup_proj", "(", "params", "[", "'info'", "]", ",", "add_eeg_ref", "=", "False", ",", "verbose", "=", "False", ")", "start", "=", "int", "(", "(", "params", "[", "'t_start'", "]", "/", "len", "(", "params", "[", "'epochs'", "]", ".", "times", ")", ")", ")", "n_epochs", "=", "params", "[", "'n_epochs'", "]", "end", "=", "(", "start", "+", "n_epochs", ")", "data", "=", "np", ".", "concatenate", "(", "params", "[", "'epochs'", "]", "[", "start", ":", "end", "]", ".", "get_data", "(", ")", ",", "axis", "=", "1", ")", "if", "(", "params", "[", "'projector'", "]", "is", "not", "None", ")", ":", "data", "=", "np", ".", "dot", "(", "params", "[", "'projector'", "]", ",", "data", ")", "types", "=", "params", "[", "'types'", "]", "for", "(", "pick", ",", "ind", ")", "in", "enumerate", "(", "params", "[", "'inds'", "]", ")", ":", "params", "[", "'data'", "]", "[", "pick", "]", "=", "(", "data", "[", "ind", "]", "/", "params", "[", "'scalings'", "]", "[", "types", "[", "pick", "]", "]", ")", "params", "[", "'plot_fun'", "]", "(", ")" ]
deal with proj changed .
train
false
37,410
def _plot_unit_traces(ax, x, data, ci, color, err_kws, **kwargs): if isinstance(color, list): if ('alpha' not in err_kws): err_kws['alpha'] = 0.5 for (i, obs) in enumerate(data): ax.plot(x, obs, color=color[i], label='_nolegend_', **err_kws) else: if ('alpha' not in err_kws): err_kws['alpha'] = 0.2 ax.plot(x, data.T, color=color, label='_nolegend_', **err_kws)
[ "def", "_plot_unit_traces", "(", "ax", ",", "x", ",", "data", ",", "ci", ",", "color", ",", "err_kws", ",", "**", "kwargs", ")", ":", "if", "isinstance", "(", "color", ",", "list", ")", ":", "if", "(", "'alpha'", "not", "in", "err_kws", ")", ":", "err_kws", "[", "'alpha'", "]", "=", "0.5", "for", "(", "i", ",", "obs", ")", "in", "enumerate", "(", "data", ")", ":", "ax", ".", "plot", "(", "x", ",", "obs", ",", "color", "=", "color", "[", "i", "]", ",", "label", "=", "'_nolegend_'", ",", "**", "err_kws", ")", "else", ":", "if", "(", "'alpha'", "not", "in", "err_kws", ")", ":", "err_kws", "[", "'alpha'", "]", "=", "0.2", "ax", ".", "plot", "(", "x", ",", "data", ".", "T", ",", "color", "=", "color", ",", "label", "=", "'_nolegend_'", ",", "**", "err_kws", ")" ]
plot a trace for each observation in the original data .
train
false
37,411
def _get_deployment_config_file(): path = CONF.paste_deploy.config_file if (not path): path = _get_paste_config_path() if (not path): msg = (_('Unable to locate paste config file for %s.') % CONF.prog) raise RuntimeError(msg) return os.path.abspath(path)
[ "def", "_get_deployment_config_file", "(", ")", ":", "path", "=", "CONF", ".", "paste_deploy", ".", "config_file", "if", "(", "not", "path", ")", ":", "path", "=", "_get_paste_config_path", "(", ")", "if", "(", "not", "path", ")", ":", "msg", "=", "(", "_", "(", "'Unable to locate paste config file for %s.'", ")", "%", "CONF", ".", "prog", ")", "raise", "RuntimeError", "(", "msg", ")", "return", "os", ".", "path", ".", "abspath", "(", "path", ")" ]
retrieve the deployment_config_file config item .
train
false
37,414
def _get_first_sentence(s): x = re.match(u'.*?\\S\\.\\s', s) if (x is not None): s = x.group(0) return s.replace(u'\n', u' ')
[ "def", "_get_first_sentence", "(", "s", ")", ":", "x", "=", "re", ".", "match", "(", "u'.*?\\\\S\\\\.\\\\s'", ",", "s", ")", "if", "(", "x", "is", "not", "None", ")", ":", "s", "=", "x", ".", "group", "(", "0", ")", "return", "s", ".", "replace", "(", "u'\\n'", ",", "u' '", ")" ]
get the first sentence from a string and remove any carriage returns .
train
false
37,420
def keepdims_wrapper(a_callable): if ('keepdims' in getargspec(a_callable).args): return a_callable @wraps(a_callable) def keepdims_wrapped_callable(x, axis=None, keepdims=None, *args, **kwargs): r = a_callable(x, axis=axis, *args, **kwargs) if (not keepdims): return r axes = axis if (axes is None): axes = range(x.ndim) if (not isinstance(axes, (Container, Iterable, Sequence))): axes = [axes] r_slice = tuple() for each_axis in range(x.ndim): if (each_axis in axes): r_slice += (None,) else: r_slice += (slice(None),) r = r[r_slice] return r return keepdims_wrapped_callable
[ "def", "keepdims_wrapper", "(", "a_callable", ")", ":", "if", "(", "'keepdims'", "in", "getargspec", "(", "a_callable", ")", ".", "args", ")", ":", "return", "a_callable", "@", "wraps", "(", "a_callable", ")", "def", "keepdims_wrapped_callable", "(", "x", ",", "axis", "=", "None", ",", "keepdims", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "r", "=", "a_callable", "(", "x", ",", "axis", "=", "axis", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "not", "keepdims", ")", ":", "return", "r", "axes", "=", "axis", "if", "(", "axes", "is", "None", ")", ":", "axes", "=", "range", "(", "x", ".", "ndim", ")", "if", "(", "not", "isinstance", "(", "axes", ",", "(", "Container", ",", "Iterable", ",", "Sequence", ")", ")", ")", ":", "axes", "=", "[", "axes", "]", "r_slice", "=", "tuple", "(", ")", "for", "each_axis", "in", "range", "(", "x", ".", "ndim", ")", ":", "if", "(", "each_axis", "in", "axes", ")", ":", "r_slice", "+=", "(", "None", ",", ")", "else", ":", "r_slice", "+=", "(", "slice", "(", "None", ")", ",", ")", "r", "=", "r", "[", "r_slice", "]", "return", "r", "return", "keepdims_wrapped_callable" ]
a wrapper for functions that dont provide keepdims to ensure that they do .
train
false
37,421
def test_getouterframes(): curr = inspect.currentframe() getouterframes(curr, context=0)
[ "def", "test_getouterframes", "(", ")", ":", "curr", "=", "inspect", ".", "currentframe", "(", ")", "getouterframes", "(", "curr", ",", "context", "=", "0", ")" ]
just test that this works .
train
false
37,423
def luhn(candidate): if (not isinstance(candidate, basestring)): candidate = str(candidate) try: evens = sum([int(c) for c in candidate[(-1)::(-2)]]) odds = sum([LUHN_ODD_LOOKUP[int(c)] for c in candidate[(-2)::(-2)]]) return (((evens + odds) % 10) == 0) except ValueError: return False
[ "def", "luhn", "(", "candidate", ")", ":", "if", "(", "not", "isinstance", "(", "candidate", ",", "basestring", ")", ")", ":", "candidate", "=", "str", "(", "candidate", ")", "try", ":", "evens", "=", "sum", "(", "[", "int", "(", "c", ")", "for", "c", "in", "candidate", "[", "(", "-", "1", ")", ":", ":", "(", "-", "2", ")", "]", "]", ")", "odds", "=", "sum", "(", "[", "LUHN_ODD_LOOKUP", "[", "int", "(", "c", ")", "]", "for", "c", "in", "candidate", "[", "(", "-", "2", ")", ":", ":", "(", "-", "2", ")", "]", "]", ")", "return", "(", "(", "(", "evens", "+", "odds", ")", "%", "10", ")", "==", "0", ")", "except", "ValueError", ":", "return", "False" ]
checks a candidate number for validity according to the luhn algorithm .
train
true
37,427
def num_questions(user): return Question.objects.filter(creator=user).count()
[ "def", "num_questions", "(", "user", ")", ":", "return", "Question", ".", "objects", ".", "filter", "(", "creator", "=", "user", ")", ".", "count", "(", ")" ]
returns the number of questions a user has .
train
false
37,428
def infer_context_name(*vars): todo = deque() todo.extendleft(vars) while todo: v = todo.pop() if isinstance(v.type, GpuArrayType): return v.type.context_name if hasattr(v.tag, 'context_name'): return v.tag.context_name if v.owner: if isinstance(v.owner.op, HostFromGpu): return v.owner.inputs[0].type.context_name if (len(v.owner.inputs) == 1): todo.extendleft(v.owner.inputs) try: get_context(None) return None except ContextNotDefined: raise ValueError('Could not infer context from inputs')
[ "def", "infer_context_name", "(", "*", "vars", ")", ":", "todo", "=", "deque", "(", ")", "todo", ".", "extendleft", "(", "vars", ")", "while", "todo", ":", "v", "=", "todo", ".", "pop", "(", ")", "if", "isinstance", "(", "v", ".", "type", ",", "GpuArrayType", ")", ":", "return", "v", ".", "type", ".", "context_name", "if", "hasattr", "(", "v", ".", "tag", ",", "'context_name'", ")", ":", "return", "v", ".", "tag", ".", "context_name", "if", "v", ".", "owner", ":", "if", "isinstance", "(", "v", ".", "owner", ".", "op", ",", "HostFromGpu", ")", ":", "return", "v", ".", "owner", ".", "inputs", "[", "0", "]", ".", "type", ".", "context_name", "if", "(", "len", "(", "v", ".", "owner", ".", "inputs", ")", "==", "1", ")", ":", "todo", ".", "extendleft", "(", "v", ".", "owner", ".", "inputs", ")", "try", ":", "get_context", "(", "None", ")", "return", "None", "except", "ContextNotDefined", ":", "raise", "ValueError", "(", "'Could not infer context from inputs'", ")" ]
infer the context name to use from the inputs given .
train
false
37,429
@cache_permission def can_vote_suggestion(user, translation): if (not translation.subproject.suggestion_voting): return False if translation.subproject.locked: return False project = translation.subproject.project if check_owner(user, project, 'trans.vote_suggestion'): return True if (not has_group_perm(user, 'trans.vote_suggestion', translation)): return False if (translation.is_template() and (not has_group_perm(user, 'trans.save_template', translation))): return False return True
[ "@", "cache_permission", "def", "can_vote_suggestion", "(", "user", ",", "translation", ")", ":", "if", "(", "not", "translation", ".", "subproject", ".", "suggestion_voting", ")", ":", "return", "False", "if", "translation", ".", "subproject", ".", "locked", ":", "return", "False", "project", "=", "translation", ".", "subproject", ".", "project", "if", "check_owner", "(", "user", ",", "project", ",", "'trans.vote_suggestion'", ")", ":", "return", "True", "if", "(", "not", "has_group_perm", "(", "user", ",", "'trans.vote_suggestion'", ",", "translation", ")", ")", ":", "return", "False", "if", "(", "translation", ".", "is_template", "(", ")", "and", "(", "not", "has_group_perm", "(", "user", ",", "'trans.save_template'", ",", "translation", ")", ")", ")", ":", "return", "False", "return", "True" ]
checks whether user can vote suggestions on given translation .
train
false
37,430
def cleanup_attached_vdis(session): this_vm_ref = _get_this_vm_ref(session) vbd_refs = session.call_xenapi('VM.get_VBDs', this_vm_ref) for vbd_ref in vbd_refs: try: vdi_ref = session.call_xenapi('VBD.get_VDI', vbd_ref) vdi_rec = session.call_xenapi('VDI.get_record', vdi_ref) except session.XenAPI.Failure as e: if (e.details[0] != 'HANDLE_INVALID'): raise continue if ('nova_instance_uuid' in vdi_rec['other_config']): LOG.info(_LI('Disconnecting stale VDI %s from compute domU'), vdi_rec['uuid']) unplug_vbd(session, vbd_ref, this_vm_ref) destroy_vbd(session, vbd_ref)
[ "def", "cleanup_attached_vdis", "(", "session", ")", ":", "this_vm_ref", "=", "_get_this_vm_ref", "(", "session", ")", "vbd_refs", "=", "session", ".", "call_xenapi", "(", "'VM.get_VBDs'", ",", "this_vm_ref", ")", "for", "vbd_ref", "in", "vbd_refs", ":", "try", ":", "vdi_ref", "=", "session", ".", "call_xenapi", "(", "'VBD.get_VDI'", ",", "vbd_ref", ")", "vdi_rec", "=", "session", ".", "call_xenapi", "(", "'VDI.get_record'", ",", "vdi_ref", ")", "except", "session", ".", "XenAPI", ".", "Failure", "as", "e", ":", "if", "(", "e", ".", "details", "[", "0", "]", "!=", "'HANDLE_INVALID'", ")", ":", "raise", "continue", "if", "(", "'nova_instance_uuid'", "in", "vdi_rec", "[", "'other_config'", "]", ")", ":", "LOG", ".", "info", "(", "_LI", "(", "'Disconnecting stale VDI %s from compute domU'", ")", ",", "vdi_rec", "[", "'uuid'", "]", ")", "unplug_vbd", "(", "session", ",", "vbd_ref", ",", "this_vm_ref", ")", "destroy_vbd", "(", "session", ",", "vbd_ref", ")" ]
unplug any instance vdis left after an unclean restart .
train
false
37,431
def get_node_lineage(node): lineage = [node._id] while node.parent_id: node = node.parent_node lineage = ([node._id] + lineage) return lineage
[ "def", "get_node_lineage", "(", "node", ")", ":", "lineage", "=", "[", "node", ".", "_id", "]", "while", "node", ".", "parent_id", ":", "node", "=", "node", ".", "parent_node", "lineage", "=", "(", "[", "node", ".", "_id", "]", "+", "lineage", ")", "return", "lineage" ]
get a list of node ids in order from the node to top most project e .
train
false
37,432
def get_machine_id(): locations = ['/etc/machine-id', '/var/lib/dbus/machine-id'] existing_locations = [loc for loc in locations if os.path.exists(loc)] if (not existing_locations): return {} else: with salt.utils.fopen(existing_locations[0]) as machineid: return {'machine_id': machineid.read().strip()}
[ "def", "get_machine_id", "(", ")", ":", "locations", "=", "[", "'/etc/machine-id'", ",", "'/var/lib/dbus/machine-id'", "]", "existing_locations", "=", "[", "loc", "for", "loc", "in", "locations", "if", "os", ".", "path", ".", "exists", "(", "loc", ")", "]", "if", "(", "not", "existing_locations", ")", ":", "return", "{", "}", "else", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "existing_locations", "[", "0", "]", ")", "as", "machineid", ":", "return", "{", "'machine_id'", ":", "machineid", ".", "read", "(", ")", ".", "strip", "(", ")", "}" ]
provide the machine-id .
train
false
37,434
def get_private_rsa_fingerprint(key_location=None, key_file_obj=None, passphrase=None): k = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj, passphrase=passphrase, use_pycrypto=True) sha1digest = hashlib.sha1(k.exportKey('DER', pkcs=8)).hexdigest() fingerprint = insert_char_every_n_chars(sha1digest, ':', 2) key = (key_location or key_file_obj) log.debug(('rsa private key fingerprint (%s): %s' % (key, fingerprint))) return fingerprint
[ "def", "get_private_rsa_fingerprint", "(", "key_location", "=", "None", ",", "key_file_obj", "=", "None", ",", "passphrase", "=", "None", ")", ":", "k", "=", "get_rsa_key", "(", "key_location", "=", "key_location", ",", "key_file_obj", "=", "key_file_obj", ",", "passphrase", "=", "passphrase", ",", "use_pycrypto", "=", "True", ")", "sha1digest", "=", "hashlib", ".", "sha1", "(", "k", ".", "exportKey", "(", "'DER'", ",", "pkcs", "=", "8", ")", ")", ".", "hexdigest", "(", ")", "fingerprint", "=", "insert_char_every_n_chars", "(", "sha1digest", ",", "':'", ",", "2", ")", "key", "=", "(", "key_location", "or", "key_file_obj", ")", "log", ".", "debug", "(", "(", "'rsa private key fingerprint (%s): %s'", "%", "(", "key", ",", "fingerprint", ")", ")", ")", "return", "fingerprint" ]
returns the fingerprint of a private rsa key as a 59-character string .
train
false
37,437
def get_default_version_hostname(): return os.getenv('DEFAULT_VERSION_HOSTNAME')
[ "def", "get_default_version_hostname", "(", ")", ":", "return", "os", ".", "getenv", "(", "'DEFAULT_VERSION_HOSTNAME'", ")" ]
get the standard hostname of the default version of the app .
train
false
37,438
def removeQuotes(s, l, t): return t[0][1:(-1)]
[ "def", "removeQuotes", "(", "s", ",", "l", ",", "t", ")", ":", "return", "t", "[", "0", "]", "[", "1", ":", "(", "-", "1", ")", "]" ]
helper parse action for removing quotation marks from parsed quoted strings .
train
false
37,439
def _keep_alive(x, memo): try: memo[id(memo)].append(x) except KeyError: memo[id(memo)] = [x]
[ "def", "_keep_alive", "(", "x", ",", "memo", ")", ":", "try", ":", "memo", "[", "id", "(", "memo", ")", "]", ".", "append", "(", "x", ")", "except", "KeyError", ":", "memo", "[", "id", "(", "memo", ")", "]", "=", "[", "x", "]" ]
keeps a reference to the object x in the memo .
train
true
37,440
def es_analyzer_for_locale(locale, synonyms=False, fallback='standard'): if (locale in settings.ES_LOCALE_ANALYZERS): analyzer = settings.ES_LOCALE_ANALYZERS[locale] if (synonyms and (locale in config.ES_SYNONYM_LOCALES)): analyzer += '-synonyms' else: analyzer = fallback if ((not settings.ES_USE_PLUGINS) and (analyzer in settings.ES_PLUGIN_ANALYZERS)): analyzer = fallback return analyzer
[ "def", "es_analyzer_for_locale", "(", "locale", ",", "synonyms", "=", "False", ",", "fallback", "=", "'standard'", ")", ":", "if", "(", "locale", "in", "settings", ".", "ES_LOCALE_ANALYZERS", ")", ":", "analyzer", "=", "settings", ".", "ES_LOCALE_ANALYZERS", "[", "locale", "]", "if", "(", "synonyms", "and", "(", "locale", "in", "config", ".", "ES_SYNONYM_LOCALES", ")", ")", ":", "analyzer", "+=", "'-synonyms'", "else", ":", "analyzer", "=", "fallback", "if", "(", "(", "not", "settings", ".", "ES_USE_PLUGINS", ")", "and", "(", "analyzer", "in", "settings", ".", "ES_PLUGIN_ANALYZERS", ")", ")", ":", "analyzer", "=", "fallback", "return", "analyzer" ]
pick an appropriate analyzer for a given locale .
train
false
37,441
def _as_inexact(x): x = asarray(x) if (not np.issubdtype(x.dtype, np.inexact)): return asarray(x, dtype=np.float_) return x
[ "def", "_as_inexact", "(", "x", ")", ":", "x", "=", "asarray", "(", "x", ")", "if", "(", "not", "np", ".", "issubdtype", "(", "x", ".", "dtype", ",", "np", ".", "inexact", ")", ")", ":", "return", "asarray", "(", "x", ",", "dtype", "=", "np", ".", "float_", ")", "return", "x" ]
return x as an array .
train
false
37,442
def unpad(padded_data, block_size, style='pkcs7'): pdata_len = len(padded_data) if (pdata_len % block_size): raise ValueError('Input data is not padded') if (style in ('pkcs7', 'x923')): padding_len = bord(padded_data[(-1)]) if ((padding_len < 1) or (padding_len > min(block_size, pdata_len))): raise ValueError('Padding is incorrect.') if (style == 'pkcs7'): if (padded_data[(- padding_len):] != (bchr(padding_len) * padding_len)): raise ValueError('PKCS#7 padding is incorrect.') elif (padded_data[(- padding_len):(-1)] != (bchr(0) * (padding_len - 1))): raise ValueError('ANSI X.923 padding is incorrect.') elif (style == 'iso7816'): padding_len = (pdata_len - padded_data.rfind(bchr(128))) if ((padding_len < 1) or (padding_len > min(block_size, pdata_len))): raise ValueError('Padding is incorrect.') if ((padding_len > 1) and (padded_data[(1 - padding_len):] != (bchr(0) * (padding_len - 1)))): raise ValueError('ISO 7816-4 padding is incorrect.') else: raise ValueError('Unknown padding style') return padded_data[:(- padding_len)]
[ "def", "unpad", "(", "padded_data", ",", "block_size", ",", "style", "=", "'pkcs7'", ")", ":", "pdata_len", "=", "len", "(", "padded_data", ")", "if", "(", "pdata_len", "%", "block_size", ")", ":", "raise", "ValueError", "(", "'Input data is not padded'", ")", "if", "(", "style", "in", "(", "'pkcs7'", ",", "'x923'", ")", ")", ":", "padding_len", "=", "bord", "(", "padded_data", "[", "(", "-", "1", ")", "]", ")", "if", "(", "(", "padding_len", "<", "1", ")", "or", "(", "padding_len", ">", "min", "(", "block_size", ",", "pdata_len", ")", ")", ")", ":", "raise", "ValueError", "(", "'Padding is incorrect.'", ")", "if", "(", "style", "==", "'pkcs7'", ")", ":", "if", "(", "padded_data", "[", "(", "-", "padding_len", ")", ":", "]", "!=", "(", "bchr", "(", "padding_len", ")", "*", "padding_len", ")", ")", ":", "raise", "ValueError", "(", "'PKCS#7 padding is incorrect.'", ")", "elif", "(", "padded_data", "[", "(", "-", "padding_len", ")", ":", "(", "-", "1", ")", "]", "!=", "(", "bchr", "(", "0", ")", "*", "(", "padding_len", "-", "1", ")", ")", ")", ":", "raise", "ValueError", "(", "'ANSI X.923 padding is incorrect.'", ")", "elif", "(", "style", "==", "'iso7816'", ")", ":", "padding_len", "=", "(", "pdata_len", "-", "padded_data", ".", "rfind", "(", "bchr", "(", "128", ")", ")", ")", "if", "(", "(", "padding_len", "<", "1", ")", "or", "(", "padding_len", ">", "min", "(", "block_size", ",", "pdata_len", ")", ")", ")", ":", "raise", "ValueError", "(", "'Padding is incorrect.'", ")", "if", "(", "(", "padding_len", ">", "1", ")", "and", "(", "padded_data", "[", "(", "1", "-", "padding_len", ")", ":", "]", "!=", "(", "bchr", "(", "0", ")", "*", "(", "padding_len", "-", "1", ")", ")", ")", ")", ":", "raise", "ValueError", "(", "'ISO 7816-4 padding is incorrect.'", ")", "else", ":", "raise", "ValueError", "(", "'Unknown padding style'", ")", "return", "padded_data", "[", ":", "(", "-", "padding_len", ")", "]" ]
remove all padding from padded_data .
train
true
37,443
def ftp_makedirs_cwd(ftp, path, first_call=True): try: ftp.cwd(path) except error_perm: ftp_makedirs_cwd(ftp, dirname(path), False) ftp.mkd(path) if first_call: ftp.cwd(path)
[ "def", "ftp_makedirs_cwd", "(", "ftp", ",", "path", ",", "first_call", "=", "True", ")", ":", "try", ":", "ftp", ".", "cwd", "(", "path", ")", "except", "error_perm", ":", "ftp_makedirs_cwd", "(", "ftp", ",", "dirname", "(", "path", ")", ",", "False", ")", "ftp", ".", "mkd", "(", "path", ")", "if", "first_call", ":", "ftp", ".", "cwd", "(", "path", ")" ]
set the current directory of the ftp connection given in the ftp argument .
train
false
37,444
def emboss_convert(filename, old_format, new_format): cline = SeqretCommandline(exes['seqret'], sequence=filename, sformat=old_format, osformat=new_format, auto=True, stdout=True) child = subprocess.Popen(str(cline), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=(sys.platform != 'win32')) child.stdin.close() child.stderr.close() return child.stdout
[ "def", "emboss_convert", "(", "filename", ",", "old_format", ",", "new_format", ")", ":", "cline", "=", "SeqretCommandline", "(", "exes", "[", "'seqret'", "]", ",", "sequence", "=", "filename", ",", "sformat", "=", "old_format", ",", "osformat", "=", "new_format", ",", "auto", "=", "True", ",", "stdout", "=", "True", ")", "child", "=", "subprocess", ".", "Popen", "(", "str", "(", "cline", ")", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ",", "shell", "=", "(", "sys", ".", "platform", "!=", "'win32'", ")", ")", "child", ".", "stdin", ".", "close", "(", ")", "child", ".", "stderr", ".", "close", "(", ")", "return", "child", ".", "stdout" ]
run seqret .
train
false
37,445
def WriteToPreviewWindow(message): ClosePreviewWindow() OpenFileInPreviewWindow(vim.eval(u'tempname()')) if JumpToPreviewWindow(): vim.current.buffer.options[u'modifiable'] = True vim.current.buffer.options[u'readonly'] = False vim.current.buffer[:] = message.splitlines() vim.current.buffer.options[u'buftype'] = u'nofile' vim.current.buffer.options[u'bufhidden'] = u'wipe' vim.current.buffer.options[u'buflisted'] = False vim.current.buffer.options[u'swapfile'] = False vim.current.buffer.options[u'modifiable'] = False vim.current.buffer.options[u'readonly'] = True vim.current.buffer.options[u'modified'] = False JumpToPreviousWindow() else: PostVimMessage(message, warning=False)
[ "def", "WriteToPreviewWindow", "(", "message", ")", ":", "ClosePreviewWindow", "(", ")", "OpenFileInPreviewWindow", "(", "vim", ".", "eval", "(", "u'tempname()'", ")", ")", "if", "JumpToPreviewWindow", "(", ")", ":", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'modifiable'", "]", "=", "True", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'readonly'", "]", "=", "False", "vim", ".", "current", ".", "buffer", "[", ":", "]", "=", "message", ".", "splitlines", "(", ")", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'buftype'", "]", "=", "u'nofile'", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'bufhidden'", "]", "=", "u'wipe'", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'buflisted'", "]", "=", "False", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'swapfile'", "]", "=", "False", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'modifiable'", "]", "=", "False", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'readonly'", "]", "=", "True", "vim", ".", "current", ".", "buffer", ".", "options", "[", "u'modified'", "]", "=", "False", "JumpToPreviousWindow", "(", ")", "else", ":", "PostVimMessage", "(", "message", ",", "warning", "=", "False", ")" ]
display the supplied message in the preview window .
train
false
37,446
def _move_el_inside_block(el, tag): for child in el: if _contains_block_level_tag(child): break else: import sys children_tag = etree.Element(tag) children_tag.text = el.text el.text = None children_tag.extend(list(el)) el[:] = [children_tag] return for child in list(el): if _contains_block_level_tag(child): _move_el_inside_block(child, tag) if child.tail: tail_tag = etree.Element(tag) tail_tag.text = child.tail child.tail = None el.insert((el.index(child) + 1), tail_tag) else: child_tag = etree.Element(tag) el.replace(child, child_tag) child_tag.append(child) if el.text: text_tag = etree.Element(tag) text_tag.text = el.text el.text = None el.insert(0, text_tag)
[ "def", "_move_el_inside_block", "(", "el", ",", "tag", ")", ":", "for", "child", "in", "el", ":", "if", "_contains_block_level_tag", "(", "child", ")", ":", "break", "else", ":", "import", "sys", "children_tag", "=", "etree", ".", "Element", "(", "tag", ")", "children_tag", ".", "text", "=", "el", ".", "text", "el", ".", "text", "=", "None", "children_tag", ".", "extend", "(", "list", "(", "el", ")", ")", "el", "[", ":", "]", "=", "[", "children_tag", "]", "return", "for", "child", "in", "list", "(", "el", ")", ":", "if", "_contains_block_level_tag", "(", "child", ")", ":", "_move_el_inside_block", "(", "child", ",", "tag", ")", "if", "child", ".", "tail", ":", "tail_tag", "=", "etree", ".", "Element", "(", "tag", ")", "tail_tag", ".", "text", "=", "child", ".", "tail", "child", ".", "tail", "=", "None", "el", ".", "insert", "(", "(", "el", ".", "index", "(", "child", ")", "+", "1", ")", ",", "tail_tag", ")", "else", ":", "child_tag", "=", "etree", ".", "Element", "(", "tag", ")", "el", ".", "replace", "(", "child", ",", "child_tag", ")", "child_tag", ".", "append", "(", "child", ")", "if", "el", ".", "text", ":", "text_tag", "=", "etree", ".", "Element", "(", "tag", ")", "text_tag", ".", "text", "=", "el", ".", "text", "el", ".", "text", "=", "None", "el", ".", "insert", "(", "0", ",", "text_tag", ")" ]
helper for _fixup_ins_del_tags; actually takes the <ins> etc tags and moves them inside any block-level tags .
train
true
37,447
def p_suite(p): if (len(p) == 2): p[0] = ast.Stmt(p[1]) else: p[0] = ast.Stmt(p[3])
[ "def", "p_suite", "(", "p", ")", ":", "if", "(", "len", "(", "p", ")", "==", "2", ")", ":", "p", "[", "0", "]", "=", "ast", ".", "Stmt", "(", "p", "[", "1", "]", ")", "else", ":", "p", "[", "0", "]", "=", "ast", ".", "Stmt", "(", "p", "[", "3", "]", ")" ]
suite : simple_stmt | newline indent stmts dedent .
train
false
37,449
def get_body_barycentric(body, time, ephemeris=None): return _get_body_barycentric_posvel(body, time, ephemeris, get_velocity=False)
[ "def", "get_body_barycentric", "(", "body", ",", "time", ",", "ephemeris", "=", "None", ")", ":", "return", "_get_body_barycentric_posvel", "(", "body", ",", "time", ",", "ephemeris", ",", "get_velocity", "=", "False", ")" ]
calculate the barycentric position of a solar system body .
train
false
37,450
@profiler.trace @memoized_with_request(novaclient) def flavor_access_list(nova_api, flavor=None): return nova_api.flavor_access.list(flavor=flavor)
[ "@", "profiler", ".", "trace", "@", "memoized_with_request", "(", "novaclient", ")", "def", "flavor_access_list", "(", "nova_api", ",", "flavor", "=", "None", ")", ":", "return", "nova_api", ".", "flavor_access", ".", "list", "(", "flavor", "=", "flavor", ")" ]
get the list of access instance sizes .
train
false
37,451
def _check_response(response): if (response is None): raise RequestError('Request did not return a response.') elif ((response.status_code >= 500) or (response.status_code == TOO_MANY_REQUESTS)): raise BadStatusCodeError.from_response(response) elif response.retry_after: raise RetryAfterError.from_response(response)
[ "def", "_check_response", "(", "response", ")", ":", "if", "(", "response", "is", "None", ")", ":", "raise", "RequestError", "(", "'Request did not return a response.'", ")", "elif", "(", "(", "response", ".", "status_code", ">=", "500", ")", "or", "(", "response", ".", "status_code", "==", "TOO_MANY_REQUESTS", ")", ")", ":", "raise", "BadStatusCodeError", ".", "from_response", "(", "response", ")", "elif", "response", ".", "retry_after", ":", "raise", "RetryAfterError", ".", "from_response", "(", "response", ")" ]
validate a response :type response: :class:response .
train
false
37,452
def make_text_box(parent, width=0, height=0, hbar=0, vbar=1, fill=BOTH, expand=1, wrap=WORD, pack=1, class_=None, name=None, takefocus=None): (hbar, vbar, frame) = make_scrollbars(parent, hbar, vbar, pack, class_=class_, name=name, takefocus=takefocus) widget = Text(frame, wrap=wrap, name='text') if width: widget.config(width=width) if height: widget.config(height=height) widget.pack(expand=expand, fill=fill, side=LEFT) set_scroll_commands(widget, hbar, vbar) return (widget, frame)
[ "def", "make_text_box", "(", "parent", ",", "width", "=", "0", ",", "height", "=", "0", ",", "hbar", "=", "0", ",", "vbar", "=", "1", ",", "fill", "=", "BOTH", ",", "expand", "=", "1", ",", "wrap", "=", "WORD", ",", "pack", "=", "1", ",", "class_", "=", "None", ",", "name", "=", "None", ",", "takefocus", "=", "None", ")", ":", "(", "hbar", ",", "vbar", ",", "frame", ")", "=", "make_scrollbars", "(", "parent", ",", "hbar", ",", "vbar", ",", "pack", ",", "class_", "=", "class_", ",", "name", "=", "name", ",", "takefocus", "=", "takefocus", ")", "widget", "=", "Text", "(", "frame", ",", "wrap", "=", "wrap", ",", "name", "=", "'text'", ")", "if", "width", ":", "widget", ".", "config", "(", "width", "=", "width", ")", "if", "height", ":", "widget", ".", "config", "(", "height", "=", "height", ")", "widget", ".", "pack", "(", "expand", "=", "expand", ",", "fill", "=", "fill", ",", "side", "=", "LEFT", ")", "set_scroll_commands", "(", "widget", ",", "hbar", ",", "vbar", ")", "return", "(", "widget", ",", "frame", ")" ]
subroutine to create a text box .
train
false