id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
26,365
def cache_userdata(userterm, username, channel_id): userterm = ''.join([t.strip().lower() for t in userterm.split(' ')]) g.username_query_cache[userterm] = (username, channel_id) util.dbg('Cache data for username search query "{}": {} ({})'.format(userterm, username, channel_id)) while (len(g.username_query_cache) > 300): g.username_query_cache.popitem(last=False) return (username, channel_id)
[ "def", "cache_userdata", "(", "userterm", ",", "username", ",", "channel_id", ")", ":", "userterm", "=", "''", ".", "join", "(", "[", "t", ".", "strip", "(", ")", ".", "lower", "(", ")", "for", "t", "in", "userterm", ".", "split", "(", "' '", ")", ...
cache user name and channel id tuple .
train
false
26,366
def call_blink(*args, **kwargs): devices = _get_lights() pause = kwargs.get('pause', 0) res = dict() for dev_id in ((('id' not in kwargs) and sorted(devices.keys())) or _get_devices(kwargs)): state = devices[str(dev_id)]['state']['on'] _set(dev_id, ((state and Const.LAMP_OFF) or Const.LAMP_ON)) if pause: time.sleep(pause) res[dev_id] = _set(dev_id, (((not state) and Const.LAMP_OFF) or Const.LAMP_ON)) return res
[ "def", "call_blink", "(", "*", "args", ",", "**", "kwargs", ")", ":", "devices", "=", "_get_lights", "(", ")", "pause", "=", "kwargs", ".", "get", "(", "'pause'", ",", "0", ")", "res", "=", "dict", "(", ")", "for", "dev_id", "in", "(", "(", "(", ...
blink a lamp .
train
true
26,367
def package_installed(name, ignore_check=False, prevent_pending=False, image=None, restart=False): ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} old = __salt__['dism.installed_packages']() package_info = __salt__['dism.package_info'](name) if (package_info['Package Identity'] in old): ret['comment'] = 'The package {0} is already installed: {1}'.format(name, package_info['Package Identity']) return ret if __opts__['test']: ret['changes']['package'] = '{0} will be installed'.format(name) ret['result'] = None return ret status = __salt__['dism.add_package'](name, ignore_check, prevent_pending, image, restart) if (status['retcode'] not in [0, 1641, 3010]): ret['comment'] = 'Failed to install {0}: {1}'.format(name, status['stdout']) ret['result'] = False new = __salt__['dism.installed_packages']() changes = salt.utils.compare_lists(old, new) if changes: ret['comment'] = 'Installed {0}'.format(name) ret['changes'] = status ret['changes']['package'] = changes return ret
[ "def", "package_installed", "(", "name", ",", "ignore_check", "=", "False", ",", "prevent_pending", "=", "False", ",", "image", "=", "None", ",", "restart", "=", "False", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "True", "...
install a package .
train
true
26,368
def datetime_to_epoch(dt): delta = (dt - epoch) since_epoch = delta.total_seconds() return since_epoch
[ "def", "datetime_to_epoch", "(", "dt", ")", ":", "delta", "=", "(", "dt", "-", "epoch", ")", "since_epoch", "=", "delta", ".", "total_seconds", "(", ")", "return", "since_epoch" ]
convert datetime object to epoch with millisecond accuracy .
train
false
26,369
def dhcp_option_def(msg_type): def f(cls): _dhcp_option_unpackers[msg_type] = cls.unpack cls.CODE = msg_type return cls return f
[ "def", "dhcp_option_def", "(", "msg_type", ")", ":", "def", "f", "(", "cls", ")", ":", "_dhcp_option_unpackers", "[", "msg_type", "]", "=", "cls", ".", "unpack", "cls", ".", "CODE", "=", "msg_type", "return", "cls", "return", "f" ]
dpcp option decorator .
train
false
26,372
def logout_required(redirect): def redirect_func(user): return user.is_authenticated() if hasattr(redirect, '__call__'): return user_access_decorator(redirect_func, redirect_field=None, redirect_url_func=(lambda : reverse('home')))(redirect) else: return user_access_decorator(redirect_func, redirect_field=None, redirect_url_func=(lambda : redirect))
[ "def", "logout_required", "(", "redirect", ")", ":", "def", "redirect_func", "(", "user", ")", ":", "return", "user", ".", "is_authenticated", "(", ")", "if", "hasattr", "(", "redirect", ",", "'__call__'", ")", ":", "return", "user_access_decorator", "(", "r...
requires that the user *not* be logged in .
train
false
26,373
def IsPathInSubdirectories(filename, subdirectories, normcase=os.path.normcase): file_dir = normcase(os.path.dirname(os.path.abspath(filename))) for parent in subdirectories: fixed_parent = normcase(os.path.abspath(parent)) if (os.path.commonprefix([file_dir, fixed_parent]) == fixed_parent): return True return False
[ "def", "IsPathInSubdirectories", "(", "filename", ",", "subdirectories", ",", "normcase", "=", "os", ".", "path", ".", "normcase", ")", ":", "file_dir", "=", "normcase", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(",...
determines if a filename is contained within one of a set of directories .
train
false
26,375
def p_enum_specifier_3(t): pass
[ "def", "p_enum_specifier_3", "(", "t", ")", ":", "pass" ]
enum_specifier : enum id .
train
false
26,377
@snippet def dataset_delete(client, _): DATASET_NAME = ('dataset_delete_%d' % (_millis(),)) dataset = client.dataset(DATASET_NAME) dataset.create() assert dataset.exists() dataset.delete() assert (not dataset.exists())
[ "@", "snippet", "def", "dataset_delete", "(", "client", ",", "_", ")", ":", "DATASET_NAME", "=", "(", "'dataset_delete_%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "dataset", "=", "client", ".", "dataset", "(", "DATASET_NAME", ")", "dataset", ".",...
delete a dataset .
train
false
26,378
def submit_proctored_exam_results_report(request, course_key, features): task_type = 'proctored_exam_results_report' task_class = proctored_exam_results_csv task_input = {'features': features} task_key = '' return submit_task(request, task_type, task_class, course_key, task_input, task_key)
[ "def", "submit_proctored_exam_results_report", "(", "request", ",", "course_key", ",", "features", ")", ":", "task_type", "=", "'proctored_exam_results_report'", "task_class", "=", "proctored_exam_results_csv", "task_input", "=", "{", "'features'", ":", "features", "}", ...
submits a task to generate a html file containing the executive summary report .
train
false
26,379
def patch_scenario_with_autoretry(scenario, max_attempts=3): def scenario_run_with_retries(scenario_run, *args, **kwargs): for attempt in range(1, (max_attempts + 1)): if (not scenario_run(*args, **kwargs)): if (attempt > 1): message = u'AUTO-RETRY SCENARIO PASSED (after {0} attempts)' print(message.format(attempt)) return False if (attempt < max_attempts): print(u'AUTO-RETRY SCENARIO (attempt {0})'.format(attempt)) message = u'AUTO-RETRY SCENARIO FAILED (after {0} attempts)' print(message.format(max_attempts)) return True if isinstance(scenario, ScenarioOutline): scenario_outline = scenario for scenario in scenario_outline.scenarios: scenario_run = scenario.run scenario.run = functools.partial(scenario_run_with_retries, scenario_run) else: scenario_run = scenario.run scenario.run = functools.partial(scenario_run_with_retries, scenario_run)
[ "def", "patch_scenario_with_autoretry", "(", "scenario", ",", "max_attempts", "=", "3", ")", ":", "def", "scenario_run_with_retries", "(", "scenario_run", ",", "*", "args", ",", "**", "kwargs", ")", ":", "for", "attempt", "in", "range", "(", "1", ",", "(", ...
monkey-patches :func:~behave .
train
false
26,381
def redefined_by_decorator(node): if node.decorators: for decorator in node.decorators.nodes: if (isinstance(decorator, astroid.Getattr) and (getattr(decorator.expr, 'name', None) == node.name)): return True return False
[ "def", "redefined_by_decorator", "(", "node", ")", ":", "if", "node", ".", "decorators", ":", "for", "decorator", "in", "node", ".", "decorators", ".", "nodes", ":", "if", "(", "isinstance", "(", "decorator", ",", "astroid", ".", "Getattr", ")", "and", "...
return true if the object is a method redefined via decorator .
train
true
26,382
def function_exists(FunctionName, region=None, key=None, keyid=None, profile=None): try: func = _find_function(FunctionName, region=region, key=key, keyid=keyid, profile=profile) return {'exists': bool(func)} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "function_exists", "(", "FunctionName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "func", "=", "_find_function", "(", "FunctionName", ",", "region", "=", "r...
given a function name .
train
false
26,383
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
26,384
def test_suggestions(keyhint, key_config_stub): key_config_stub.set_bindings_for('normal', OrderedDict([('aa', 'cmd-aa'), ('ab', 'cmd-ab'), ('aba', 'cmd-aba'), ('abb', 'cmd-abb'), ('xd', 'cmd-xd'), ('xe', 'cmd-xe')])) keyhint.update_keyhint('normal', 'a') assert (keyhint.text() == expected_text(('a', 'yellow', 'a', 'cmd-aa'), ('a', 'yellow', 'b', 'cmd-ab'), ('a', 'yellow', 'ba', 'cmd-aba'), ('a', 'yellow', 'bb', 'cmd-abb')))
[ "def", "test_suggestions", "(", "keyhint", ",", "key_config_stub", ")", ":", "key_config_stub", ".", "set_bindings_for", "(", "'normal'", ",", "OrderedDict", "(", "[", "(", "'aa'", ",", "'cmd-aa'", ")", ",", "(", "'ab'", ",", "'cmd-ab'", ")", ",", "(", "'a...
test that keyhints are shown based on a prefix .
train
false
26,385
def sh3(cmd): p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) (out, err) = p.communicate() retcode = p.returncode if retcode: raise CalledProcessError(retcode, cmd) else: return (out.rstrip(), err.rstrip())
[ "def", "sh3", "(", "cmd", ")", ":", "p", "=", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "True", ")", "(", "out", ",", "err", ")", "=", "p", ".", "communicate", "(", ")", "retcode", "=", "p...
execute command in a subshell .
train
true
26,386
def _list_nodes_full(location=None): provider = (__active_provider_name__ or 'ec2') if (':' in provider): comps = provider.split(':') provider = comps[0] params = {'Action': 'DescribeInstances'} instances = aws.query(params, location=location, provider=provider, opts=__opts__, sigver='4') if ('error' in instances): raise SaltCloudSystemExit('An error occurred while listing nodes: {0}'.format(instances['error']['Errors']['Error']['Message'])) ret = _extract_instance_info(instances) __utils__['cloud.cache_node_list'](ret, provider, __opts__) return ret
[ "def", "_list_nodes_full", "(", "location", "=", "None", ")", ":", "provider", "=", "(", "__active_provider_name__", "or", "'ec2'", ")", "if", "(", "':'", "in", "provider", ")", ":", "comps", "=", "provider", ".", "split", "(", "':'", ")", "provider", "=...
return a list of the vms that in this location .
train
true
26,387
def performance(registry, xml_parent, data): perf = XML.SubElement(xml_parent, 'hudson.plugins.performance.PerformancePublisher') perf.set('plugin', 'performance') types = ['ART', 'MRT', 'PRT'] mappings = [('failed-threshold', 'errorFailedThreshold', 0), ('unstable-threshold', 'errorUnstableThreshold', 0), ('unstable-response-time-threshold', 'errorUnstableResponseTimeThreshold', ''), ('failed-threshold-positive', 'relativeFailedThresholdPositive', '0.0'), ('failed-threshold-negative', 'relativeFailedThresholdNegative', '0.0'), ('unstable-threshold-positive', 'relativeUnstableThresholdPositive', '0.0'), ('unstable-threshold-negative', 'relativeUnstableThresholdNegative', '0.0'), ('nth-build-number', 'nthBuildNumber', 0), ('mode-relative-thresholds', 'modeRelativeThresholds', False), ('config-type', 'configType', 'ART', types), ('mode-of-threshold', 'modeOfThreshold', False), ('fail-build', 'failBuildIfNoResultFile', False), ('compare-build-previous', 'compareBuildPrevious', False), ('mode-performance-per-test-case', 'modePerformancePerTestCase', True), ('mode-thoughput', 'modeThroughput', False)] helpers.convert_mapping_to_xml(perf, data, mappings, fail_required=True) parsers = XML.SubElement(perf, 'parsers') if ('report' in data): for item in data['report']: if isinstance(item, dict): item_name = next(iter(item.keys())) item_values = item.get(item_name, None) if (item_name == 'jmeter'): jmhold = XML.SubElement(parsers, 'hudson.plugins.performance.JMeterParser') XML.SubElement(jmhold, 'glob').text = str(item_values) elif (item_name == 'junit'): juhold = XML.SubElement(parsers, 'hudson.plugins.performance.JUnitParser') XML.SubElement(juhold, 'glob').text = str(item_values) else: raise JenkinsJobsException('You have not specified jmeter or junit, or you have incorrectly assigned the key value.') elif isinstance(item, str): if (item == 'jmeter'): jmhold = XML.SubElement(parsers, 'hudson.plugins.performance.JMeterParser') XML.SubElement(jmhold, 'glob').text = 
'**/*.jtl' elif (item == 'junit'): juhold = XML.SubElement(parsers, 'hudson.plugins.performance.JUnitParser') XML.SubElement(juhold, 'glob').text = '**/TEST-*.xml' else: raise JenkinsJobsException('You have not specified jmeter or junit, or you have incorrectly assigned the key value.')
[ "def", "performance", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "perf", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.performance.PerformancePublisher'", ")", "perf", ".", "set", "(", "'plugin'", ",", "'performance'", "...
returns the performance of a partition .
train
false
26,388
def _where(filename, dirs=[], env='PATH'): if (not isinstance(dirs, list)): dirs = [dirs] if glob(filename): return filename paths = (([os.curdir] + os.environ[env].split(os.path.pathsep)) + dirs) for path in paths: for match in glob(os.path.join(path, filename)): if match: return os.path.normpath(match) raise IOError(('File not found: %s' % filename))
[ "def", "_where", "(", "filename", ",", "dirs", "=", "[", "]", ",", "env", "=", "'PATH'", ")", ":", "if", "(", "not", "isinstance", "(", "dirs", ",", "list", ")", ")", ":", "dirs", "=", "[", "dirs", "]", "if", "glob", "(", "filename", ")", ":", ...
find file in current dir or system path .
train
true
26,389
def _netstat_route_linux(): ret = [] cmd = 'netstat -A inet -rn | tail -n+3' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() ret.append({'addr_family': 'inet', 'destination': comps[0], 'gateway': comps[1], 'netmask': comps[2], 'flags': comps[3], 'interface': comps[7]}) cmd = 'netstat -A inet6 -rn | tail -n+3' out = __salt__['cmd.run'](cmd, python_shell=True) for line in out.splitlines(): comps = line.split() if (len(comps) == 6): ret.append({'addr_family': 'inet6', 'destination': comps[0], 'gateway': comps[1], 'netmask': '', 'flags': comps[3], 'interface': comps[5]}) elif (len(comps) == 7): ret.append({'addr_family': 'inet6', 'destination': comps[0], 'gateway': comps[1], 'netmask': '', 'flags': comps[3], 'interface': comps[6]}) else: continue return ret
[ "def", "_netstat_route_linux", "(", ")", ":", "ret", "=", "[", "]", "cmd", "=", "'netstat -A inet -rn | tail -n+3'", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "True", ")", "for", "line", "in", "out", ".", "splitli...
return netstat routing information for linux distros .
train
true
26,390
def system(): try: import platform print 'System : ', platform.system() print 'OS version : ', platform.version() print 'Python version :', platform.python_version() try: from cv2 import __version__ print ('Open CV version : ' + __version__) except ImportError: print ('Open CV2 version : ' + '2.1') if PIL_ENABLED: print 'PIL version : ', pil.VERSION else: print 'PIL module not installed' if ORANGE_ENABLED: print ('Orange Version : ' + orange.version) else: print 'Orange module not installed' try: import pygame as pg print ('PyGame Version : ' + pg.__version__) except ImportError: print 'PyGame module not installed' try: import pickle print ('Pickle Version : ' + pickle.__version__) except: print 'Pickle module not installed' except ImportError: print 'You need to install Platform to use this function' print 'to install you can use:' print 'easy_install platform' return
[ "def", "system", "(", ")", ":", "try", ":", "import", "platform", "print", "'System : '", ",", "platform", ".", "system", "(", ")", "print", "'OS version : '", ",", "platform", ".", "version", "(", ")", "print", "'Python version :'", ",", "platform", ".", ...
returns the system/os name .
train
false
26,391
@config.change_filter('aliases', function=True) def _update_aliases(): update([usertypes.Completion.command])
[ "@", "config", ".", "change_filter", "(", "'aliases'", ",", "function", "=", "True", ")", "def", "_update_aliases", "(", ")", ":", "update", "(", "[", "usertypes", ".", "Completion", ".", "command", "]", ")" ]
update completions that include command aliases .
train
false
26,393
def get_id(opts, cache_minion_id=False): if (opts['root_dir'] is None): root_dir = salt.syspaths.ROOT_DIR else: root_dir = opts['root_dir'] config_dir = salt.syspaths.CONFIG_DIR if config_dir.startswith(salt.syspaths.ROOT_DIR): config_dir = config_dir.split(salt.syspaths.ROOT_DIR, 1)[(-1)] id_cache = os.path.join(root_dir, config_dir.lstrip(os.path.sep), 'minion_id') if opts.get('minion_id_caching', True): try: with salt.utils.fopen(id_cache) as idf: name = idf.readline().strip() bname = salt.utils.to_bytes(name) if bname.startswith(codecs.BOM): name = salt.utils.to_str(bname.replace(codecs.BOM, '', 1)) if (name and (name != 'localhost')): log.debug('Using cached minion ID from {0}: {1}'.format(id_cache, name)) return (name, False) except (IOError, OSError): pass if (('__role' in opts) and (opts.get('__role') == 'minion')): log.debug('Guessing ID. The id can be explicitly set in {0}'.format(os.path.join(salt.syspaths.CONFIG_DIR, 'minion'))) newid = salt.utils.network.generate_minion_id() if (('__role' in opts) and (opts.get('__role') == 'minion')): log.debug('Found minion id from generate_minion_id(): {0}'.format(newid)) if (cache_minion_id and opts.get('minion_id_caching', True)): _cache_id(newid, id_cache) is_ipv4 = salt.utils.network.is_ipv4(newid) return (newid, is_ipv4)
[ "def", "get_id", "(", "opts", ",", "cache_minion_id", "=", "False", ")", ":", "if", "(", "opts", "[", "'root_dir'", "]", "is", "None", ")", ":", "root_dir", "=", "salt", ".", "syspaths", ".", "ROOT_DIR", "else", ":", "root_dir", "=", "opts", "[", "'r...
given vpc properties .
train
true
26,394
def key_id_or_name_as_string_n(index): def transform_function(key): id_or_name = _key_id_or_name_n(key, index) if (not id_or_name): return u'' return unicode(id_or_name) return transform_function
[ "def", "key_id_or_name_as_string_n", "(", "index", ")", ":", "def", "transform_function", "(", "key", ")", ":", "id_or_name", "=", "_key_id_or_name_n", "(", "key", ",", "index", ")", "if", "(", "not", "id_or_name", ")", ":", "return", "u''", "return", "unico...
pull out the nth key id or name from a key which has parents .
train
false
26,395
def is_interactive(): return _is_interactive
[ "def", "is_interactive", "(", ")", ":", "return", "_is_interactive" ]
return true if plot mode is interactive .
train
false
26,396
def is_shutting_down(): with __shutdown_mutex: shutting_down = __shuting_down return shutting_down
[ "def", "is_shutting_down", "(", ")", ":", "with", "__shutdown_mutex", ":", "shutting_down", "=", "__shuting_down", "return", "shutting_down" ]
returns true if the server is shutting down .
train
false
26,397
def jsbeautify(javascript): if (not HAVE_JSBEAUTIFIER): return javascript with _jsbeautify_lock: (origout, sys.stdout) = (sys.stdout, StringIO()) javascript = jsbeautifier.beautify(javascript) if (sys.stdout.getvalue() not in _jsbeautify_blacklist): log.warning('jsbeautifier returned error: %s', sys.stdout.getvalue()) sys.stdout = origout return javascript
[ "def", "jsbeautify", "(", "javascript", ")", ":", "if", "(", "not", "HAVE_JSBEAUTIFIER", ")", ":", "return", "javascript", "with", "_jsbeautify_lock", ":", "(", "origout", ",", "sys", ".", "stdout", ")", "=", "(", "sys", ".", "stdout", ",", "StringIO", "...
beautifies javascript through jsbeautifier and ignore some messages .
train
false
26,398
def emails_with_users_and_watches(subject, text_template, html_template, context_vars, users_and_watches, from_email=settings.TIDINGS_FROM_ADDRESS, default_locale=settings.WIKI_DEFAULT_LANGUAGE, **extra_kwargs): @safe_translation def _make_mail(locale, user, watch): context_vars['user'] = user context_vars['watch'] = watch[0] context_vars['watches'] = watch msg = EmailMultiAlternatives((subject % context_vars), render_email(text_template, context_vars), from_email, [user.email], **extra_kwargs) if html_template: msg.attach_alternative(render_email(html_template, context_vars), 'text/html') return msg for (user, watch) in users_and_watches: if hasattr(user, 'locale'): locale = user.locale else: locale = default_locale (yield _make_mail(locale, user, watch))
[ "def", "emails_with_users_and_watches", "(", "subject", ",", "text_template", ",", "html_template", ",", "context_vars", ",", "users_and_watches", ",", "from_email", "=", "settings", ".", "TIDINGS_FROM_ADDRESS", ",", "default_locale", "=", "settings", ".", "WIKI_DEFAULT...
return iterable of emailmessages with user and watch values substituted .
train
false
26,400
def get_slotname(slot, host=None, admin_username=None, admin_password=None): slots = list_slotnames(host=host, admin_username=admin_username, admin_password=admin_password) slot = str(slot) return slots[slot]['slotname']
[ "def", "get_slotname", "(", "slot", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "slots", "=", "list_slotnames", "(", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "ad...
get the name of a slot number in the chassis .
train
false
26,401
def find_item_at(scene, pos, order=Qt.DescendingOrder, type=None, name=None): items = scene.items(pos, Qt.IntersectsItemShape, order) for item in items: if ((type is not None) and (not isinstance(item, type))): continue if ((name is not None) and isinstance(item, QObject) and (item.objectName() != name)): continue return item else: return None
[ "def", "find_item_at", "(", "scene", ",", "pos", ",", "order", "=", "Qt", ".", "DescendingOrder", ",", "type", "=", "None", ",", "name", "=", "None", ")", ":", "items", "=", "scene", ".", "items", "(", "pos", ",", "Qt", ".", "IntersectsItemShape", ",...
find an object in a :class:qgraphicsscene scene at pos .
train
false
26,404
def undeploy(jboss_config, deployment): log.debug('======================== MODULE FUNCTION: jboss7.undeploy, deployment=%s', deployment) command = 'undeploy {deployment} '.format(deployment=deployment) return __salt__['jboss7_cli.run_command'](jboss_config, command)
[ "def", "undeploy", "(", "jboss_config", ",", "deployment", ")", ":", "log", ".", "debug", "(", "'======================== MODULE FUNCTION: jboss7.undeploy, deployment=%s'", ",", "deployment", ")", "command", "=", "'undeploy {deployment} '", ".", "format", "(", "deployment...
undeploy a webapp app the webapp context path url : URL the url of the server manager webapp timeout : 180 timeout for http request cli examples: .
train
true
26,405
def dictOf(key, value): return Dict(ZeroOrMore(Group((key + value))))
[ "def", "dictOf", "(", "key", ",", "value", ")", ":", "return", "Dict", "(", "ZeroOrMore", "(", "Group", "(", "(", "key", "+", "value", ")", ")", ")", ")" ]
helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value .
train
false
26,406
def _parse_task_syslog(lines): result = {} for record in _parse_hadoop_log4j_records(lines): message = record['message'] m = _OPENING_FOR_READING_RE.match(message) if m: result['split'] = dict(path=m.group('path')) continue m = _YARN_INPUT_SPLIT_RE.match(message) if m: result['split'] = dict(path=m.group('path'), start_line=int(m.group('start_line')), num_lines=int(m.group('num_lines'))) continue m = _JAVA_TRACEBACK_RE.search(message) if m: result['hadoop_error'] = dict(message=message, num_lines=record['num_lines'], start_line=record['start_line']) break if ((record['logger'] == _SPARK_APP_MASTER_LOGGER) and (record['level'] == 'ERROR')): m = _SPARK_APP_EXITED_RE.match(message) if m: result['hadoop_error'] = dict(message=message, num_lines=record['num_lines'], start_line=record['start_line']) result['check_stdout'] = True break return result
[ "def", "_parse_task_syslog", "(", "lines", ")", ":", "result", "=", "{", "}", "for", "record", "in", "_parse_hadoop_log4j_records", "(", "lines", ")", ":", "message", "=", "record", "[", "'message'", "]", "m", "=", "_OPENING_FOR_READING_RE", ".", "match", "(...
parse an error out of a syslog file .
train
false
26,407
def loopbackTLSConnectionInMemory(trustRoot, privateKey, serverCertificate, clientProtocols=None, serverProtocols=None, clientOptions=None): if (clientOptions is None): clientOptions = sslverify.OpenSSLCertificateOptions clientCertOpts = clientOptions(trustRoot=trustRoot, acceptableProtocols=clientProtocols) serverCertOpts = sslverify.OpenSSLCertificateOptions(privateKey=privateKey, certificate=serverCertificate, acceptableProtocols=serverProtocols) return _loopbackTLSConnection(serverCertOpts, clientCertOpts)
[ "def", "loopbackTLSConnectionInMemory", "(", "trustRoot", ",", "privateKey", ",", "serverCertificate", ",", "clientProtocols", "=", "None", ",", "serverProtocols", "=", "None", ",", "clientOptions", "=", "None", ")", ":", "if", "(", "clientOptions", "is", "None", ...
create a loopback tls connection with the given trust and keys .
train
false
26,408
def shared_cluster_internal(): global _shared_cluster if (_shared_cluster is None): _shared_cluster = MiniHadoopCluster() _shared_cluster.start() atexit.register(_shared_cluster.stop) return _shared_cluster
[ "def", "shared_cluster_internal", "(", ")", ":", "global", "_shared_cluster", "if", "(", "_shared_cluster", "is", "None", ")", ":", "_shared_cluster", "=", "MiniHadoopCluster", "(", ")", "_shared_cluster", ".", "start", "(", ")", "atexit", ".", "register", "(", ...
manages _shared_cluster .
train
false
26,410
def get_installed_tool_shed_repository(app, id): rval = [] if isinstance(id, list): return_list = True else: id = [id] return_list = False for i in id: rval.append(app.install_model.context.query(app.install_model.ToolShedRepository).get(app.security.decode_id(i))) if return_list: return rval return rval[0]
[ "def", "get_installed_tool_shed_repository", "(", "app", ",", "id", ")", ":", "rval", "=", "[", "]", "if", "isinstance", "(", "id", ",", "list", ")", ":", "return_list", "=", "True", "else", ":", "id", "=", "[", "id", "]", "return_list", "=", "False", ...
get a tool shed repository record from the galaxy database defined by the id .
train
false
26,411
def lcd(path): return _change_cwd('lcwd', path)
[ "def", "lcd", "(", "path", ")", ":", "return", "_change_cwd", "(", "'lcwd'", ",", "path", ")" ]
context manager for updating local current working directory .
train
false
26,414
@logic.validate(ckan.logic.schema.default_pagination_schema) def dashboard_activity_list_html(context, data_dict): activity_stream = dashboard_activity_list(context, data_dict) model = context['model'] user_id = context['user'] offset = data_dict.get('offset', 0) extra_vars = {'controller': 'user', 'action': 'dashboard', 'offset': offset, 'id': user_id} return activity_streams.activity_list_to_html(context, activity_stream, extra_vars)
[ "@", "logic", ".", "validate", "(", "ckan", ".", "logic", ".", "schema", ".", "default_pagination_schema", ")", "def", "dashboard_activity_list_html", "(", "context", ",", "data_dict", ")", ":", "activity_stream", "=", "dashboard_activity_list", "(", "context", ",...
return the authorized users dashboard activity stream as html .
train
false
26,415
def id_to_name(config, short_name): for (k, v) in list(config.items()): if (v.get('id') == short_name): return k break else: raise Exception('No provider with id={0} found in the config!'.format(short_name))
[ "def", "id_to_name", "(", "config", ",", "short_name", ")", ":", "for", "(", "k", ",", "v", ")", "in", "list", "(", "config", ".", "items", "(", ")", ")", ":", "if", "(", "v", ".", "get", "(", "'id'", ")", "==", "short_name", ")", ":", "return"...
returns the provider :doc:config key based on its id value .
train
true
26,416
def arbitrary_state_transformation(deployment_state): uuid = uuid4() return deployment_state.transform(['nodes', uuid], NodeState(uuid=uuid, hostname=u'catcatdog'))
[ "def", "arbitrary_state_transformation", "(", "deployment_state", ")", ":", "uuid", "=", "uuid4", "(", ")", "return", "deployment_state", ".", "transform", "(", "[", "'nodes'", ",", "uuid", "]", ",", "NodeState", "(", "uuid", "=", "uuid", ",", "hostname", "=...
make some change to a deployment state .
train
false
26,417
def getsebool(boolean): return list_sebool().get(boolean, {})
[ "def", "getsebool", "(", "boolean", ")", ":", "return", "list_sebool", "(", ")", ".", "get", "(", "boolean", ",", "{", "}", ")" ]
return the information on a specific selinux boolean cli example: .
train
false
26,419
def test_wrap_long_type(): wrapped = wrap_value(long_type(0)) assert (type(wrapped) == HyInteger)
[ "def", "test_wrap_long_type", "(", ")", ":", "wrapped", "=", "wrap_value", "(", "long_type", "(", "0", ")", ")", "assert", "(", "type", "(", "wrapped", ")", "==", "HyInteger", ")" ]
test conversion of integers .
train
false
26,420
def _random_range(**kwargs): min_inclusive = kwargs.pop('min_inclusive', None) max_inclusive = kwargs.pop('max_inclusive', None) max_exclusive = kwargs.pop('max_exclusive', None) randfunc = kwargs.pop('randfunc', None) if kwargs: raise ValueError(('Unknown keywords: ' + str(kwargs.keys))) if (None not in (max_inclusive, max_exclusive)): raise ValueError('max_inclusive and max_exclusive cannot be both specified') if (max_exclusive is not None): max_inclusive = (max_exclusive - 1) if (None in (min_inclusive, max_inclusive)): raise ValueError('Missing keyword to identify the interval') if (randfunc is None): randfunc = Random.new().read norm_maximum = (max_inclusive - min_inclusive) bits_needed = Integer(norm_maximum).size_in_bits() norm_candidate = (-1) while (not (0 <= norm_candidate <= norm_maximum)): norm_candidate = _random(max_bits=bits_needed, randfunc=randfunc) return (norm_candidate + min_inclusive)
[ "def", "_random_range", "(", "**", "kwargs", ")", ":", "min_inclusive", "=", "kwargs", ".", "pop", "(", "'min_inclusive'", ",", "None", ")", "max_inclusive", "=", "kwargs", ".", "pop", "(", "'max_inclusive'", ",", "None", ")", "max_exclusive", "=", "kwargs",...
generate a random integer within a given internal .
train
false
26,421
def delete_multi(blob_keys, **options): fut = delete_multi_async(blob_keys, **options) fut.get_result()
[ "def", "delete_multi", "(", "blob_keys", ",", "**", "options", ")", ":", "fut", "=", "delete_multi_async", "(", "blob_keys", ",", "**", "options", ")", "fut", ".", "get_result", "(", ")" ]
deletes a sequence of keys .
train
false
26,423
@handle_response_format @treeio_login_required def mlist_edit(request, mlist_id, response_format='html'): user = request.user.profile mlist = get_object_or_404(MailingList, pk=mlist_id) if (not user.has_permission(mlist, mode='w')): return user_denied(request, message="You don't have access to this Mailing List", response_format=response_format) context = _get_default_context(request) context.update({'mlist': mlist}) return render_to_response('messaging/mlist_edit', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "mlist_edit", "(", "request", ",", "mlist_id", ",", "response_format", "=", "'html'", ")", ":", "user", "=", "request", ".", "user", ".", "profile", "mlist", "=", "get_object_or_404", "(", "Mail...
mailinglist edit page .
train
false
26,424
def test_meta_comments(): t = ascii.read(['#comment1', '# comment2 DCTB ', 'a,b,c', '1,2,3']) assert (t.colnames == ['a', 'b', 'c']) assert (t.meta['comments'] == ['comment1', 'comment2'])
[ "def", "test_meta_comments", "(", ")", ":", "t", "=", "ascii", ".", "read", "(", "[", "'#comment1'", ",", "'# comment2 DCTB '", ",", "'a,b,c'", ",", "'1,2,3'", "]", ")", "assert", "(", "t", ".", "colnames", "==", "[", "'a'", ",", "'b'", ",", "'c'", ...
make sure that line comments are included in the meta attribute of the output table .
train
false
26,425
def test_prefilter(): pairs = [('2+2', '2+2')] for (raw, correct) in pairs: nt.assert_equal(ip.prefilter(raw), correct)
[ "def", "test_prefilter", "(", ")", ":", "pairs", "=", "[", "(", "'2+2'", ",", "'2+2'", ")", "]", "for", "(", "raw", ",", "correct", ")", "in", "pairs", ":", "nt", ".", "assert_equal", "(", "ip", ".", "prefilter", "(", "raw", ")", ",", "correct", ...
test user input conversions .
train
false
26,426
def get_interface_addresses(): try: output = subprocess.check_output('ifconfig') except subprocess.CalledProcessError: return (['127.0.0.1'], []) v4 = re.findall('inet (addr:)?([^ ]*)', output) v6 = re.findall('inet6 (addr: )?([^ ]*)', output) v4 = [e[1] for e in v4] v6 = [e[1] for e in v6] return (v4, v6)
[ "def", "get_interface_addresses", "(", ")", ":", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "'ifconfig'", ")", "except", "subprocess", ".", "CalledProcessError", ":", "return", "(", "[", "'127.0.0.1'", "]", ",", "[", "]", ")", "v4", ...
get all ip addresses assigned to interfaces .
train
false
26,427
def notifier_program(): return wf().datafile(u'Notify.app/Contents/MacOS/applet')
[ "def", "notifier_program", "(", ")", ":", "return", "wf", "(", ")", ".", "datafile", "(", "u'Notify.app/Contents/MacOS/applet'", ")" ]
return path to notifier applet executable .
train
false
26,428
def propagate_check_resource(cnxt, rpc_client, next_res_id, current_traversal, predecessors, sender_key, sender_data, is_update, adopt_stack_data): def do_check(entity_id, data): rpc_client.check_resource(cnxt, entity_id, current_traversal, data, is_update, adopt_stack_data) sync_point.sync(cnxt, next_res_id, current_traversal, is_update, do_check, predecessors, {sender_key: sender_data})
[ "def", "propagate_check_resource", "(", "cnxt", ",", "rpc_client", ",", "next_res_id", ",", "current_traversal", ",", "predecessors", ",", "sender_key", ",", "sender_data", ",", "is_update", ",", "adopt_stack_data", ")", ":", "def", "do_check", "(", "entity_id", "...
trigger processing of node if all of its dependencies are satisfied .
train
false
26,429
def _SkipVarint(buffer, pos, end): while (ord(buffer[pos]) & 128): pos += 1 pos += 1 if (pos > end): raise _DecodeError('Truncated message.') return pos
[ "def", "_SkipVarint", "(", "buffer", ",", "pos", ",", "end", ")", ":", "while", "(", "ord", "(", "buffer", "[", "pos", "]", ")", "&", "128", ")", ":", "pos", "+=", "1", "pos", "+=", "1", "if", "(", "pos", ">", "end", ")", ":", "raise", "_Deco...
skip a varint value .
train
true
26,430
def get_issues_list(project, auth=False, **params): params.setdefault('state', 'closed') url = 'https://api.github.com/repos/{project}/issues'.format(project=project) if auth: headers = make_auth_header() else: headers = None pages = get_paged_request(url, headers=headers, **params) return pages
[ "def", "get_issues_list", "(", "project", ",", "auth", "=", "False", ",", "**", "params", ")", ":", "params", ".", "setdefault", "(", "'state'", ",", "'closed'", ")", "url", "=", "'https://api.github.com/repos/{project}/issues'", ".", "format", "(", "project", ...
get issues list .
train
true
26,431
def _pick_leadfield(leadfield, forward, ch_names): picks_fwd = pick_channels(forward['sol']['row_names'], ch_names) return leadfield[picks_fwd]
[ "def", "_pick_leadfield", "(", "leadfield", ",", "forward", ",", "ch_names", ")", ":", "picks_fwd", "=", "pick_channels", "(", "forward", "[", "'sol'", "]", "[", "'row_names'", "]", ",", "ch_names", ")", "return", "leadfield", "[", "picks_fwd", "]" ]
pick out correct lead field components .
train
false
26,432
def getTransformedVector3Blindly(tetragrid, vector3): return Vector3(getTransformedByList(tetragrid[0], vector3), getTransformedByList(tetragrid[1], vector3), getTransformedByList(tetragrid[2], vector3))
[ "def", "getTransformedVector3Blindly", "(", "tetragrid", ",", "vector3", ")", ":", "return", "Vector3", "(", "getTransformedByList", "(", "tetragrid", "[", "0", "]", ",", "vector3", ")", ",", "getTransformedByList", "(", "tetragrid", "[", "1", "]", ",", "vecto...
get the vector3 multiplied by a tetragrid without checking if the tetragrid exists .
train
false
26,433
def _sub_labels(axis, subs=()): fmt = axis.get_minor_formatter() minor_tlocs = axis.get_minorticklocs() fmt.set_locs(minor_tlocs) coefs = (minor_tlocs / (10 ** np.floor(np.log10(minor_tlocs)))) label_expected = [(np.round(c) in subs) for c in coefs] label_test = [(fmt(x) != u'') for x in minor_tlocs] assert (label_test == label_expected)
[ "def", "_sub_labels", "(", "axis", ",", "subs", "=", "(", ")", ")", ":", "fmt", "=", "axis", ".", "get_minor_formatter", "(", ")", "minor_tlocs", "=", "axis", ".", "get_minorticklocs", "(", ")", "fmt", ".", "set_locs", "(", "minor_tlocs", ")", "coefs", ...
test whether locator marks subs to be labeled .
train
false
26,434
def CentralMoment(xs, k): mean = RawMoment(xs, 1) return (sum((((x - mean) ** k) for x in xs)) / len(xs))
[ "def", "CentralMoment", "(", "xs", ",", "k", ")", ":", "mean", "=", "RawMoment", "(", "xs", ",", "1", ")", "return", "(", "sum", "(", "(", "(", "(", "x", "-", "mean", ")", "**", "k", ")", "for", "x", "in", "xs", ")", ")", "/", "len", "(", ...
computes the kth central moment of xs .
train
false
26,438
def _hungarian(cost_matrix): state = _HungarianState(cost_matrix) step = (None if (0 in cost_matrix.shape) else _step1) while (step is not None): step = step(state) results = np.array(np.where((state.marked == 1))).T if state.transposed: results = results[:, ::(-1)] return results
[ "def", "_hungarian", "(", "cost_matrix", ")", ":", "state", "=", "_HungarianState", "(", "cost_matrix", ")", "step", "=", "(", "None", "if", "(", "0", "in", "cost_matrix", ".", "shape", ")", "else", "_step1", ")", "while", "(", "step", "is", "not", "No...
the hungarian algorithm .
train
false
26,439
def mminfo(source): return MMFile.info(source)
[ "def", "mminfo", "(", "source", ")", ":", "return", "MMFile", ".", "info", "(", "source", ")" ]
return size and storage parameters from matrix market file-like source .
train
false
26,440
def human_size(num, precision=3): return (human_num(num, precision) + u'B')
[ "def", "human_size", "(", "num", ",", "precision", "=", "3", ")", ":", "return", "(", "human_num", "(", "num", ",", "precision", ")", "+", "u'B'", ")" ]
convert bytes length to a human-readable version .
train
false
26,442
def ensuredir(path): try: os.makedirs(path) except OSError as err: if (not (err.errno == 17)): raise
[ "def", "ensuredir", "(", "path", ")", ":", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "err", ":", "if", "(", "not", "(", "err", ".", "errno", "==", "17", ")", ")", ":", "raise" ]
ensure that a path exists .
train
false
26,443
@pytest.mark.parametrize('resource_name', ['messages', 'drafts', 'files', 'events', 'folders', 'labels', 'calendars', 'contacts']) def test_resource_views(resource_name, db, api_client, generic_account, message, thread, event, label, contact, folder): if (resource_name == 'folders'): api_client = new_api_client(db, generic_account.namespace) elements = api_client.get_data('/{}'.format(resource_name)) count = api_client.get_data('/{}?view=count'.format(resource_name)) assert (count['count'] == len(elements)) ids = api_client.get_data('/{}?view=ids'.format(resource_name)) for (i, elem) in enumerate(elements): assert isinstance(ids[i], basestring), '&views=ids should return string' assert (elem['id'] == ids[i]), 'view=ids should preserve order'
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'resource_name'", ",", "[", "'messages'", ",", "'drafts'", ",", "'files'", ",", "'events'", ",", "'folders'", ",", "'labels'", ",", "'calendars'", ",", "'contacts'", "]", ")", "def", "test_resource_views", ...
exercises various tests for views .
train
false
26,444
def mobile_test(f): @wraps(f) def wrapper(self, *args, **kw): MobileTest._mobile_init(self) return f(self, *args, **kw) return wrapper
[ "def", "mobile_test", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kw", ")", ":", "MobileTest", ".", "_mobile_init", "(", "self", ")", "return", "f", "(", "self", ",", "*", "args", ...
test decorator for hitting mobile views .
train
false
26,445
def check_tuple_assignments(evaluator, types, name): for (index, node) in name.assignment_indexes(): iterated = iterable.py__iter__(evaluator, types, node) for _ in range((index + 1)): try: types = next(iterated) except StopIteration: types = set() break return types
[ "def", "check_tuple_assignments", "(", "evaluator", ",", "types", ",", "name", ")", ":", "for", "(", "index", ",", "node", ")", "in", "name", ".", "assignment_indexes", "(", ")", ":", "iterated", "=", "iterable", ".", "py__iter__", "(", "evaluator", ",", ...
checks if tuples are assigned .
train
false
26,446
def set_es_key(lookup_dict, term, value): (value_dict, value_key) = _find_es_dict_by_key(lookup_dict, term) if (value_dict is not None): value_dict[value_key] = value return True return False
[ "def", "set_es_key", "(", "lookup_dict", ",", "term", ",", "value", ")", ":", "(", "value_dict", ",", "value_key", ")", "=", "_find_es_dict_by_key", "(", "lookup_dict", ",", "term", ")", "if", "(", "value_dict", "is", "not", "None", ")", ":", "value_dict",...
looks up the location that the term maps to and sets it to the given value .
train
false
26,448
def transaction_before_request(): if view_has_annotation(NO_AUTO_TRANSACTION_ATTR): return None ctx = _request_ctx_stack.top atomic = transaction.atomic() atomic.__enter__() ctx.current_atomic = atomic
[ "def", "transaction_before_request", "(", ")", ":", "if", "view_has_annotation", "(", "NO_AUTO_TRANSACTION_ATTR", ")", ":", "return", "None", "ctx", "=", "_request_ctx_stack", ".", "top", "atomic", "=", "transaction", ".", "atomic", "(", ")", "atomic", ".", "__e...
setup transaction before handling the request .
train
false
26,449
def grouped_correlation_row_generator(bt, pmf, category, gc_to_samples): data = array([i for i in bt.iter_data(axis='observation')]) category_values = gc_to_samples.keys() samples = gc_to_samples.values() sample_inds = [[bt.index(i, axis='sample') for i in group] for group in samples] try: md_vals = [] for grp in samples: md_vals.append(array([pmf[s][category] for s in grp], dtype=float)) except ValueError: raise ValueError("Couldn't convert sample metadata to float.") otu_vals = [data.take(inds, 1) for inds in sample_inds] return (category_values, md_vals, otu_vals)
[ "def", "grouped_correlation_row_generator", "(", "bt", ",", "pmf", ",", "category", ",", "gc_to_samples", ")", ":", "data", "=", "array", "(", "[", "i", "for", "i", "in", "bt", ".", "iter_data", "(", "axis", "=", "'observation'", ")", "]", ")", "category...
create generator for grouped correlation tests .
train
false
26,450
def sinh(x): return Sinh()(x)
[ "def", "sinh", "(", "x", ")", ":", "return", "Sinh", "(", ")", "(", "x", ")" ]
elementwise hyperbolic sine function .
train
false
26,452
def get_networkid(vm_): networkid = config.get_cloud_config_value('networkid', vm_, __opts__) if (networkid is not None): return networkid else: return False
[ "def", "get_networkid", "(", "vm_", ")", ":", "networkid", "=", "config", ".", "get_cloud_config_value", "(", "'networkid'", ",", "vm_", ",", "__opts__", ")", "if", "(", "networkid", "is", "not", "None", ")", ":", "return", "networkid", "else", ":", "retur...
return the networkid to use .
train
true
26,453
def generate(bits, randfunc=None, e=65537): if (bits < 1024): raise ValueError('RSA modulus length must be >= 1024') if (((e % 2) == 0) or (e < 3)): raise ValueError('RSA public exponent must be a positive, odd integer larger than 2.') if (randfunc is None): randfunc = Random.get_random_bytes d = n = Integer(1) e = Integer(e) while ((n.size_in_bits() != bits) and (d < (1 << (bits // 2)))): size_q = (bits // 2) size_p = (bits - size_q) min_p = min_q = (Integer(1) << ((2 * size_q) - 1)).sqrt() if (size_q != size_p): min_p = (Integer(1) << ((2 * size_p) - 1)).sqrt() def filter_p(candidate): return ((candidate > min_p) and ((candidate - 1).gcd(e) == 1)) p = generate_probable_prime(exact_bits=size_p, randfunc=randfunc, prime_filter=filter_p) min_distance = (Integer(1) << ((bits // 2) - 100)) def filter_q(candidate): return ((candidate > min_q) and ((candidate - 1).gcd(e) == 1) and (abs((candidate - p)) > min_distance)) q = generate_probable_prime(exact_bits=size_q, randfunc=randfunc, prime_filter=filter_q) n = (p * q) lcm = (p - 1).lcm((q - 1)) d = e.inverse(lcm) if (p > q): (p, q) = (q, p) u = p.inverse(q) return RsaKey(n=n, e=e, d=d, p=p, q=q, u=u)
[ "def", "generate", "(", "bits", ",", "randfunc", "=", "None", ",", "e", "=", "65537", ")", ":", "if", "(", "bits", "<", "1024", ")", ":", "raise", "ValueError", "(", "'RSA modulus length must be >= 1024'", ")", "if", "(", "(", "(", "e", "%", "2", ")"...
generate a new private key on the given curve .
train
false
26,454
def add_dicts_by_key(in_dict1, in_dict2): both = {} for key1 in in_dict1: for key2 in in_dict2: if (key1 == key2): both[key1] = (in_dict1[key1] + in_dict2[key2]) return both
[ "def", "add_dicts_by_key", "(", "in_dict1", ",", "in_dict2", ")", ":", "both", "=", "{", "}", "for", "key1", "in", "in_dict1", ":", "for", "key2", "in", "in_dict2", ":", "if", "(", "key1", "==", "key2", ")", ":", "both", "[", "key1", "]", "=", "(",...
combines two dictionaries and adds the values for those keys that are shared .
train
false
26,455
def _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete): for (state, state_dict, mapper, connection, update_version_id) in states_to_delete: if (table not in mapper._pks_by_table): continue params = {} for col in mapper._pks_by_table[table]: params[col.key] = value = mapper._get_committed_state_attr_by_column(state, state_dict, col) if (value is None): raise orm_exc.FlushError(("Can't delete from table %s using NULL for primary key value on column %s" % (table, col))) if ((update_version_id is not None) and (mapper.version_id_col in mapper._cols_by_table[table])): params[mapper.version_id_col.key] = update_version_id (yield (params, connection))
[ "def", "_collect_delete_commands", "(", "base_mapper", ",", "uowtransaction", ",", "table", ",", "states_to_delete", ")", ":", "for", "(", "state", ",", "state_dict", ",", "mapper", ",", "connection", ",", "update_version_id", ")", "in", "states_to_delete", ":", ...
identify values to use in delete statements for a list of states to be deleted .
train
false
26,457
def enableLoggingErrorDebugging(): print >>sys.stderr, 'WARNING' print >>sys.stderr, ('WARNING: nupic.support.enableLoggingErrorDebugging() was called to install a debugging patch into all logging handlers that will cause the program to fail if a logging exception occurrs; this call is for debugging only and MUST be removed before checking in code into production system. Caller: %s' % (traceback.format_stack(),)) print >>sys.stderr, 'WARNING' def handleErrorPatch(*args, **kwargs): if logging.raiseExceptions: raise for handler in logging._handlerList: handler.handleError = handleErrorPatch return
[ "def", "enableLoggingErrorDebugging", "(", ")", ":", "print", ">>", "sys", ".", "stderr", ",", "'WARNING'", "print", ">>", "sys", ".", "stderr", ",", "(", "'WARNING: nupic.support.enableLoggingErrorDebugging() was called to install a debugging patch into all logging handlers th...
overrides the python logging facilitys handler .
train
false
26,459
def parse_docstring(docstring): ret = {} ret['full'] = docstring regex = '([ \\t]*):depends:[ \\t]+- (\\w+)[^\\n]*\\n(\\1[ \\t]+- (\\w+)[^\\n]*\\n)*' match = re.search(regex, docstring, re.M) if match: deps = [] regex = '- (\\w+)' for line in match.group(0).strip().splitlines(): deps.append(re.search(regex, line).group(1)) ret['deps'] = deps return ret else: txt = 'Required python modules: ' data = docstring.splitlines() dep_list = list((x for x in data if x.strip().startswith(txt))) if (not dep_list): ret['deps'] = [] return ret deps = dep_list[0].replace(txt, '').strip().split(', ') ret['deps'] = deps return ret
[ "def", "parse_docstring", "(", "docstring", ")", ":", "ret", "=", "{", "}", "ret", "[", "'full'", "]", "=", "docstring", "regex", "=", "'([ \\\\t]*):depends:[ \\\\t]+- (\\\\w+)[^\\\\n]*\\\\n(\\\\1[ \\\\t]+- (\\\\w+)[^\\\\n]*\\\\n)*'", "match", "=", "re", ".", "search", ...
parse out the parts of a docstring .
train
true
26,460
def _check_load_paths(load_path): if ((load_path is None) or (not isinstance(load_path, six.string_types))): return None _paths = [] for _path in load_path.split(':'): if (os.path.isabs(_path) and os.path.isdir(_path)): _paths.append(_path) else: log.info('Invalid augeas_cfg load_path entry: %s removed', _path) if (len(_paths) == 0): return None return ':'.join(_paths)
[ "def", "_check_load_paths", "(", "load_path", ")", ":", "if", "(", "(", "load_path", "is", "None", ")", "or", "(", "not", "isinstance", "(", "load_path", ",", "six", ".", "string_types", ")", ")", ")", ":", "return", "None", "_paths", "=", "[", "]", ...
checks the validity of the load_path .
train
true
26,461
def prepare_disks(job, fs_desc, disk1_only=False, disk_list=None): if (not fs_desc): return (None, '', None) if (not isinstance(fs_desc, partition.FsOptions)): fs_desc = _legacy_str_to_test_flags(fs_desc) if (not disk_list): disk_list = get_disk_list() mkfs_bin = ('mkfs.' + fs_desc.fstype) if (fs_desc.fstype == 'ext4'): mkfs_bin = 'mkfs.ext4dev' try: utils.system(('which ' + mkfs_bin)) except Exception: try: mkfs_bin = os.path.join(job.toolsdir, mkfs_bin) utils.system(('cp -ufp %s /sbin' % mkfs_bin)) except Exception: raise error.TestError(('No mkfs binary available for ' + fs_desc.fstype)) if (fs_desc.fstype == 'ext4'): fs_desc.mkfs_flags += ' -E test_fs' if disk1_only: disk_list = disk_list[0:1] mkfs_all_disks(job, disk_list, fs_desc.fstype, fs_desc.mkfs_flags, fs_desc.mount_options) return (disk_list[0]['mountpt'], fs_desc.fs_tag, disk_list)
[ "def", "prepare_disks", "(", "job", ",", "fs_desc", ",", "disk1_only", "=", "False", ",", "disk_list", "=", "None", ")", ":", "if", "(", "not", "fs_desc", ")", ":", "return", "(", "None", ",", "''", ",", "None", ")", "if", "(", "not", "isinstance", ...
prepare drive(s) to contain the file system type / options given in the description line fs_desc .
train
false
26,464
def date_to_datetime(x): if ((not isinstance(x, datetime)) and isinstance(x, date)): return datetime.combine(x, time()) return x
[ "def", "date_to_datetime", "(", "x", ")", ":", "if", "(", "(", "not", "isinstance", "(", "x", ",", "datetime", ")", ")", "and", "isinstance", "(", "x", ",", "date", ")", ")", ":", "return", "datetime", ".", "combine", "(", "x", ",", "time", "(", ...
convert a date into a datetime .
train
true
26,466
@testing.requires_testing_data def test_io_trans(): tempdir = _TempDir() os.mkdir(op.join(tempdir, 'sample')) assert_raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir) trans0 = read_trans(fname) fname1 = op.join(tempdir, 'sample', 'test-trans.fif') trans0.save(fname1) assert_true((fname1 == _find_trans('sample', subjects_dir=tempdir))) trans1 = read_trans(fname1) assert_true((trans0['from'] == trans1['from'])) assert_true((trans0['to'] == trans1['to'])) assert_array_equal(trans0['trans'], trans1['trans']) assert_raises(IOError, read_trans, fname_eve) with warnings.catch_warnings(record=True) as w: fname2 = op.join(tempdir, 'trans-test-bad-name.fif') write_trans(fname2, trans0) assert_naming(w, 'test_transforms.py', 1)
[ "@", "testing", ".", "requires_testing_data", "def", "test_io_trans", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "os", ".", "mkdir", "(", "op", ".", "join", "(", "tempdir", ",", "'sample'", ")", ")", "assert_raises", "(", "RuntimeError", ",", "...
test reading and writing of trans files .
train
false
26,467
def get_webshells(extension, force_extension=False): return _get_file_list('webshell', extension, force_extension)
[ "def", "get_webshells", "(", "extension", ",", "force_extension", "=", "False", ")", ":", "return", "_get_file_list", "(", "'webshell'", ",", "extension", ",", "force_extension", ")" ]
this method returns a webshell content to be used in exploits .
train
false
26,469
@snippet def topic_delete(client, to_delete): TOPIC_NAME = ('topic_delete-%d' % (_millis(),)) topic = client.topic(TOPIC_NAME) topic.create() assert topic.exists() topic.delete() assert (not topic.exists())
[ "@", "snippet", "def", "topic_delete", "(", "client", ",", "to_delete", ")", ":", "TOPIC_NAME", "=", "(", "'topic_delete-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "topic", "=", "client", ".", "topic", "(", "TOPIC_NAME", ")", "topic", ".", "cr...
delete a topic .
train
false
26,470
def getAllowedReturnURLs(relying_party_url): (rp_url_after_redirects, return_to_urls) = services.getServiceEndpoints(relying_party_url, _extractReturnURL) if (rp_url_after_redirects != relying_party_url): raise RealmVerificationRedirected(relying_party_url, rp_url_after_redirects) return return_to_urls
[ "def", "getAllowedReturnURLs", "(", "relying_party_url", ")", ":", "(", "rp_url_after_redirects", ",", "return_to_urls", ")", "=", "services", ".", "getServiceEndpoints", "(", "relying_party_url", ",", "_extractReturnURL", ")", "if", "(", "rp_url_after_redirects", "!=",...
given a relying party discovery url return a list of return_to urls .
train
true
26,471
def get_pydoc_html(module): doc = pydoc.HTMLDoc() output = doc.docmodule(module) loc = (doc.getdocloc(pydoc_mod) or '') if loc: loc = (('<br><a href="' + loc) + '">Module Docs</a>') return (output.strip(), loc)
[ "def", "get_pydoc_html", "(", "module", ")", ":", "doc", "=", "pydoc", ".", "HTMLDoc", "(", ")", "output", "=", "doc", ".", "docmodule", "(", "module", ")", "loc", "=", "(", "doc", ".", "getdocloc", "(", "pydoc_mod", ")", "or", "''", ")", "if", "lo...
returns pydoc generated output as html .
train
false
26,472
def parse_quantifier(source, info, ch): q = _QUANTIFIERS.get(ch) if q: return q if (ch == '{'): counts = parse_limited_quantifier(source) if counts: return counts return None
[ "def", "parse_quantifier", "(", "source", ",", "info", ",", "ch", ")", ":", "q", "=", "_QUANTIFIERS", ".", "get", "(", "ch", ")", "if", "q", ":", "return", "q", "if", "(", "ch", "==", "'{'", ")", ":", "counts", "=", "parse_limited_quantifier", "(", ...
parses a quantifier .
train
false
26,474
def importfile(path): magic = imp.get_magic() file = open(path, 'r') if (file.read(len(magic)) == magic): kind = imp.PY_COMPILED else: kind = imp.PY_SOURCE file.close() filename = os.path.basename(path) (name, ext) = os.path.splitext(filename) file = open(path, 'r') try: module = imp.load_module(name, file, path, (ext, 'r', kind)) except: raise ErrorDuringImport(path, sys.exc_info()) file.close() return module
[ "def", "importfile", "(", "path", ")", ":", "magic", "=", "imp", ".", "get_magic", "(", ")", "file", "=", "open", "(", "path", ",", "'r'", ")", "if", "(", "file", ".", "read", "(", "len", "(", "magic", ")", ")", "==", "magic", ")", ":", "kind",...
import a python source file or compiled file given its path .
train
false
26,475
@register.filter @stringfilter def pretty_print_issue_status(status): return BaseComment.issue_status_to_string(status)
[ "@", "register", ".", "filter", "@", "stringfilter", "def", "pretty_print_issue_status", "(", "status", ")", ":", "return", "BaseComment", ".", "issue_status_to_string", "(", "status", ")" ]
turns an issue status code into a human-readable status string .
train
false
26,476
def _wait_for_async(conn, request_id): count = 0 log.debug('Waiting for asynchronous operation to complete') result = conn.get_operation_status(request_id) while (result.status == 'InProgress'): count = (count + 1) if (count > 120): raise ValueError('Timed out waiting for async operation to complete.') time.sleep(5) result = conn.get_operation_status(request_id) if (result.status != 'Succeeded'): raise AzureException('Operation failed. {message} ({code})'.format(message=result.error.message, code=result.error.code))
[ "def", "_wait_for_async", "(", "conn", ",", "request_id", ")", ":", "count", "=", "0", "log", ".", "debug", "(", "'Waiting for asynchronous operation to complete'", ")", "result", "=", "conn", ".", "get_operation_status", "(", "request_id", ")", "while", "(", "r...
helper function for azure tests .
train
true
26,477
def make_idx_data_cv(revs, word_idx_map, cv, max_l=51, k=300, filter_h=5): (train, test) = ([], []) for rev in revs: sent = get_idx_from_sent(rev['text'], word_idx_map, max_l, k, filter_h) sent.append(rev['y']) if (rev['split'] == cv): test.append(sent) else: train.append(sent) train = np.array(train, dtype='int') test = np.array(test, dtype='int') return [train, test]
[ "def", "make_idx_data_cv", "(", "revs", ",", "word_idx_map", ",", "cv", ",", "max_l", "=", "51", ",", "k", "=", "300", ",", "filter_h", "=", "5", ")", ":", "(", "train", ",", "test", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", "rev", ...
transforms sentences into a 2-d matrix .
train
false
26,478
def _listdir_local(path, kwargs): request = file_service_pb.ListDirRequest() response = file_service_pb.ListDirResponse() request.set_path(path) if (kwargs and kwargs.has_key('marker')): request.set_marker(kwargs['marker']) if (kwargs and kwargs.has_key('max-keys')): request.set_max_keys(kwargs['max-keys']) if (kwargs and kwargs.has_key('prefix')): request.set_prefix(kwargs['prefix']) files._make_call('ListDir', request, response) return response.filenames_list()
[ "def", "_listdir_local", "(", "path", ",", "kwargs", ")", ":", "request", "=", "file_service_pb", ".", "ListDirRequest", "(", ")", "response", "=", "file_service_pb", ".", "ListDirResponse", "(", ")", "request", ".", "set_path", "(", "path", ")", "if", "(", ...
dev app server version of listdir .
train
false
26,479
def funshion_get_title_by_vid(vid): html = get_content('http://pv.funshion.com/v5/video/profile?id={vid}&cl=aphone&uc=5'.format(vid=vid)) c = json.loads(html) return c['name']
[ "def", "funshion_get_title_by_vid", "(", "vid", ")", ":", "html", "=", "get_content", "(", "'http://pv.funshion.com/v5/video/profile?id={vid}&cl=aphone&uc=5'", ".", "format", "(", "vid", "=", "vid", ")", ")", "c", "=", "json", ".", "loads", "(", "html", ")", "re...
vid->str single video vid to title .
train
false
26,480
def _reshape(arg, shape): if isinstance(arg, tuple): raise TypeError('Composite batches not supported.') assert (not isinstance(arg, list)) if isinstance(arg, (np.ndarray, theano.tensor.TensorVariable)): return arg.reshape(shape) elif isinstance(arg, theano.sparse.SparseVariable): warnings.warn('Using pylearn2.space._reshape(), which is a memory-inefficient hack for reshaping sparse tensors. Do not use this on large tensors. This will eventually be replaced by a proper Theano Op for sparse reshaping, once that is written.') dense = theano.sparse.dense_from_sparse(arg) dense = dense.reshape(shape) if (arg.format == 'csr'): return theano.sparse.csr_from_dense(dense) elif (arg.format == 'csc'): return theano.sparse.csc_from_dense(dense) else: raise ValueError(('Unexpected sparse format "%s".' % arg.format)) else: raise TypeError(('Unexpected batch type "%s"' % str(type(arg))))
[ "def", "_reshape", "(", "arg", ",", "shape", ")", ":", "if", "isinstance", "(", "arg", ",", "tuple", ")", ":", "raise", "TypeError", "(", "'Composite batches not supported.'", ")", "assert", "(", "not", "isinstance", "(", "arg", ",", "list", ")", ")", "i...
reshapes a tensor .
train
false
26,483
def intersperse(string, counts, separator=''): (left, rest, right) = intersperse_pat.match(string).groups() def reverse(s): return s[::(-1)] splits = split(reverse(rest), counts) res = separator.join(map(reverse, reverse(splits))) return (((left + res) + right), (((len(splits) > 0) and (len(splits) - 1)) or 0))
[ "def", "intersperse", "(", "string", ",", "counts", ",", "separator", "=", "''", ")", ":", "(", "left", ",", "rest", ",", "right", ")", "=", "intersperse_pat", ".", "match", "(", "string", ")", ".", "groups", "(", ")", "def", "reverse", "(", "s", "...
see the asserts below for examples .
train
false
26,484
def get_email(invalid=False, optional=True): invalid_prefix = 'There seem to be problems with that address. ' msg = 'Enter email address (used for urgent renewal and security notices)' unsafe_suggestion = '\n\nIf you really want to skip this, you can run the client with --register-unsafely-without-email but make sure you then backup your account key from /etc/letsencrypt/accounts\n\n' if optional: if invalid: msg += unsafe_suggestion suggest_unsafe = False else: suggest_unsafe = True else: suggest_unsafe = False while True: try: (code, email) = z_util(interfaces.IDisplay).input(((invalid_prefix + msg) if invalid else msg), force_interactive=True) except errors.MissingCommandlineFlag: msg = 'You should register before running non-interactively, or provide --agree-tos and --email <email_address> flags.' raise errors.MissingCommandlineFlag(msg) if (code != display_util.OK): if optional: raise errors.Error('An e-mail address or --register-unsafely-without-email must be provided.') else: raise errors.Error('An e-mail address must be provided.') elif util.safe_email(email): return email elif suggest_unsafe: msg += unsafe_suggestion suggest_unsafe = False invalid = bool(email)
[ "def", "get_email", "(", "invalid", "=", "False", ",", "optional", "=", "True", ")", ":", "invalid_prefix", "=", "'There seem to be problems with that address. '", "msg", "=", "'Enter email address (used for urgent renewal and security notices)'", "unsafe_suggestion", "=", "'...
returns the email address for a user .
train
false
26,485
def _get_dev_hostname(backend, instance=None): port = _get_dev_port(backend, instance) if (not port): error = ('Backend not found: %s' % backend) if (instance is not None): error = ('%s.%d' % (error, instance)) raise InvalidBackendError(error) host = os.environ.get('SERVER_NAME', 'localhost') return ('%s:%d' % (host, port))
[ "def", "_get_dev_hostname", "(", "backend", ",", "instance", "=", "None", ")", ":", "port", "=", "_get_dev_port", "(", "backend", ",", "instance", ")", "if", "(", "not", "port", ")", ":", "error", "=", "(", "'Backend not found: %s'", "%", "backend", ")", ...
returns the hostname of a backend [instance] in the dev_appserver .
train
false
26,486
def _validate_cluster_size(size):
    """Validate that a user-supplied cluster size is supported.

    :param size: raw user input (string or int).
    :returns: the size as an ``int``.
    :raises argparse.ArgumentTypeError: if the value is not an integer or
        falls outside [MIN_CLUSTER_SIZE, MAX_CLUSTER_SIZE].
    """
    try:
        parsed = int(size)
    except ValueError:
        # Report the raw input verbatim so the user sees what was rejected.
        raise argparse.ArgumentTypeError(u'Must be an integer. Found {!r}'.format(size))
    if not (MIN_CLUSTER_SIZE <= parsed <= MAX_CLUSTER_SIZE):
        raise argparse.ArgumentTypeError(u'Must be between {} and {}. Found {}.'.format(MIN_CLUSTER_SIZE, MAX_CLUSTER_SIZE, parsed))
    return parsed
[ "def", "_validate_cluster_size", "(", "size", ")", ":", "try", ":", "size", "=", "int", "(", "size", ")", "except", "ValueError", ":", "raise", "argparse", ".", "ArgumentTypeError", "(", "u'Must be an integer. Found {!r}'", ".", "format", "(", "size", ")", ")"...
validate that user-input cluster size is supported by installer .
train
false
26,487
def _build_parent_list(policy_definition, admx_policy_definitions, return_full_policy_names, adml_policy_resources):
    """
    Helper function to build a list containing parent elements of the ADMX
    policy.

    Walks from the policy's ``parentCategory`` reference up through the
    category hierarchy via ``_admx_policy_parent_walk``.

    :param policy_definition: lxml element for the policy definition.
    :param admx_policy_definitions: parsed ADMX policy definitions document.
    :param return_full_policy_names: if True, resolve display names from the
        ADML resources.
    :param adml_policy_resources: parsed ADML policy resources document.
    :return: list of parent category names (empty if the policy declares no
        parentCategory).
    """
    parent_list = []
    # dict.keys() is not subscriptable on Python 3; take the first declared
    # namespace prefix via an iterator instead of keys()[0].
    policy_namespace = next(iter(policy_definition.nsmap))
    parent_category = policy_definition.xpath('{0}:parentCategory/@ref'.format(policy_namespace), namespaces=policy_definition.nsmap)
    if parent_category:
        parent_category = parent_category[0]
        nsmap_xpath = '/policyDefinitions/policyNamespaces/{0}:*'.format(policy_namespace)
        # Merge the document-level namespace map with the policy's own so the
        # parent walk can resolve references across namespaces.
        this_namespace_map = _buildElementNsmap(admx_policy_definitions.xpath(nsmap_xpath, namespaces=policy_definition.nsmap))
        this_namespace_map = dictupdate.update(this_namespace_map, policy_definition.nsmap)
        parent_list = _admx_policy_parent_walk(parent_list, policy_namespace, parent_category, this_namespace_map, admx_policy_definitions, return_full_policy_names, adml_policy_resources)
    return parent_list
[ "def", "_build_parent_list", "(", "policy_definition", ",", "admx_policy_definitions", ",", "return_full_policy_names", ",", "adml_policy_resources", ")", ":", "parent_list", "=", "[", "]", "policy_namespace", "=", "policy_definition", ".", "nsmap", ".", "keys", "(", ...
helper function to build a list containing parent elements of the admx policy .
train
false
26,488
def SetProcessName(newname):
    """Attempt to set the OS-visible process name (best effort).

    Always records ``newname`` in the module-level ``_process_name``; then
    tries to apply it via ``prctl(PR_SET_NAME)`` on Linux. Failure to apply
    (non-Linux platform, missing libc) is silently ignored.

    :param newname: new process name, ``str`` or ``bytes``.
    """
    global _process_name
    _process_name = newname
    try:
        from ctypes import cdll, byref, create_string_buffer
        libc = cdll.LoadLibrary('libc.so.6')
        # prctl needs a byte string; on Python 3 a str would raise TypeError
        # inside create_string_buffer/value and silently skip the rename.
        raw = newname.encode('utf-8') if isinstance(newname, str) else newname
        buff = create_string_buffer(len(raw) + 1)
        buff.value = raw
        # 15 == PR_SET_NAME
        libc.prctl(15, byref(buff), 0, 0, 0)
    except Exception:
        # Best effort only — but never swallow SystemExit/KeyboardInterrupt
        # the way the previous bare `except:` did.
        pass
[ "def", "SetProcessName", "(", "newname", ")", ":", "global", "_process_name", "_process_name", "=", "newname", "try", ":", "from", "ctypes", "import", "cdll", ",", "byref", ",", "create_string_buffer", "libc", "=", "cdll", ".", "LoadLibrary", "(", "'libc.so.6'",...
attempts to set the process name .
train
false
26,490
def set_dhcp_dns(iface):
    """Set the DNS source for ``iface`` to DHCP via ``netsh``.

    :param iface: name of the network interface to configure.
    :returns: dict summarizing the interface and its new DNS source.
    """
    command = ['netsh', 'interface', 'ip', 'set', 'dns', iface, 'dhcp']
    __salt__['cmd.run'](command, python_shell=False)
    return {'Interface': iface, 'DNS Server': 'DHCP'}
[ "def", "set_dhcp_dns", "(", "iface", ")", ":", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'set'", ",", "'dns'", ",", "iface", ",", "'dhcp'", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ...
set dns source to dhcp on windows . cli example: .
train
false
26,491
def symptom_LDAP_file_based_domain_specific_configs():
    """Domain specific driver directory is invalid or contains invalid files.

    Returns True when the feature is enabled with file-based configs and the
    configured directory is missing or holds files that do not match the
    expected ``keystone.<domain_name>.conf`` naming pattern.
    """
    # Not applicable when the feature is off or configs live in the database.
    if (not CONF.identity.domain_specific_drivers_enabled) or CONF.identity.domain_configurations_from_database:
        return False
    filedir = CONF.identity.domain_config_dir
    if not os.path.isdir(filedir):
        print('Could not find directory ', filedir)
        return True
    bad_names = [name for name in os.listdir(filedir) if not re.match(CONFIG_REGEX, name)]
    if bad_names:
        print(('Warning: The following non-config files were found: %s\nIf they are intended to be config files then rename them to the form of `keystone.<domain_name>.conf`. Otherwise, ignore this warning' % ', '.join(bad_names)))
        return True
    return False
[ "def", "symptom_LDAP_file_based_domain_specific_configs", "(", ")", ":", "if", "(", "(", "not", "CONF", ".", "identity", ".", "domain_specific_drivers_enabled", ")", "or", "CONF", ".", "identity", ".", "domain_configurations_from_database", ")", ":", "return", "False"...
domain specific driver directory is invalid or contains invalid files .
train
false
26,493
def split_csp_str(s):
    """Split a comma-separated string into a de-duplicated list.

    Lists and tuples pass through unchanged. Empty items (from leading,
    trailing, or doubled commas) are dropped; item order is unspecified
    because duplicates are removed via a set.

    :param s: comma-separated string, or an already-split list/tuple.
    :returns: the input unchanged if list/tuple, else a list of unique items.
    """
    if isinstance(s, (list, tuple)):
        return s
    pieces = s.strip().split(',')
    return list({piece for piece in pieces if piece})
[ "def", "split_csp_str", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "s", "return", "list", "(", "set", "(", "(", "i", "for", "i", "in", "s", ".", "strip", "(", ")", ".", "split", ...
split comma-separated string .
train
false
26,494
def parallel_helper(obj, methodname, *args, **kwargs):
    """Invoke ``obj.methodname(*args, **kwargs)`` and return its result.

    Works around Python 2's inability to pickle bound instance methods by
    shipping the object and method name separately to worker processes.
    """
    bound_method = getattr(obj, methodname)
    return bound_method(*args, **kwargs)
[ "def", "parallel_helper", "(", "obj", ",", "methodname", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "getattr", "(", "obj", ",", "methodname", ")", "(", "*", "args", ",", "**", "kwargs", ")" ]
helper to work around python 2 limitations of pickling instance methods .
train
false
26,495
def _create_xblock_ancestor_info(xblock, course_outline):
    """Return ``{'ancestors': [...]}`` describing each ancestor of ``xblock``.

    Ancestors are listed nearest-first; only the immediate parent has its
    child info included. Each entry restricts children to the ancestor's
    direct children via ``include_children_predicate``.
    """
    ancestors = []
    current = get_parent_xblock(xblock)
    # Only the immediate parent carries child info, matching the original
    # recursive collection.
    include_child_info = True
    while current:
        # Bind the current ancestor as a default argument so the predicate
        # keeps referring to it even if evaluated after the loop advances.
        only_direct_children = (lambda parent, _anchor=current: parent == _anchor)
        ancestors.append(create_xblock_info(current, include_child_info=include_child_info, course_outline=course_outline, include_children_predicate=only_direct_children))
        include_child_info = False
        current = get_parent_xblock(current)
    return {'ancestors': ancestors}
[ "def", "_create_xblock_ancestor_info", "(", "xblock", ",", "course_outline", ")", ":", "ancestors", "=", "[", "]", "def", "collect_ancestor_info", "(", "ancestor", ",", "include_child_info", "=", "False", ")", ":", "if", "ancestor", ":", "direct_children_only", "=...
returns information about the ancestors of an xblock .
train
false