id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
15,878
def DocumentListAclFeedFromString(xml_string): return atom.CreateClassFromXMLString(DocumentListAclFeed, xml_string)
[ "def", "DocumentListAclFeedFromString", "(", "xml_string", ")", ":", "return", "atom", ".", "CreateClassFromXMLString", "(", "DocumentListAclFeed", ",", "xml_string", ")" ]
converts an xml string into a documentlistaclfeed object .
train
false
15,879
def send_html_mail_jinja(subject, html_template, text_template, context, *args, **kwargs): with no_jinja_autoescape(): html_template = get_env().get_template(html_template) text_template = get_env().get_template(text_template) msg = send_mail(subject, text_template.render(context), html_message=html_template.render(context), *args, **kwargs) return msg
[ "def", "send_html_mail_jinja", "(", "subject", ",", "html_template", ",", "text_template", ",", "context", ",", "*", "args", ",", "**", "kwargs", ")", ":", "with", "no_jinja_autoescape", "(", ")", ":", "html_template", "=", "get_env", "(", ")", ".", "get_template", "(", "html_template", ")", "text_template", "=", "get_env", "(", ")", ".", "get_template", "(", "text_template", ")", "msg", "=", "send_mail", "(", "subject", ",", "text_template", ".", "render", "(", "context", ")", ",", "html_message", "=", "html_template", ".", "render", "(", "context", ")", ",", "*", "args", ",", "**", "kwargs", ")", "return", "msg" ]
sends html mail using a jinja template with autoescaping turned off .
train
false
15,880
def LanguageFacet(locale_ident, extra_weights={}): def score(searcher, docnum): doc = searcher.stored_fields(docnum) weight = extra_weights.get(doc['name'], 1.0) doc_language = doc['language'] if (doc_language == locale_ident): weight *= 2.0 elif (doc_language == u'roomaji'): weight *= 1.4 return (- weight) return whoosh.sorting.FunctionFacet(score)
[ "def", "LanguageFacet", "(", "locale_ident", ",", "extra_weights", "=", "{", "}", ")", ":", "def", "score", "(", "searcher", ",", "docnum", ")", ":", "doc", "=", "searcher", ".", "stored_fields", "(", "docnum", ")", "weight", "=", "extra_weights", ".", "get", "(", "doc", "[", "'name'", "]", ",", "1.0", ")", "doc_language", "=", "doc", "[", "'language'", "]", "if", "(", "doc_language", "==", "locale_ident", ")", ":", "weight", "*=", "2.0", "elif", "(", "doc_language", "==", "u'roomaji'", ")", ":", "weight", "*=", "1.4", "return", "(", "-", "weight", ")", "return", "whoosh", ".", "sorting", ".", "FunctionFacet", "(", "score", ")" ]
constructs a sorting function that bubbles results from the current locale to the top of the list .
train
false
15,884
def split_de_casteljau(beta, t): beta = np.asarray(beta) beta_list = [beta] while True: beta = _de_casteljau1(beta, t) beta_list.append(beta) if (len(beta) == 1): break left_beta = [beta[0] for beta in beta_list] right_beta = [beta[(-1)] for beta in reversed(beta_list)] return (left_beta, right_beta)
[ "def", "split_de_casteljau", "(", "beta", ",", "t", ")", ":", "beta", "=", "np", ".", "asarray", "(", "beta", ")", "beta_list", "=", "[", "beta", "]", "while", "True", ":", "beta", "=", "_de_casteljau1", "(", "beta", ",", "t", ")", "beta_list", ".", "append", "(", "beta", ")", "if", "(", "len", "(", "beta", ")", "==", "1", ")", ":", "break", "left_beta", "=", "[", "beta", "[", "0", "]", "for", "beta", "in", "beta_list", "]", "right_beta", "=", "[", "beta", "[", "(", "-", "1", ")", "]", "for", "beta", "in", "reversed", "(", "beta_list", ")", "]", "return", "(", "left_beta", ",", "right_beta", ")" ]
split a bezier segment defined by its controlpoints *beta* into two separate segment divided at *t* and return their control points .
train
false
15,885
def slack(registry, xml_parent, data): def _add_xml(elem, name, value=''): if isinstance(value, bool): value = str(value).lower() XML.SubElement(elem, name).text = value logger = logging.getLogger(__name__) plugin_info = registry.get_plugin_info('Slack Notification Plugin') plugin_ver = pkg_resources.parse_version(plugin_info.get('version', '0')) mapping = (('team-domain', 'teamDomain', ''), ('auth-token', 'authToken', ''), ('build-server-url', 'buildServerUrl', '/'), ('room', 'room', '')) mapping_20 = (('notify-start', 'startNotification', False), ('notify-success', 'notifySuccess', False), ('notify-aborted', 'notifyAborted', False), ('notify-not-built', 'notifyNotBuilt', False), ('notify-unstable', 'notifyUnstable', False), ('notify-failure', 'notifyFailure', False), ('notify-back-to-normal', 'notifyBackToNormal', False), ('notify-repeated-failure', 'notifyRepeatedFailure', False), ('include-test-summary', 'includeTestSummary', False), ('commit-info-choice', 'commitInfoChoice', 'NONE'), ('include-custom-message', 'includeCustomMessage', False), ('custom-message', 'customMessage', '')) commit_info_choices = ['NONE', 'AUTHORS', 'AUTHORS_AND_TITLES'] slack = XML.SubElement(xml_parent, 'jenkins.plugins.slack.SlackNotifier') if (plugin_ver >= pkg_resources.parse_version('2.0')): mapping = (mapping + mapping_20) if (plugin_ver < pkg_resources.parse_version('2.0')): for (yaml_name, _, default_value) in mapping: if (not data.get(yaml_name, default_value)): raise MissingAttributeError(yaml_name) for (yaml_name, _, _) in mapping_20: if (yaml_name in data): logger.warning("'%s' is invalid with plugin version < 2.0, ignored", yaml_name) for (yaml_name, xml_name, default_value) in mapping: value = data.get(yaml_name, default_value) if (yaml_name == 'commit-info-choice'): if (value not in commit_info_choices): raise InvalidAttributeError(yaml_name, value, commit_info_choices) if ((yaml_name == 'include-custom-message') and (data is False)): if (not data.get('custom-message', 
'')): raise MissingAttributeError('custom-message') _add_xml(slack, xml_name, value)
[ "def", "slack", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "def", "_add_xml", "(", "elem", ",", "name", ",", "value", "=", "''", ")", ":", "if", "isinstance", "(", "value", ",", "bool", ")", ":", "value", "=", "str", "(", "value", ")", ".", "lower", "(", ")", "XML", ".", "SubElement", "(", "elem", ",", "name", ")", ".", "text", "=", "value", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "plugin_info", "=", "registry", ".", "get_plugin_info", "(", "'Slack Notification Plugin'", ")", "plugin_ver", "=", "pkg_resources", ".", "parse_version", "(", "plugin_info", ".", "get", "(", "'version'", ",", "'0'", ")", ")", "mapping", "=", "(", "(", "'team-domain'", ",", "'teamDomain'", ",", "''", ")", ",", "(", "'auth-token'", ",", "'authToken'", ",", "''", ")", ",", "(", "'build-server-url'", ",", "'buildServerUrl'", ",", "'/'", ")", ",", "(", "'room'", ",", "'room'", ",", "''", ")", ")", "mapping_20", "=", "(", "(", "'notify-start'", ",", "'startNotification'", ",", "False", ")", ",", "(", "'notify-success'", ",", "'notifySuccess'", ",", "False", ")", ",", "(", "'notify-aborted'", ",", "'notifyAborted'", ",", "False", ")", ",", "(", "'notify-not-built'", ",", "'notifyNotBuilt'", ",", "False", ")", ",", "(", "'notify-unstable'", ",", "'notifyUnstable'", ",", "False", ")", ",", "(", "'notify-failure'", ",", "'notifyFailure'", ",", "False", ")", ",", "(", "'notify-back-to-normal'", ",", "'notifyBackToNormal'", ",", "False", ")", ",", "(", "'notify-repeated-failure'", ",", "'notifyRepeatedFailure'", ",", "False", ")", ",", "(", "'include-test-summary'", ",", "'includeTestSummary'", ",", "False", ")", ",", "(", "'commit-info-choice'", ",", "'commitInfoChoice'", ",", "'NONE'", ")", ",", "(", "'include-custom-message'", ",", "'includeCustomMessage'", ",", "False", ")", ",", "(", "'custom-message'", ",", "'customMessage'", ",", "''", ")", ")", "commit_info_choices", "=", "[", "'NONE'", ",", "'AUTHORS'", ",", "'AUTHORS_AND_TITLES'", "]", "slack", 
"=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'jenkins.plugins.slack.SlackNotifier'", ")", "if", "(", "plugin_ver", ">=", "pkg_resources", ".", "parse_version", "(", "'2.0'", ")", ")", ":", "mapping", "=", "(", "mapping", "+", "mapping_20", ")", "if", "(", "plugin_ver", "<", "pkg_resources", ".", "parse_version", "(", "'2.0'", ")", ")", ":", "for", "(", "yaml_name", ",", "_", ",", "default_value", ")", "in", "mapping", ":", "if", "(", "not", "data", ".", "get", "(", "yaml_name", ",", "default_value", ")", ")", ":", "raise", "MissingAttributeError", "(", "yaml_name", ")", "for", "(", "yaml_name", ",", "_", ",", "_", ")", "in", "mapping_20", ":", "if", "(", "yaml_name", "in", "data", ")", ":", "logger", ".", "warning", "(", "\"'%s' is invalid with plugin version < 2.0, ignored\"", ",", "yaml_name", ")", "for", "(", "yaml_name", ",", "xml_name", ",", "default_value", ")", "in", "mapping", ":", "value", "=", "data", ".", "get", "(", "yaml_name", ",", "default_value", ")", "if", "(", "yaml_name", "==", "'commit-info-choice'", ")", ":", "if", "(", "value", "not", "in", "commit_info_choices", ")", ":", "raise", "InvalidAttributeError", "(", "yaml_name", ",", "value", ",", "commit_info_choices", ")", "if", "(", "(", "yaml_name", "==", "'include-custom-message'", ")", "and", "(", "data", "is", "False", ")", ")", ":", "if", "(", "not", "data", ".", "get", "(", "'custom-message'", ",", "''", ")", ")", ":", "raise", "MissingAttributeError", "(", "'custom-message'", ")", "_add_xml", "(", "slack", ",", "xml_name", ",", "value", ")" ]
yaml: slack requires the jenkins :jenkins-wiki:slack plugin <slack+plugin> when using slack plugin version < 2 .
train
false
15,887
def is_widget_with_placeholder(widget): return isinstance(widget, (TextInput, Textarea, PasswordInput))
[ "def", "is_widget_with_placeholder", "(", "widget", ")", ":", "return", "isinstance", "(", "widget", ",", "(", "TextInput", ",", "Textarea", ",", "PasswordInput", ")", ")" ]
is this a widget that should have a placeholder? only text .
train
false
15,888
def stash(): model = StashModel() view = StashView(model, qtutils.active_window()) view.show() view.raise_() return view
[ "def", "stash", "(", ")", ":", "model", "=", "StashModel", "(", ")", "view", "=", "StashView", "(", "model", ",", "qtutils", ".", "active_window", "(", ")", ")", "view", ".", "show", "(", ")", "view", ".", "raise_", "(", ")", "return", "view" ]
interface to git-stash(1)_ .
train
false
15,889
def cache_from_env(env): return item_from_env(env, 'swift.cache')
[ "def", "cache_from_env", "(", "env", ")", ":", "return", "item_from_env", "(", "env", ",", "'swift.cache'", ")" ]
get memcache connection pool from the environment (which had been previously set by the memcache middleware .
train
false
15,890
def mountCgroups(): mounts = quietRun('cat /proc/mounts') cgdir = '/sys/fs/cgroup' csdir = (cgdir + '/cpuset') if ((('cgroup %s' % cgdir) not in mounts) and (('cgroups %s' % cgdir) not in mounts)): raise Exception(('cgroups not mounted on ' + cgdir)) if (('cpuset %s' % csdir) not in mounts): errRun(('mkdir -p ' + csdir)) errRun(('mount -t cgroup -ocpuset cpuset ' + csdir))
[ "def", "mountCgroups", "(", ")", ":", "mounts", "=", "quietRun", "(", "'cat /proc/mounts'", ")", "cgdir", "=", "'/sys/fs/cgroup'", "csdir", "=", "(", "cgdir", "+", "'/cpuset'", ")", "if", "(", "(", "(", "'cgroup %s'", "%", "cgdir", ")", "not", "in", "mounts", ")", "and", "(", "(", "'cgroups %s'", "%", "cgdir", ")", "not", "in", "mounts", ")", ")", ":", "raise", "Exception", "(", "(", "'cgroups not mounted on '", "+", "cgdir", ")", ")", "if", "(", "(", "'cpuset %s'", "%", "csdir", ")", "not", "in", "mounts", ")", ":", "errRun", "(", "(", "'mkdir -p '", "+", "csdir", ")", ")", "errRun", "(", "(", "'mount -t cgroup -ocpuset cpuset '", "+", "csdir", ")", ")" ]
make sure cgroups file system is mounted .
train
false
15,892
def invalidate_cache(user, size=None): sizes = set(settings.AVATAR_AUTO_GENERATE_SIZES) if (size is not None): sizes.add(size) for prefix in cached_funcs: for size in sizes: cache.delete(get_cache_key(user, size, prefix))
[ "def", "invalidate_cache", "(", "user", ",", "size", "=", "None", ")", ":", "sizes", "=", "set", "(", "settings", ".", "AVATAR_AUTO_GENERATE_SIZES", ")", "if", "(", "size", "is", "not", "None", ")", ":", "sizes", ".", "add", "(", "size", ")", "for", "prefix", "in", "cached_funcs", ":", "for", "size", "in", "sizes", ":", "cache", ".", "delete", "(", "get_cache_key", "(", "user", ",", "size", ",", "prefix", ")", ")" ]
function to be called when saving or changing an users avatars .
train
true
15,893
def get_usage(action_mapping): usage = ['%prog action [options]\nactions:'] available_actions = action_mapping.keys() available_actions.sort() for a in available_actions: func = action_mapping[a] usage.append((' %s %s' % (a, func.args))) usage.extend(textwrap.wrap(getattr(func, 'help_doc', textwrap.dedent(func.__doc__.strip())), initial_indent=' ', subsequent_indent=' ')) usage.append('') return '\n'.join(usage[:(-1)])
[ "def", "get_usage", "(", "action_mapping", ")", ":", "usage", "=", "[", "'%prog action [options]\\nactions:'", "]", "available_actions", "=", "action_mapping", ".", "keys", "(", ")", "available_actions", ".", "sort", "(", ")", "for", "a", "in", "available_actions", ":", "func", "=", "action_mapping", "[", "a", "]", "usage", ".", "append", "(", "(", "' %s %s'", "%", "(", "a", ",", "func", ".", "args", ")", ")", ")", "usage", ".", "extend", "(", "textwrap", ".", "wrap", "(", "getattr", "(", "func", ",", "'help_doc'", ",", "textwrap", ".", "dedent", "(", "func", ".", "__doc__", ".", "strip", "(", ")", ")", ")", ",", "initial_indent", "=", "' '", ",", "subsequent_indent", "=", "' '", ")", ")", "usage", ".", "append", "(", "''", ")", "return", "'\\n'", ".", "join", "(", "usage", "[", ":", "(", "-", "1", ")", "]", ")" ]
return the current usage of a ratelimit for the specified time slice .
train
false
15,895
def quadratic_residues(p): r = set() for i in range(((p // 2) + 1)): r.add(pow(i, 2, p)) return sorted(list(r))
[ "def", "quadratic_residues", "(", "p", ")", ":", "r", "=", "set", "(", ")", "for", "i", "in", "range", "(", "(", "(", "p", "//", "2", ")", "+", "1", ")", ")", ":", "r", ".", "add", "(", "pow", "(", "i", ",", "2", ",", "p", ")", ")", "return", "sorted", "(", "list", "(", "r", ")", ")" ]
returns the list of quadratic residues .
train
false
15,897
def _is_cookie_present(response): return (response.cookies.get(settings.SESSION_COOKIE_NAME) and response.cookies[settings.SESSION_COOKIE_NAME].value)
[ "def", "_is_cookie_present", "(", "response", ")", ":", "return", "(", "response", ".", "cookies", ".", "get", "(", "settings", ".", "SESSION_COOKIE_NAME", ")", "and", "response", ".", "cookies", "[", "settings", ".", "SESSION_COOKIE_NAME", "]", ".", "value", ")" ]
returns whether the session cookie is present in the response .
train
false
15,898
def res_format(resource): if (not resource['url']): return None return (resource['format'] or resource['url'].split('.')[(-1)]).lower()
[ "def", "res_format", "(", "resource", ")", ":", "if", "(", "not", "resource", "[", "'url'", "]", ")", ":", "return", "None", "return", "(", "resource", "[", "'format'", "]", "or", "resource", "[", "'url'", "]", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", ")", ".", "lower", "(", ")" ]
the assumed resource format in lower case .
train
false
15,900
def readconfig(cfgdict): default_cfg = 'turtle.cfg' cfgdict1 = {} cfgdict2 = {} if isfile(default_cfg): cfgdict1 = config_dict(default_cfg) if ('importconfig' in cfgdict1): default_cfg = ('turtle_%s.cfg' % cfgdict1['importconfig']) try: (head, tail) = split(__file__) cfg_file2 = join(head, default_cfg) except: cfg_file2 = '' if isfile(cfg_file2): cfgdict2 = config_dict(cfg_file2) _CFG.update(cfgdict2) _CFG.update(cfgdict1)
[ "def", "readconfig", "(", "cfgdict", ")", ":", "default_cfg", "=", "'turtle.cfg'", "cfgdict1", "=", "{", "}", "cfgdict2", "=", "{", "}", "if", "isfile", "(", "default_cfg", ")", ":", "cfgdict1", "=", "config_dict", "(", "default_cfg", ")", "if", "(", "'importconfig'", "in", "cfgdict1", ")", ":", "default_cfg", "=", "(", "'turtle_%s.cfg'", "%", "cfgdict1", "[", "'importconfig'", "]", ")", "try", ":", "(", "head", ",", "tail", ")", "=", "split", "(", "__file__", ")", "cfg_file2", "=", "join", "(", "head", ",", "default_cfg", ")", "except", ":", "cfg_file2", "=", "''", "if", "isfile", "(", "cfg_file2", ")", ":", "cfgdict2", "=", "config_dict", "(", "cfg_file2", ")", "_CFG", ".", "update", "(", "cfgdict2", ")", "_CFG", ".", "update", "(", "cfgdict1", ")" ]
read config-files .
train
false
15,901
def positional(max_positional_args): def positional_decorator(wrapped): @functools.wraps(wrapped) def positional_wrapper(*args, **kwargs): if (len(args) > max_positional_args): plural_s = '' if (max_positional_args != 1): plural_s = 's' message = ('%s() takes at most %d positional argument%s (%d given)' % (wrapped.__name__, max_positional_args, plural_s, len(args))) if (positional_parameters_enforcement == POSITIONAL_EXCEPTION): raise TypeError(message) elif (positional_parameters_enforcement == POSITIONAL_WARNING): logger.warning(message) else: pass return wrapped(*args, **kwargs) return positional_wrapper if isinstance(max_positional_args, six.integer_types): return positional_decorator else: (args, _, _, defaults) = inspect.getargspec(max_positional_args) return positional((len(args) - len(defaults)))(max_positional_args)
[ "def", "positional", "(", "max_positional_args", ")", ":", "def", "positional_decorator", "(", "wrapped", ")", ":", "@", "functools", ".", "wraps", "(", "wrapped", ")", "def", "positional_wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "args", ")", ">", "max_positional_args", ")", ":", "plural_s", "=", "''", "if", "(", "max_positional_args", "!=", "1", ")", ":", "plural_s", "=", "'s'", "message", "=", "(", "'%s() takes at most %d positional argument%s (%d given)'", "%", "(", "wrapped", ".", "__name__", ",", "max_positional_args", ",", "plural_s", ",", "len", "(", "args", ")", ")", ")", "if", "(", "positional_parameters_enforcement", "==", "POSITIONAL_EXCEPTION", ")", ":", "raise", "TypeError", "(", "message", ")", "elif", "(", "positional_parameters_enforcement", "==", "POSITIONAL_WARNING", ")", ":", "logger", ".", "warning", "(", "message", ")", "else", ":", "pass", "return", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", "return", "positional_wrapper", "if", "isinstance", "(", "max_positional_args", ",", "six", ".", "integer_types", ")", ":", "return", "positional_decorator", "else", ":", "(", "args", ",", "_", ",", "_", ",", "defaults", ")", "=", "inspect", ".", "getargspec", "(", "max_positional_args", ")", "return", "positional", "(", "(", "len", "(", "args", ")", "-", "len", "(", "defaults", ")", ")", ")", "(", "max_positional_args", ")" ]
a decorator to declare that only the first n arguments may be positional .
train
true
15,902
def _get_playbook_name_from_file(path): return path.stem
[ "def", "_get_playbook_name_from_file", "(", "path", ")", ":", "return", "path", ".", "stem" ]
gets name of playbook from the filepath .
train
false
15,904
def _fuzzy_group(args, quick_exit=False): saw_other = False for a in args: if (a is True): continue if (a is None): return if (quick_exit and saw_other): return saw_other = True return (not saw_other)
[ "def", "_fuzzy_group", "(", "args", ",", "quick_exit", "=", "False", ")", ":", "saw_other", "=", "False", "for", "a", "in", "args", ":", "if", "(", "a", "is", "True", ")", ":", "continue", "if", "(", "a", "is", "None", ")", ":", "return", "if", "(", "quick_exit", "and", "saw_other", ")", ":", "return", "saw_other", "=", "True", "return", "(", "not", "saw_other", ")" ]
return true if all args are true .
train
false
15,907
def vgg_arg_scope(weight_decay=0.0005): with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn=tf.nn.relu, weights_regularizer=slim.l2_regularizer(weight_decay), biases_initializer=tf.zeros_initializer): with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc: return arg_sc
[ "def", "vgg_arg_scope", "(", "weight_decay", "=", "0.0005", ")", ":", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", ",", "slim", ".", "fully_connected", "]", ",", "activation_fn", "=", "tf", ".", "nn", ".", "relu", ",", "weights_regularizer", "=", "slim", ".", "l2_regularizer", "(", "weight_decay", ")", ",", "biases_initializer", "=", "tf", ".", "zeros_initializer", ")", ":", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", "]", ",", "padding", "=", "'SAME'", ")", "as", "arg_sc", ":", "return", "arg_sc" ]
defines the vgg arg scope .
train
false
15,908
def writeChainTextWithNounMessage(fileName, procedure, shouldAnalyze=True): print '' print ('The %s tool is parsing the file:' % procedure) print os.path.basename(fileName) print '' startTime = time.time() fileNameSuffix = (((fileName[:fileName.rfind('.')] + '_') + procedure) + '.gcode') craftText = getChainText(fileName, procedure) if (craftText == ''): print 'Warning, there was no text output in writeChainTextWithNounMessage in skeinforge_craft for:' print fileName return archive.writeFileText(fileNameSuffix, craftText) window = None if shouldAnalyze: window = skeinforge_analyze.writeOutput(fileName, fileNameSuffix, fileNameSuffix, True, craftText) print '' print ('The %s tool has created the file:' % procedure) print fileNameSuffix print '' print ('It took %s to craft the file.' % euclidean.getDurationString((time.time() - startTime))) return window
[ "def", "writeChainTextWithNounMessage", "(", "fileName", ",", "procedure", ",", "shouldAnalyze", "=", "True", ")", ":", "print", "''", "print", "(", "'The %s tool is parsing the file:'", "%", "procedure", ")", "print", "os", ".", "path", ".", "basename", "(", "fileName", ")", "print", "''", "startTime", "=", "time", ".", "time", "(", ")", "fileNameSuffix", "=", "(", "(", "(", "fileName", "[", ":", "fileName", ".", "rfind", "(", "'.'", ")", "]", "+", "'_'", ")", "+", "procedure", ")", "+", "'.gcode'", ")", "craftText", "=", "getChainText", "(", "fileName", ",", "procedure", ")", "if", "(", "craftText", "==", "''", ")", ":", "print", "'Warning, there was no text output in writeChainTextWithNounMessage in skeinforge_craft for:'", "print", "fileName", "return", "archive", ".", "writeFileText", "(", "fileNameSuffix", ",", "craftText", ")", "window", "=", "None", "if", "shouldAnalyze", ":", "window", "=", "skeinforge_analyze", ".", "writeOutput", "(", "fileName", ",", "fileNameSuffix", ",", "fileNameSuffix", ",", "True", ",", "craftText", ")", "print", "''", "print", "(", "'The %s tool has created the file:'", "%", "procedure", ")", "print", "fileNameSuffix", "print", "''", "print", "(", "'It took %s to craft the file.'", "%", "euclidean", ".", "getDurationString", "(", "(", "time", ".", "time", "(", ")", "-", "startTime", ")", ")", ")", "return", "window" ]
get and write a crafted shape file .
train
false
15,909
def tzname_in_python2(myfunc): def inner_func(*args, **kwargs): if PY3: return myfunc(*args, **kwargs) else: return myfunc(*args, **kwargs).encode() return inner_func
[ "def", "tzname_in_python2", "(", "myfunc", ")", ":", "def", "inner_func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "PY3", ":", "return", "myfunc", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "myfunc", "(", "*", "args", ",", "**", "kwargs", ")", ".", "encode", "(", ")", "return", "inner_func" ]
change unicode output into bytestrings in python 2 tzname() api changed in python 3 .
train
true
15,912
def extractExpectedValue(value, expected): if expected: value = unArrayizeValue(value) if isNoneValue(value): value = None elif (expected == EXPECTED.BOOL): if isinstance(value, int): value = bool(value) elif isinstance(value, basestring): value = value.strip().lower() if (value in ('true', 'false')): value = (value == 'true') elif (value in ('1', '-1')): value = True elif (value == '0'): value = False else: value = None elif (expected == EXPECTED.INT): if isinstance(value, basestring): value = (int(value) if value.isdigit() else None) return value
[ "def", "extractExpectedValue", "(", "value", ",", "expected", ")", ":", "if", "expected", ":", "value", "=", "unArrayizeValue", "(", "value", ")", "if", "isNoneValue", "(", "value", ")", ":", "value", "=", "None", "elif", "(", "expected", "==", "EXPECTED", ".", "BOOL", ")", ":", "if", "isinstance", "(", "value", ",", "int", ")", ":", "value", "=", "bool", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "basestring", ")", ":", "value", "=", "value", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "(", "value", "in", "(", "'true'", ",", "'false'", ")", ")", ":", "value", "=", "(", "value", "==", "'true'", ")", "elif", "(", "value", "in", "(", "'1'", ",", "'-1'", ")", ")", ":", "value", "=", "True", "elif", "(", "value", "==", "'0'", ")", ":", "value", "=", "False", "else", ":", "value", "=", "None", "elif", "(", "expected", "==", "EXPECTED", ".", "INT", ")", ":", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "value", "=", "(", "int", "(", "value", ")", "if", "value", ".", "isdigit", "(", ")", "else", "None", ")", "return", "value" ]
extracts and returns expected value by a given type .
train
false
15,913
def day_frac(val1, val2, factor=1.0, divisor=1.0): (sum12, err12) = two_sum(val1, val2) if np.any((factor != 1.0)): (sum12, carry) = two_product(sum12, factor) carry += (err12 * factor) (sum12, err12) = two_sum(sum12, carry) if np.any((divisor != 1.0)): q1 = (sum12 / divisor) (p1, p2) = two_product(q1, divisor) (d1, d2) = two_sum(sum12, (- p1)) d2 += err12 d2 -= p2 q2 = ((d1 + d2) / divisor) (sum12, err12) = two_sum(q1, q2) day = np.round(sum12) (extra, frac) = two_sum(sum12, (- day)) frac += (extra + err12) return (day, frac)
[ "def", "day_frac", "(", "val1", ",", "val2", ",", "factor", "=", "1.0", ",", "divisor", "=", "1.0", ")", ":", "(", "sum12", ",", "err12", ")", "=", "two_sum", "(", "val1", ",", "val2", ")", "if", "np", ".", "any", "(", "(", "factor", "!=", "1.0", ")", ")", ":", "(", "sum12", ",", "carry", ")", "=", "two_product", "(", "sum12", ",", "factor", ")", "carry", "+=", "(", "err12", "*", "factor", ")", "(", "sum12", ",", "err12", ")", "=", "two_sum", "(", "sum12", ",", "carry", ")", "if", "np", ".", "any", "(", "(", "divisor", "!=", "1.0", ")", ")", ":", "q1", "=", "(", "sum12", "/", "divisor", ")", "(", "p1", ",", "p2", ")", "=", "two_product", "(", "q1", ",", "divisor", ")", "(", "d1", ",", "d2", ")", "=", "two_sum", "(", "sum12", ",", "(", "-", "p1", ")", ")", "d2", "+=", "err12", "d2", "-=", "p2", "q2", "=", "(", "(", "d1", "+", "d2", ")", "/", "divisor", ")", "(", "sum12", ",", "err12", ")", "=", "two_sum", "(", "q1", ",", "q2", ")", "day", "=", "np", ".", "round", "(", "sum12", ")", "(", "extra", ",", "frac", ")", "=", "two_sum", "(", "sum12", ",", "(", "-", "day", ")", ")", "frac", "+=", "(", "extra", "+", "err12", ")", "return", "(", "day", ",", "frac", ")" ]
return the sum of val1 and val2 as two float64s .
train
false
15,914
def get_related_units(unitdata): related_units = Unit.objects.filter(contentsum=unitdata.contentsum, translation__subproject__project=unitdata.project) if (unitdata.language is not None): related_units = related_units.filter(translation__language=unitdata.language) return related_units.select_related('translation__subproject__project', 'translation__language')
[ "def", "get_related_units", "(", "unitdata", ")", ":", "related_units", "=", "Unit", ".", "objects", ".", "filter", "(", "contentsum", "=", "unitdata", ".", "contentsum", ",", "translation__subproject__project", "=", "unitdata", ".", "project", ")", "if", "(", "unitdata", ".", "language", "is", "not", "None", ")", ":", "related_units", "=", "related_units", ".", "filter", "(", "translation__language", "=", "unitdata", ".", "language", ")", "return", "related_units", ".", "select_related", "(", "'translation__subproject__project'", ",", "'translation__language'", ")" ]
returns queryset with related units .
train
false
15,915
def get_coffee(): logger.info(get_coffee.__doc__)
[ "def", "get_coffee", "(", ")", ":", "logger", ".", "info", "(", "get_coffee", ".", "__doc__", ")" ]
jgs ""-- .
train
false
15,916
def getLinkedElementNode(idSuffix, parentNode, target): linkedElementNode = xml_simple_reader.ElementNode() euclidean.overwriteDictionary(target.attributes, ['id', 'name', 'quantity'], linkedElementNode.attributes) linkedElementNode.addSuffixToID(idSuffix) tagKeys = target.getTagKeys() tagKeys.append('carve') tagKeys.sort() tags = ', '.join(tagKeys) linkedElementNode.attributes['tags'] = tags linkedElementNode.setParentAddToChildNodes(parentNode) linkedElementNode.addToIdentifierDictionaries() return linkedElementNode
[ "def", "getLinkedElementNode", "(", "idSuffix", ",", "parentNode", ",", "target", ")", ":", "linkedElementNode", "=", "xml_simple_reader", ".", "ElementNode", "(", ")", "euclidean", ".", "overwriteDictionary", "(", "target", ".", "attributes", ",", "[", "'id'", ",", "'name'", ",", "'quantity'", "]", ",", "linkedElementNode", ".", "attributes", ")", "linkedElementNode", ".", "addSuffixToID", "(", "idSuffix", ")", "tagKeys", "=", "target", ".", "getTagKeys", "(", ")", "tagKeys", ".", "append", "(", "'carve'", ")", "tagKeys", ".", "sort", "(", ")", "tags", "=", "', '", ".", "join", "(", "tagKeys", ")", "linkedElementNode", ".", "attributes", "[", "'tags'", "]", "=", "tags", "linkedElementNode", ".", "setParentAddToChildNodes", "(", "parentNode", ")", "linkedElementNode", ".", "addToIdentifierDictionaries", "(", ")", "return", "linkedElementNode" ]
get elementnode with identifiers and parentnode .
train
false
15,917
def reset_alarm(**kwargs): service_instance = kwargs.get('service_instance') payload = _build_payload(**kwargs) logging.debug(payload) session = service_instance._stub if (not _send_request(payload, session)): return False return True
[ "def", "reset_alarm", "(", "**", "kwargs", ")", ":", "service_instance", "=", "kwargs", ".", "get", "(", "'service_instance'", ")", "payload", "=", "_build_payload", "(", "**", "kwargs", ")", "logging", ".", "debug", "(", "payload", ")", "session", "=", "service_instance", ".", "_stub", "if", "(", "not", "_send_request", "(", "payload", ",", "session", ")", ")", ":", "return", "False", "return", "True" ]
resets an alarm on a given hostsystem in a vcenter to the green state without someone having to log in to do it manually .
train
false
15,918
def cint(s): try: num = int(float(s)) except: num = 0 return num
[ "def", "cint", "(", "s", ")", ":", "try", ":", "num", "=", "int", "(", "float", "(", "s", ")", ")", "except", ":", "num", "=", "0", "return", "num" ]
convert to integer .
train
false
15,919
def _next_int(file): return _unpack('!I', file.read(4))[0]
[ "def", "_next_int", "(", "file", ")", ":", "return", "_unpack", "(", "'!I'", ",", "file", ".", "read", "(", "4", ")", ")", "[", "0", "]" ]
read the next big-endian 4-byte unsigned int from a file .
train
false
15,920
@pytest.mark.django_db def test_new_product_admin_form_renders(rf, client, admin_user): request = rf.get('/') request.user = admin_user request.session = client.session view = ProductEditView.as_view() shop = get_default_shop() supplier = get_simple_supplier() supplier.stock_managed = True supplier.save() view(request).render() supplier.stock_managed = False supplier.save() view(request).render()
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_new_product_admin_form_renders", "(", "rf", ",", "client", ",", "admin_user", ")", ":", "request", "=", "rf", ".", "get", "(", "'/'", ")", "request", ".", "user", "=", "admin_user", "request", ".", "session", "=", "client", ".", "session", "view", "=", "ProductEditView", ".", "as_view", "(", ")", "shop", "=", "get_default_shop", "(", ")", "supplier", "=", "get_simple_supplier", "(", ")", "supplier", ".", "stock_managed", "=", "True", "supplier", ".", "save", "(", ")", "view", "(", "request", ")", ".", "render", "(", ")", "supplier", ".", "stock_managed", "=", "False", "supplier", ".", "save", "(", ")", "view", "(", "request", ")", ".", "render", "(", ")" ]
make sure that no exceptions are raised when creating a new product with simple supplier enabled .
train
false
15,921
def mkpath(name, mode=511, verbose=1, dry_run=0): global _path_created if (not isinstance(name, StringTypes)): raise DistutilsInternalError, ("mkpath: 'name' must be a string (got %r)" % (name,)) name = os.path.normpath(name) created_dirs = [] if (os.path.isdir(name) or (name == '')): return created_dirs if _path_created.get(os.path.abspath(name)): return created_dirs (head, tail) = os.path.split(name) tails = [tail] while (head and tail and (not os.path.isdir(head))): (head, tail) = os.path.split(head) tails.insert(0, tail) for d in tails: head = os.path.join(head, d) abs_head = os.path.abspath(head) if _path_created.get(abs_head): continue if (verbose >= 1): log.info('creating %s', head) if (not dry_run): try: os.mkdir(head) created_dirs.append(head) except OSError as exc: raise DistutilsFileError, ("could not create '%s': %s" % (head, exc[(-1)])) _path_created[abs_head] = 1 return created_dirs
[ "def", "mkpath", "(", "name", ",", "mode", "=", "511", ",", "verbose", "=", "1", ",", "dry_run", "=", "0", ")", ":", "global", "_path_created", "if", "(", "not", "isinstance", "(", "name", ",", "StringTypes", ")", ")", ":", "raise", "DistutilsInternalError", ",", "(", "\"mkpath: 'name' must be a string (got %r)\"", "%", "(", "name", ",", ")", ")", "name", "=", "os", ".", "path", ".", "normpath", "(", "name", ")", "created_dirs", "=", "[", "]", "if", "(", "os", ".", "path", ".", "isdir", "(", "name", ")", "or", "(", "name", "==", "''", ")", ")", ":", "return", "created_dirs", "if", "_path_created", ".", "get", "(", "os", ".", "path", ".", "abspath", "(", "name", ")", ")", ":", "return", "created_dirs", "(", "head", ",", "tail", ")", "=", "os", ".", "path", ".", "split", "(", "name", ")", "tails", "=", "[", "tail", "]", "while", "(", "head", "and", "tail", "and", "(", "not", "os", ".", "path", ".", "isdir", "(", "head", ")", ")", ")", ":", "(", "head", ",", "tail", ")", "=", "os", ".", "path", ".", "split", "(", "head", ")", "tails", ".", "insert", "(", "0", ",", "tail", ")", "for", "d", "in", "tails", ":", "head", "=", "os", ".", "path", ".", "join", "(", "head", ",", "d", ")", "abs_head", "=", "os", ".", "path", ".", "abspath", "(", "head", ")", "if", "_path_created", ".", "get", "(", "abs_head", ")", ":", "continue", "if", "(", "verbose", ">=", "1", ")", ":", "log", ".", "info", "(", "'creating %s'", ",", "head", ")", "if", "(", "not", "dry_run", ")", ":", "try", ":", "os", ".", "mkdir", "(", "head", ")", "created_dirs", ".", "append", "(", "head", ")", "except", "OSError", "as", "exc", ":", "raise", "DistutilsFileError", ",", "(", "\"could not create '%s': %s\"", "%", "(", "head", ",", "exc", "[", "(", "-", "1", ")", "]", ")", ")", "_path_created", "[", "abs_head", "]", "=", "1", "return", "created_dirs" ]
build os-dependent paths properly .
train
false
15,922
def object_class(type): return _TYPE_MAP.get(type, None)
[ "def", "object_class", "(", "type", ")", ":", "return", "_TYPE_MAP", ".", "get", "(", "type", ",", "None", ")" ]
get the object class corresponding to the given type .
train
false
15,923
def configure_listener(mapper, class_): for col_attr in mapper.column_attrs: column = col_attr.columns[0] if (column.default is not None): default_listener(col_attr, column.default)
[ "def", "configure_listener", "(", "mapper", ",", "class_", ")", ":", "for", "col_attr", "in", "mapper", ".", "column_attrs", ":", "column", "=", "col_attr", ".", "columns", "[", "0", "]", "if", "(", "column", ".", "default", "is", "not", "None", ")", ":", "default_listener", "(", "col_attr", ",", "column", ".", "default", ")" ]
establish attribute setters for every default-holding column on the given mapper .
train
false
15,924
def brevity_penalty(closest_ref_len, hyp_len): if (hyp_len > closest_ref_len): return 1 elif (hyp_len == 0): return 0 else: return math.exp((1 - (closest_ref_len / hyp_len)))
[ "def", "brevity_penalty", "(", "closest_ref_len", ",", "hyp_len", ")", ":", "if", "(", "hyp_len", ">", "closest_ref_len", ")", ":", "return", "1", "elif", "(", "hyp_len", "==", "0", ")", ":", "return", "0", "else", ":", "return", "math", ".", "exp", "(", "(", "1", "-", "(", "closest_ref_len", "/", "hyp_len", ")", ")", ")" ]
calculate brevity penalty .
train
false
15,925
@register.simple_tag(takes_context=True) def fields_for(context, form, template=u'includes/form_fields.html'): context[u'form_for_fields'] = form return get_template(template).render(context)
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "fields_for", "(", "context", ",", "form", ",", "template", "=", "u'includes/form_fields.html'", ")", ":", "context", "[", "u'form_for_fields'", "]", "=", "form", "return", "get_template", "(", "template", ")", ".", "render", "(", "context", ")" ]
renders fields for a form with an optional template choice .
train
false
15,926
def get_member_definitions(group, include_failed=False): return [(resource.name, resource.t) for resource in get_members(group, include_failed)]
[ "def", "get_member_definitions", "(", "group", ",", "include_failed", "=", "False", ")", ":", "return", "[", "(", "resource", ".", "name", ",", "resource", ".", "t", ")", "for", "resource", "in", "get_members", "(", "group", ",", "include_failed", ")", "]" ]
get member definitions in pair for group .
train
false
15,927
def get_application_id(): return app_identity.get_application_id()
[ "def", "get_application_id", "(", ")", ":", "return", "app_identity", ".", "get_application_id", "(", ")" ]
get the application id of an app .
train
false
15,928
def archive_deleted_rows(max_rows=None): table_to_rows_archived = {} total_rows_archived = 0 meta = MetaData(get_engine(use_slave=True)) meta.reflect() for table in reversed(meta.sorted_tables): tablename = table.name if ((tablename == 'migrate_version') or tablename.startswith(_SHADOW_TABLE_PREFIX)): continue rows_archived = _archive_deleted_rows_for_table(tablename, max_rows=(max_rows - total_rows_archived)) total_rows_archived += rows_archived if rows_archived: table_to_rows_archived[tablename] = rows_archived if (total_rows_archived >= max_rows): break return table_to_rows_archived
[ "def", "archive_deleted_rows", "(", "max_rows", "=", "None", ")", ":", "table_to_rows_archived", "=", "{", "}", "total_rows_archived", "=", "0", "meta", "=", "MetaData", "(", "get_engine", "(", "use_slave", "=", "True", ")", ")", "meta", ".", "reflect", "(", ")", "for", "table", "in", "reversed", "(", "meta", ".", "sorted_tables", ")", ":", "tablename", "=", "table", ".", "name", "if", "(", "(", "tablename", "==", "'migrate_version'", ")", "or", "tablename", ".", "startswith", "(", "_SHADOW_TABLE_PREFIX", ")", ")", ":", "continue", "rows_archived", "=", "_archive_deleted_rows_for_table", "(", "tablename", ",", "max_rows", "=", "(", "max_rows", "-", "total_rows_archived", ")", ")", "total_rows_archived", "+=", "rows_archived", "if", "rows_archived", ":", "table_to_rows_archived", "[", "tablename", "]", "=", "rows_archived", "if", "(", "total_rows_archived", ">=", "max_rows", ")", ":", "break", "return", "table_to_rows_archived" ]
move up to max_rows rows from production tables to corresponding shadow tables .
train
false
15,929
def update_course_updates(location, update, passed_id=None, user=None): try: course_updates = modulestore().get_item(location) except ItemNotFoundError: course_updates = modulestore().create_item(user.id, location.course_key, location.block_type, location.block_id) course_update_items = list(reversed(get_course_update_items(course_updates))) if (passed_id is not None): passed_index = _get_index(passed_id) if (0 < passed_index <= len(course_update_items)): course_update_dict = course_update_items[(passed_index - 1)] course_update_dict['date'] = update['date'] course_update_dict['content'] = update['content'] course_update_items[(passed_index - 1)] = course_update_dict else: return HttpResponseBadRequest(_('Invalid course update id.')) else: course_update_dict = {'id': (len(course_update_items) + 1), 'date': update['date'], 'content': update['content'], 'status': CourseInfoModule.STATUS_VISIBLE} course_update_items.append(course_update_dict) enqueue_push_course_update(update, location.course_key) save_course_update_items(location, course_updates, course_update_items, user) if ('status' in course_update_dict): del course_update_dict['status'] return course_update_dict
[ "def", "update_course_updates", "(", "location", ",", "update", ",", "passed_id", "=", "None", ",", "user", "=", "None", ")", ":", "try", ":", "course_updates", "=", "modulestore", "(", ")", ".", "get_item", "(", "location", ")", "except", "ItemNotFoundError", ":", "course_updates", "=", "modulestore", "(", ")", ".", "create_item", "(", "user", ".", "id", ",", "location", ".", "course_key", ",", "location", ".", "block_type", ",", "location", ".", "block_id", ")", "course_update_items", "=", "list", "(", "reversed", "(", "get_course_update_items", "(", "course_updates", ")", ")", ")", "if", "(", "passed_id", "is", "not", "None", ")", ":", "passed_index", "=", "_get_index", "(", "passed_id", ")", "if", "(", "0", "<", "passed_index", "<=", "len", "(", "course_update_items", ")", ")", ":", "course_update_dict", "=", "course_update_items", "[", "(", "passed_index", "-", "1", ")", "]", "course_update_dict", "[", "'date'", "]", "=", "update", "[", "'date'", "]", "course_update_dict", "[", "'content'", "]", "=", "update", "[", "'content'", "]", "course_update_items", "[", "(", "passed_index", "-", "1", ")", "]", "=", "course_update_dict", "else", ":", "return", "HttpResponseBadRequest", "(", "_", "(", "'Invalid course update id.'", ")", ")", "else", ":", "course_update_dict", "=", "{", "'id'", ":", "(", "len", "(", "course_update_items", ")", "+", "1", ")", ",", "'date'", ":", "update", "[", "'date'", "]", ",", "'content'", ":", "update", "[", "'content'", "]", ",", "'status'", ":", "CourseInfoModule", ".", "STATUS_VISIBLE", "}", "course_update_items", ".", "append", "(", "course_update_dict", ")", "enqueue_push_course_update", "(", "update", ",", "location", ".", "course_key", ")", "save_course_update_items", "(", "location", ",", "course_updates", ",", "course_update_items", ",", "user", ")", "if", "(", "'status'", "in", "course_update_dict", ")", ":", "del", "course_update_dict", "[", "'status'", "]", "return", "course_update_dict" ]
either add or update the given course update .
train
false
15,930
def get_or_create_storage_file(node, path, **kwargs): try: return OsfStorageGuidFile.find_one((Q('node', 'eq', node) & Q('path', 'eq', path))) except modm_errors.ModularOdmException as error: obj = OsfStorageGuidFile(node=node, path=path, **kwargs) obj.save() return obj
[ "def", "get_or_create_storage_file", "(", "node", ",", "path", ",", "**", "kwargs", ")", ":", "try", ":", "return", "OsfStorageGuidFile", ".", "find_one", "(", "(", "Q", "(", "'node'", ",", "'eq'", ",", "node", ")", "&", "Q", "(", "'path'", ",", "'eq'", ",", "path", ")", ")", ")", "except", "modm_errors", ".", "ModularOdmException", "as", "error", ":", "obj", "=", "OsfStorageGuidFile", "(", "node", "=", "node", ",", "path", "=", "path", ",", "**", "kwargs", ")", "obj", ".", "save", "(", ")", "return", "obj" ]
get or create osfstorageguidfile record .
train
false
15,932
def parse_key_value_list(kv_string_list, error_fmt, error_func): log.warning('parse_key_value_list() is deprecated and will be removed in v0.6.0') ret = {} for value in kv_string_list: try: (k, v) = value.split('=', 1) ret[k] = v except ValueError: error_func((error_fmt % (value,))) return ret
[ "def", "parse_key_value_list", "(", "kv_string_list", ",", "error_fmt", ",", "error_func", ")", ":", "log", ".", "warning", "(", "'parse_key_value_list() is deprecated and will be removed in v0.6.0'", ")", "ret", "=", "{", "}", "for", "value", "in", "kv_string_list", ":", "try", ":", "(", "k", ",", "v", ")", "=", "value", ".", "split", "(", "'='", ",", "1", ")", "ret", "[", "k", "]", "=", "v", "except", "ValueError", ":", "error_func", "(", "(", "error_fmt", "%", "(", "value", ",", ")", ")", ")", "return", "ret" ]
parse a list of strings like key=value into a dictionary .
train
false
15,934
def create_test_blockdevice_volume_for_dataset_id(dataset_id, attached_to=None): return BlockDeviceVolume(blockdevice_id=_create_blockdevice_id_for_test(dataset_id), size=int(REALISTIC_BLOCKDEVICE_SIZE.to_Byte()), attached_to=attached_to, dataset_id=UUID(dataset_id))
[ "def", "create_test_blockdevice_volume_for_dataset_id", "(", "dataset_id", ",", "attached_to", "=", "None", ")", ":", "return", "BlockDeviceVolume", "(", "blockdevice_id", "=", "_create_blockdevice_id_for_test", "(", "dataset_id", ")", ",", "size", "=", "int", "(", "REALISTIC_BLOCKDEVICE_SIZE", ".", "to_Byte", "(", ")", ")", ",", "attached_to", "=", "attached_to", ",", "dataset_id", "=", "UUID", "(", "dataset_id", ")", ")" ]
create a fake blockdevicevolume for the given dataset_id .
train
false
15,935
def current_time(): return time.time()
[ "def", "current_time", "(", ")", ":", "return", "time", ".", "time", "(", ")" ]
return the time in milliseconds since the epoch as a floating point number .
train
false
15,936
@cronjobs.register def send_postatus_errors(): if settings.STAGE: return def new_section(line): return (line.startswith('dennis ') or line.startswith('Totals') or line.startswith('BUSTED') or line.startswith('COMPILED')) postatus = requests.get('https://support.mozilla.org/media/postatus.txt') lines = postatus.content.splitlines() datestamp = lines.pop(0) errordata = [] while lines: line = lines.pop(0) if line.startswith('>>> '): while (lines and (not new_section(line))): errordata.append(line) line = lines.pop(0) if errordata: mail_admins(subject=('[SUMO] postatus errors %s' % datestamp), message=((('These are the errors in the SUMO postatus file.\n' + 'See http://postatus.paas.allizom.org/p/SUMO for details\n') + 'and bug generation links.\n\n') + '\n'.join(errordata)))
[ "@", "cronjobs", ".", "register", "def", "send_postatus_errors", "(", ")", ":", "if", "settings", ".", "STAGE", ":", "return", "def", "new_section", "(", "line", ")", ":", "return", "(", "line", ".", "startswith", "(", "'dennis '", ")", "or", "line", ".", "startswith", "(", "'Totals'", ")", "or", "line", ".", "startswith", "(", "'BUSTED'", ")", "or", "line", ".", "startswith", "(", "'COMPILED'", ")", ")", "postatus", "=", "requests", ".", "get", "(", "'https://support.mozilla.org/media/postatus.txt'", ")", "lines", "=", "postatus", ".", "content", ".", "splitlines", "(", ")", "datestamp", "=", "lines", ".", "pop", "(", "0", ")", "errordata", "=", "[", "]", "while", "lines", ":", "line", "=", "lines", ".", "pop", "(", "0", ")", "if", "line", ".", "startswith", "(", "'>>> '", ")", ":", "while", "(", "lines", "and", "(", "not", "new_section", "(", "line", ")", ")", ")", ":", "errordata", ".", "append", "(", "line", ")", "line", "=", "lines", ".", "pop", "(", "0", ")", "if", "errordata", ":", "mail_admins", "(", "subject", "=", "(", "'[SUMO] postatus errors %s'", "%", "datestamp", ")", ",", "message", "=", "(", "(", "(", "'These are the errors in the SUMO postatus file.\\n'", "+", "'See http://postatus.paas.allizom.org/p/SUMO for details\\n'", ")", "+", "'and bug generation links.\\n\\n'", ")", "+", "'\\n'", ".", "join", "(", "errordata", ")", ")", ")" ]
looks at postatus file and sends an email with errors .
train
false
15,937
def getIsWiddershinsByVector3(polygon): return isWiddershins(getComplexPath(polygon))
[ "def", "getIsWiddershinsByVector3", "(", "polygon", ")", ":", "return", "isWiddershins", "(", "getComplexPath", "(", "polygon", ")", ")" ]
determine if the polygon goes round in the widdershins direction .
train
false
15,938
def search_year(string): match = re.search(u'[^0-9]([0-9]{4})[^0-9]', string) if match: year = int(match.group(1)) if valid_year(year): return (year, match.span(1)) return (None, None)
[ "def", "search_year", "(", "string", ")", ":", "match", "=", "re", ".", "search", "(", "u'[^0-9]([0-9]{4})[^0-9]'", ",", "string", ")", "if", "match", ":", "year", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "if", "valid_year", "(", "year", ")", ":", "return", "(", "year", ",", "match", ".", "span", "(", "1", ")", ")", "return", "(", "None", ",", "None", ")" ]
looks for year patterns .
train
false
15,939
def test_generate_pipeline_code(): pipeline = ['KNeighborsClassifier', ['CombineDFs', ['GradientBoostingClassifier', 'input_matrix', 38.0, 0.87], ['GaussianNB', ['ZeroCount', 'input_matrix']]], 18, 33] expected_code = 'make_pipeline(\n make_union(\n make_union(VotingClassifier([(\'branch\',\n GradientBoostingClassifier(learning_rate=1.0, max_features=1.0, n_estimators=500)\n )]), FunctionTransformer(lambda X: X)),\n make_union(VotingClassifier([(\'branch\',\n make_pipeline(\n ZeroCount(),\n GaussianNB()\n )\n )]), FunctionTransformer(lambda X: X))\n ),\n KNeighborsClassifier(n_neighbors=5, weights="distance")\n)' assert (expected_code == generate_pipeline_code(pipeline))
[ "def", "test_generate_pipeline_code", "(", ")", ":", "pipeline", "=", "[", "'KNeighborsClassifier'", ",", "[", "'CombineDFs'", ",", "[", "'GradientBoostingClassifier'", ",", "'input_matrix'", ",", "38.0", ",", "0.87", "]", ",", "[", "'GaussianNB'", ",", "[", "'ZeroCount'", ",", "'input_matrix'", "]", "]", "]", ",", "18", ",", "33", "]", "expected_code", "=", "'make_pipeline(\\n make_union(\\n make_union(VotingClassifier([(\\'branch\\',\\n GradientBoostingClassifier(learning_rate=1.0, max_features=1.0, n_estimators=500)\\n )]), FunctionTransformer(lambda X: X)),\\n make_union(VotingClassifier([(\\'branch\\',\\n make_pipeline(\\n ZeroCount(),\\n GaussianNB()\\n )\\n )]), FunctionTransformer(lambda X: X))\\n ),\\n KNeighborsClassifier(n_neighbors=5, weights=\"distance\")\\n)'", "assert", "(", "expected_code", "==", "generate_pipeline_code", "(", "pipeline", ")", ")" ]
assert that generate_pipeline_code() returns the correct code given a specific pipeline .
train
false
15,943
def is_old_env(): env_exists = exists('~/env') if (not env_exists): return False with settings(warn_only=True): is_link = run('readlink ~/env') if (is_link.return_code == 0): return False return True
[ "def", "is_old_env", "(", ")", ":", "env_exists", "=", "exists", "(", "'~/env'", ")", "if", "(", "not", "env_exists", ")", ":", "return", "False", "with", "settings", "(", "warn_only", "=", "True", ")", ":", "is_link", "=", "run", "(", "'readlink ~/env'", ")", "if", "(", "is_link", ".", "return_code", "==", "0", ")", ":", "return", "False", "return", "True" ]
return true if ~/env is old-style or false if new style .
train
false
15,944
def dup_normal(f, K): return dup_strip([K.normal(c) for c in f])
[ "def", "dup_normal", "(", "f", ",", "K", ")", ":", "return", "dup_strip", "(", "[", "K", ".", "normal", "(", "c", ")", "for", "c", "in", "f", "]", ")" ]
normalize univariate polynomial in the given domain .
train
false
15,948
def remove_version_from_href(href): parsed_url = urllib.parse.urlsplit(href) url_parts = parsed_url.path.split('/', 2) expression = re.compile('^v([0-9]+|[0-9]+\\.[0-9]+)(/.*|$)') for x in range(len(url_parts)): if expression.match(url_parts[x]): del url_parts[x] break new_path = '/'.join(url_parts) if (new_path == parsed_url.path): msg = ('href %s does not contain version' % href) LOG.debug(msg) raise ValueError(msg) parsed_url = list(parsed_url) parsed_url[2] = new_path return urllib.parse.urlunsplit(parsed_url)
[ "def", "remove_version_from_href", "(", "href", ")", ":", "parsed_url", "=", "urllib", ".", "parse", ".", "urlsplit", "(", "href", ")", "url_parts", "=", "parsed_url", ".", "path", ".", "split", "(", "'/'", ",", "2", ")", "expression", "=", "re", ".", "compile", "(", "'^v([0-9]+|[0-9]+\\\\.[0-9]+)(/.*|$)'", ")", "for", "x", "in", "range", "(", "len", "(", "url_parts", ")", ")", ":", "if", "expression", ".", "match", "(", "url_parts", "[", "x", "]", ")", ":", "del", "url_parts", "[", "x", "]", "break", "new_path", "=", "'/'", ".", "join", "(", "url_parts", ")", "if", "(", "new_path", "==", "parsed_url", ".", "path", ")", ":", "msg", "=", "(", "'href %s does not contain version'", "%", "href", ")", "LOG", ".", "debug", "(", "msg", ")", "raise", "ValueError", "(", "msg", ")", "parsed_url", "=", "list", "(", "parsed_url", ")", "parsed_url", "[", "2", "]", "=", "new_path", "return", "urllib", ".", "parse", ".", "urlunsplit", "(", "parsed_url", ")" ]
removes the first api version from the href .
train
false
15,949
def redirect_view(request): if request.GET: from urllib import urlencode query = ('?' + urlencode(request.GET, True)) else: query = '' return HttpResponseRedirect(('/test_client/get_view/' + query))
[ "def", "redirect_view", "(", "request", ")", ":", "if", "request", ".", "GET", ":", "from", "urllib", "import", "urlencode", "query", "=", "(", "'?'", "+", "urlencode", "(", "request", ".", "GET", ",", "True", ")", ")", "else", ":", "query", "=", "''", "return", "HttpResponseRedirect", "(", "(", "'/test_client/get_view/'", "+", "query", ")", ")" ]
redirect all requests that come here to an api call with a view parameter .
train
false
15,950
def set_bits(data, value, offset, bits=1): mask = (((1 << bits) - 1) << offset) clear = (65535 ^ mask) data = ((data & clear) | ((value << offset) & mask)) return data
[ "def", "set_bits", "(", "data", ",", "value", ",", "offset", ",", "bits", "=", "1", ")", ":", "mask", "=", "(", "(", "(", "1", "<<", "bits", ")", "-", "1", ")", "<<", "offset", ")", "clear", "=", "(", "65535", "^", "mask", ")", "data", "=", "(", "(", "data", "&", "clear", ")", "|", "(", "(", "value", "<<", "offset", ")", "&", "mask", ")", ")", "return", "data" ]
set specified bits in integer .
train
false
15,951
def _to_fixed_ori(forward): if ((not forward['surf_ori']) or is_fixed_orient(forward)): raise ValueError('Only surface-oriented, free-orientation forward solutions can be converted to fixed orientaton') forward['sol']['data'] = forward['sol']['data'][:, 2::3] forward['sol']['ncol'] = (forward['sol']['ncol'] / 3) forward['source_ori'] = FIFF.FIFFV_MNE_FIXED_ORI logger.info(' Converted the forward solution into the fixed-orientation mode.') return forward
[ "def", "_to_fixed_ori", "(", "forward", ")", ":", "if", "(", "(", "not", "forward", "[", "'surf_ori'", "]", ")", "or", "is_fixed_orient", "(", "forward", ")", ")", ":", "raise", "ValueError", "(", "'Only surface-oriented, free-orientation forward solutions can be converted to fixed orientaton'", ")", "forward", "[", "'sol'", "]", "[", "'data'", "]", "=", "forward", "[", "'sol'", "]", "[", "'data'", "]", "[", ":", ",", "2", ":", ":", "3", "]", "forward", "[", "'sol'", "]", "[", "'ncol'", "]", "=", "(", "forward", "[", "'sol'", "]", "[", "'ncol'", "]", "/", "3", ")", "forward", "[", "'source_ori'", "]", "=", "FIFF", ".", "FIFFV_MNE_FIXED_ORI", "logger", ".", "info", "(", "' Converted the forward solution into the fixed-orientation mode.'", ")", "return", "forward" ]
convert the forward solution to fixed ori from free .
train
false
15,952
def osquery_registry(attrs=None, where=None): return _osquery_cmd(table='osquery_registry', attrs=attrs, where=where)
[ "def", "osquery_registry", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'osquery_registry'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return osquery_registry information from osquery cli example: .
train
false
15,953
def test_clear_keystring(qtbot, keyparser): keyparser._keystring = 'test' with qtbot.waitSignal(keyparser.keystring_updated): keyparser.clear_keystring() assert (keyparser._keystring == '')
[ "def", "test_clear_keystring", "(", "qtbot", ",", "keyparser", ")", ":", "keyparser", ".", "_keystring", "=", "'test'", "with", "qtbot", ".", "waitSignal", "(", "keyparser", ".", "keystring_updated", ")", ":", "keyparser", ".", "clear_keystring", "(", ")", "assert", "(", "keyparser", ".", "_keystring", "==", "''", ")" ]
test that the keystring is cleared and the signal is emitted .
train
false
15,955
def batch_transformer(U, thetas, out_size, name='BatchSpatialTransformer'): with tf.variable_scope(name): (num_batch, num_transforms) = map(int, thetas.get_shape().as_list()[:2]) indices = [([i] * num_transforms) for i in xrange(num_batch)] input_repeated = tf.gather(U, tf.reshape(indices, [(-1)])) return transformer(input_repeated, thetas, out_size)
[ "def", "batch_transformer", "(", "U", ",", "thetas", ",", "out_size", ",", "name", "=", "'BatchSpatialTransformer'", ")", ":", "with", "tf", ".", "variable_scope", "(", "name", ")", ":", "(", "num_batch", ",", "num_transforms", ")", "=", "map", "(", "int", ",", "thetas", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "[", ":", "2", "]", ")", "indices", "=", "[", "(", "[", "i", "]", "*", "num_transforms", ")", "for", "i", "in", "xrange", "(", "num_batch", ")", "]", "input_repeated", "=", "tf", ".", "gather", "(", "U", ",", "tf", ".", "reshape", "(", "indices", ",", "[", "(", "-", "1", ")", "]", ")", ")", "return", "transformer", "(", "input_repeated", ",", "thetas", ",", "out_size", ")" ]
batch spatial transformer layer parameters u : float tensor of inputs [num_batch .
train
true
15,956
def cfg_from_file(filename): import yaml with open(filename, 'r') as f: yaml_cfg = edict(yaml.load(f)) _merge_a_into_b(yaml_cfg, __C)
[ "def", "cfg_from_file", "(", "filename", ")", ":", "import", "yaml", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "yaml_cfg", "=", "edict", "(", "yaml", ".", "load", "(", "f", ")", ")", "_merge_a_into_b", "(", "yaml_cfg", ",", "__C", ")" ]
load a config file and merge it into the default options .
train
false
15,957
def _set_collection_attributes(cls, roles, methods): for (method_name, (before, argument, after)) in methods.items(): setattr(cls, method_name, _instrument_membership_mutator(getattr(cls, method_name), before, argument, after)) for (role, method_name) in roles.items(): setattr(cls, ('_sa_%s' % role), getattr(cls, method_name)) cls._sa_adapter = None if (not hasattr(cls, '_sa_converter')): cls._sa_converter = None cls._sa_instrumented = id(cls)
[ "def", "_set_collection_attributes", "(", "cls", ",", "roles", ",", "methods", ")", ":", "for", "(", "method_name", ",", "(", "before", ",", "argument", ",", "after", ")", ")", "in", "methods", ".", "items", "(", ")", ":", "setattr", "(", "cls", ",", "method_name", ",", "_instrument_membership_mutator", "(", "getattr", "(", "cls", ",", "method_name", ")", ",", "before", ",", "argument", ",", "after", ")", ")", "for", "(", "role", ",", "method_name", ")", "in", "roles", ".", "items", "(", ")", ":", "setattr", "(", "cls", ",", "(", "'_sa_%s'", "%", "role", ")", ",", "getattr", "(", "cls", ",", "method_name", ")", ")", "cls", ".", "_sa_adapter", "=", "None", "if", "(", "not", "hasattr", "(", "cls", ",", "'_sa_converter'", ")", ")", ":", "cls", ".", "_sa_converter", "=", "None", "cls", ".", "_sa_instrumented", "=", "id", "(", "cls", ")" ]
apply ad-hoc instrumentation from decorators .
train
false
15,958
def get_oauth_consumer_key(): req = user_service_pb.CheckOAuthSignatureRequest() resp = user_service_pb.CheckOAuthSignatureResponse() try: apiproxy_stub_map.MakeSyncCall('user', 'CheckOAuthSignature', req, resp) except apiproxy_errors.ApplicationError as e: if (e.application_error == user_service_pb.UserServiceError.OAUTH_INVALID_REQUEST): raise InvalidOAuthParametersError(e.error_detail) elif (e.application_error == user_service_pb.UserServiceError.OAUTH_ERROR): raise OAuthServiceFailureError(e.error_detail) else: raise OAuthServiceFailureError(e.error_detail) return resp.oauth_consumer_key()
[ "def", "get_oauth_consumer_key", "(", ")", ":", "req", "=", "user_service_pb", ".", "CheckOAuthSignatureRequest", "(", ")", "resp", "=", "user_service_pb", ".", "CheckOAuthSignatureResponse", "(", ")", "try", ":", "apiproxy_stub_map", ".", "MakeSyncCall", "(", "'user'", ",", "'CheckOAuthSignature'", ",", "req", ",", "resp", ")", "except", "apiproxy_errors", ".", "ApplicationError", "as", "e", ":", "if", "(", "e", ".", "application_error", "==", "user_service_pb", ".", "UserServiceError", ".", "OAUTH_INVALID_REQUEST", ")", ":", "raise", "InvalidOAuthParametersError", "(", "e", ".", "error_detail", ")", "elif", "(", "e", ".", "application_error", "==", "user_service_pb", ".", "UserServiceError", ".", "OAUTH_ERROR", ")", ":", "raise", "OAuthServiceFailureError", "(", "e", ".", "error_detail", ")", "else", ":", "raise", "OAuthServiceFailureError", "(", "e", ".", "error_detail", ")", "return", "resp", ".", "oauth_consumer_key", "(", ")" ]
returns the value of the oauth_consumer_key parameter from the request .
train
false
15,959
def get_templates(): injected = {} for (name, data) in templates.items(): injected[name] = dict([(k, (v % env)) for (k, v) in data.items()]) return injected
[ "def", "get_templates", "(", ")", ":", "injected", "=", "{", "}", "for", "(", "name", ",", "data", ")", "in", "templates", ".", "items", "(", ")", ":", "injected", "[", "name", "]", "=", "dict", "(", "[", "(", "k", ",", "(", "v", "%", "env", ")", ")", "for", "(", "k", ",", "v", ")", "in", "data", ".", "items", "(", ")", "]", ")", "return", "injected" ]
returns each of the templates with env vars injected .
train
true
15,960
def nat_gateway_present(name, subnet_name=None, subnet_id=None, region=None, key=None, keyid=None, profile=None): ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} r = __salt__['boto_vpc.describe_nat_gateways'](subnet_name=subnet_name, subnet_id=subnet_id, region=region, key=key, keyid=keyid, profile=profile) if (not r): if __opts__['test']: msg = 'Nat gateway is set to be created.' ret['comment'] = msg ret['result'] = None return ret r = __salt__['boto_vpc.create_nat_gateway'](subnet_name=subnet_name, subnet_id=subnet_id, region=region, key=key, keyid=keyid, profile=profile) if (not r.get('created')): ret['result'] = False ret['comment'] = 'Failed to create nat gateway: {0}.'.format(r['error']['message']) return ret ret['changes']['old'] = {'nat_gateway': None} ret['changes']['new'] = {'nat_gateway': r['id']} ret['comment'] = 'Nat gateway created.' return ret inst = r[0] _id = inst.get('NatGatewayId') ret['comment'] = 'Nat gateway {0} present.'.format(_id) return ret
[ "def", "nat_gateway_present", "(", "name", ",", "subnet_name", "=", "None", ",", "subnet_id", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "r", "=", "__salt__", "[", "'boto_vpc.describe_nat_gateways'", "]", "(", "subnet_name", "=", "subnet_name", ",", "subnet_id", "=", "subnet_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "r", ")", ":", "if", "__opts__", "[", "'test'", "]", ":", "msg", "=", "'Nat gateway is set to be created.'", "ret", "[", "'comment'", "]", "=", "msg", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "r", "=", "__salt__", "[", "'boto_vpc.create_nat_gateway'", "]", "(", "subnet_name", "=", "subnet_name", ",", "subnet_id", "=", "subnet_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "r", ".", "get", "(", "'created'", ")", ")", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to create nat gateway: {0}.'", ".", "format", "(", "r", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "ret", "ret", "[", "'changes'", "]", "[", "'old'", "]", "=", "{", "'nat_gateway'", ":", "None", "}", "ret", "[", "'changes'", "]", "[", "'new'", "]", "=", "{", "'nat_gateway'", ":", "r", "[", "'id'", "]", "}", "ret", "[", "'comment'", "]", "=", "'Nat gateway created.'", "return", "ret", "inst", "=", "r", "[", "0", "]", "_id", "=", "inst", ".", "get", "(", "'NatGatewayId'", ")", "ret", "[", "'comment'", "]", "=", "'Nat gateway {0} present.'", ".", "format", "(", "_id", ")", "return", "ret" ]
ensure a nat gateway exists within the specified subnet this function requires boto3 .
train
true
15,961
def loadSupportedExtensions(): return ['.stl', '.obj', '.dae', '.amf']
[ "def", "loadSupportedExtensions", "(", ")", ":", "return", "[", "'.stl'", ",", "'.obj'", ",", "'.dae'", ",", "'.amf'", "]" ]
return a list of supported file extensions for loading .
train
false
15,962
def read_uic1tag(fh, byteorder, dtype, count, plane_count=None): assert ((dtype in ('2I', '1I')) and (byteorder == '<')) result = {} if (dtype == '2I'): values = fh.read_array('<u4', (2 * count)).reshape(count, 2) result = {'z_distance': (values[:, 0] / values[:, 1])} elif plane_count: for _ in range(count): tagid = struct.unpack('<I', fh.read(4))[0] if (tagid in (28, 29, 37, 40, 41)): fh.read(4) continue (name, value) = read_uic_tag(fh, tagid, plane_count, offset=True) result[name] = value return result
[ "def", "read_uic1tag", "(", "fh", ",", "byteorder", ",", "dtype", ",", "count", ",", "plane_count", "=", "None", ")", ":", "assert", "(", "(", "dtype", "in", "(", "'2I'", ",", "'1I'", ")", ")", "and", "(", "byteorder", "==", "'<'", ")", ")", "result", "=", "{", "}", "if", "(", "dtype", "==", "'2I'", ")", ":", "values", "=", "fh", ".", "read_array", "(", "'<u4'", ",", "(", "2", "*", "count", ")", ")", ".", "reshape", "(", "count", ",", "2", ")", "result", "=", "{", "'z_distance'", ":", "(", "values", "[", ":", ",", "0", "]", "/", "values", "[", ":", ",", "1", "]", ")", "}", "elif", "plane_count", ":", "for", "_", "in", "range", "(", "count", ")", ":", "tagid", "=", "struct", ".", "unpack", "(", "'<I'", ",", "fh", ".", "read", "(", "4", ")", ")", "[", "0", "]", "if", "(", "tagid", "in", "(", "28", ",", "29", ",", "37", ",", "40", ",", "41", ")", ")", ":", "fh", ".", "read", "(", "4", ")", "continue", "(", "name", ",", "value", ")", "=", "read_uic_tag", "(", "fh", ",", "tagid", ",", "plane_count", ",", "offset", "=", "True", ")", "result", "[", "name", "]", "=", "value", "return", "result" ]
read metamorph stk uic1tag from file and return as dictionary .
train
true
15,964
def set_main(key, value, path=MAIN_CF): (pairs, conf_list) = _parse_main(path) new_conf = [] if (key in pairs): for line in conf_list: if line.startswith(key): new_conf.append('{0} = {1}'.format(key, value)) else: new_conf.append(line) else: conf_list.append('{0} = {1}'.format(key, value)) new_conf = conf_list _write_conf(new_conf, path) return new_conf
[ "def", "set_main", "(", "key", ",", "value", ",", "path", "=", "MAIN_CF", ")", ":", "(", "pairs", ",", "conf_list", ")", "=", "_parse_main", "(", "path", ")", "new_conf", "=", "[", "]", "if", "(", "key", "in", "pairs", ")", ":", "for", "line", "in", "conf_list", ":", "if", "line", ".", "startswith", "(", "key", ")", ":", "new_conf", ".", "append", "(", "'{0} = {1}'", ".", "format", "(", "key", ",", "value", ")", ")", "else", ":", "new_conf", ".", "append", "(", "line", ")", "else", ":", "conf_list", ".", "append", "(", "'{0} = {1}'", ".", "format", "(", "key", ",", "value", ")", ")", "new_conf", "=", "conf_list", "_write_conf", "(", "new_conf", ",", "path", ")", "return", "new_conf" ]
set a single config value in the main .
train
false
15,965
def rgw_pools_missing(**kwargs): return ceph_cfg.rgw_pools_missing(**kwargs)
[ "def", "rgw_pools_missing", "(", "**", "kwargs", ")", ":", "return", "ceph_cfg", ".", "rgw_pools_missing", "(", "**", "kwargs", ")" ]
show pools missing for rgw cli example: .
train
false
15,966
def virtual_interface_get_by_address(context, address): return IMPL.virtual_interface_get_by_address(context, address)
[ "def", "virtual_interface_get_by_address", "(", "context", ",", "address", ")", ":", "return", "IMPL", ".", "virtual_interface_get_by_address", "(", "context", ",", "address", ")" ]
gets a virtual interface from the table .
train
false
15,967
def mat_slice_of_slice(parent, rowslice, colslice): row = slice_of_slice(parent.rowslice, rowslice) col = slice_of_slice(parent.colslice, colslice) return MatrixSlice(parent.parent, row, col)
[ "def", "mat_slice_of_slice", "(", "parent", ",", "rowslice", ",", "colslice", ")", ":", "row", "=", "slice_of_slice", "(", "parent", ".", "rowslice", ",", "rowslice", ")", "col", "=", "slice_of_slice", "(", "parent", ".", "colslice", ",", "colslice", ")", "return", "MatrixSlice", "(", "parent", ".", "parent", ",", "row", ",", "col", ")" ]
collapse nested matrix slices .
train
false
15,968
def _format_layoutlist(layout, indent=0, indent_size=2): script = [] for layout_elem in layout: (elem, opts) = layout_elem opts = (opts or {}) fopts = ' '.join(map(str, _format_optdict(opts, True, 'children'))) head = ('%s%s%s' % ((' ' * indent), elem, ((' %s' % fopts) if fopts else ''))) if ('children' in opts): script.append((head + ' -children {')) indent += indent_size (newscript, indent) = _format_layoutlist(opts['children'], indent, indent_size) script.append(newscript) indent -= indent_size script.append(('%s}' % (' ' * indent))) else: script.append(head) return ('\n'.join(script), indent)
[ "def", "_format_layoutlist", "(", "layout", ",", "indent", "=", "0", ",", "indent_size", "=", "2", ")", ":", "script", "=", "[", "]", "for", "layout_elem", "in", "layout", ":", "(", "elem", ",", "opts", ")", "=", "layout_elem", "opts", "=", "(", "opts", "or", "{", "}", ")", "fopts", "=", "' '", ".", "join", "(", "map", "(", "str", ",", "_format_optdict", "(", "opts", ",", "True", ",", "'children'", ")", ")", ")", "head", "=", "(", "'%s%s%s'", "%", "(", "(", "' '", "*", "indent", ")", ",", "elem", ",", "(", "(", "' %s'", "%", "fopts", ")", "if", "fopts", "else", "''", ")", ")", ")", "if", "(", "'children'", "in", "opts", ")", ":", "script", ".", "append", "(", "(", "head", "+", "' -children {'", ")", ")", "indent", "+=", "indent_size", "(", "newscript", ",", "indent", ")", "=", "_format_layoutlist", "(", "opts", "[", "'children'", "]", ",", "indent", ",", "indent_size", ")", "script", ".", "append", "(", "newscript", ")", "indent", "-=", "indent_size", "script", ".", "append", "(", "(", "'%s}'", "%", "(", "' '", "*", "indent", ")", ")", ")", "else", ":", "script", ".", "append", "(", "head", ")", "return", "(", "'\\n'", ".", "join", "(", "script", ")", ",", "indent", ")" ]
formats a layout list so we can pass the result to ttk::style layout and ttk::style settings .
train
false
15,969
@login_required @require_http_methods(['GET', 'POST']) def remove_from_locale(request, locale_code, user_id, role): locale = get_object_or_404(Locale, locale=locale_code) user = get_object_or_404(User, id=user_id) if (not _user_can_edit(request.user, locale)): raise PermissionDenied if (request.method == 'POST'): getattr(locale, ROLE_ATTRS[role]).remove(user) msg = _('{user} removed from successfully!').format(user=user.username) messages.add_message(request, messages.SUCCESS, msg) return HttpResponseRedirect(locale.get_absolute_url()) return render(request, 'wiki/confirm_remove_from_locale.html', {'locale': locale, 'leader': user, 'role': role})
[ "@", "login_required", "@", "require_http_methods", "(", "[", "'GET'", ",", "'POST'", "]", ")", "def", "remove_from_locale", "(", "request", ",", "locale_code", ",", "user_id", ",", "role", ")", ":", "locale", "=", "get_object_or_404", "(", "Locale", ",", "locale", "=", "locale_code", ")", "user", "=", "get_object_or_404", "(", "User", ",", "id", "=", "user_id", ")", "if", "(", "not", "_user_can_edit", "(", "request", ".", "user", ",", "locale", ")", ")", ":", "raise", "PermissionDenied", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "getattr", "(", "locale", ",", "ROLE_ATTRS", "[", "role", "]", ")", ".", "remove", "(", "user", ")", "msg", "=", "_", "(", "'{user} removed from successfully!'", ")", ".", "format", "(", "user", "=", "user", ".", "username", ")", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "SUCCESS", ",", "msg", ")", "return", "HttpResponseRedirect", "(", "locale", ".", "get_absolute_url", "(", ")", ")", "return", "render", "(", "request", ",", "'wiki/confirm_remove_from_locale.html'", ",", "{", "'locale'", ":", "locale", ",", "'leader'", ":", "user", ",", "'role'", ":", "role", "}", ")" ]
remove a user from the locale role .
train
false
15,971
def replace_win_devices(name): if name: lname = name.lower() for dev in _DEVICES: if ((lname == dev) or lname.startswith((dev + '.'))): name = ('_' + name) break return name
[ "def", "replace_win_devices", "(", "name", ")", ":", "if", "name", ":", "lname", "=", "name", ".", "lower", "(", ")", "for", "dev", "in", "_DEVICES", ":", "if", "(", "(", "lname", "==", "dev", ")", "or", "lname", ".", "startswith", "(", "(", "dev", "+", "'.'", ")", ")", ")", ":", "name", "=", "(", "'_'", "+", "name", ")", "break", "return", "name" ]
remove reserved windows device names from a name .
train
false
15,972
def test_SAMPHubServer_run_repeated(): hub = SAMPHubServer(web_profile=True, mode='multiple', pool_size=1) hub.start() time.sleep(1) hub.stop() time.sleep(1) hub.start() time.sleep(1) hub.stop()
[ "def", "test_SAMPHubServer_run_repeated", "(", ")", ":", "hub", "=", "SAMPHubServer", "(", "web_profile", "=", "True", ",", "mode", "=", "'multiple'", ",", "pool_size", "=", "1", ")", "hub", ".", "start", "(", ")", "time", ".", "sleep", "(", "1", ")", "hub", ".", "stop", "(", ")", "time", ".", "sleep", "(", "1", ")", "hub", ".", "start", "(", ")", "time", ".", "sleep", "(", "1", ")", "hub", ".", "stop", "(", ")" ]
test that samphub can be restarted after it has been stopped .
train
false
15,973
def list_command(args): print list_zones(args.project_id)
[ "def", "list_command", "(", "args", ")", ":", "print", "list_zones", "(", "args", ".", "project_id", ")" ]
lists all tasks by creation time .
train
false
15,974
def issubclass_safe(x, klass): try: return issubclass(x, klass) except TypeError: return False
[ "def", "issubclass_safe", "(", "x", ",", "klass", ")", ":", "try", ":", "return", "issubclass", "(", "x", ",", "klass", ")", "except", "TypeError", ":", "return", "False" ]
return issubclass and return false on a typeerror .
train
false
15,975
def make_test_case(base_case): class FooTests(base_case, ): def test_something(self): pass return FooTests('test_something')
[ "def", "make_test_case", "(", "base_case", ")", ":", "class", "FooTests", "(", "base_case", ",", ")", ":", "def", "test_something", "(", "self", ")", ":", "pass", "return", "FooTests", "(", "'test_something'", ")" ]
make a single test that subclasses base_case and passes .
train
false
15,976
def alias_set_collections(alias_name, collections=None, **kwargs): if (not isinstance(collections, list)): raise SaltInvocationError('Collection parameter must be defined and contain a list of collection name') for collection in collections: if (not isinstance(collection, six.string_types)): raise ValueError('Collection name must be a string') return _query('admin/collections?action=CREATEALIAS&name={alias}&wt=json&collections={collections}'.format(alias=alias_name, collections=', '.join(collections)), **kwargs)
[ "def", "alias_set_collections", "(", "alias_name", ",", "collections", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "not", "isinstance", "(", "collections", ",", "list", ")", ")", ":", "raise", "SaltInvocationError", "(", "'Collection parameter must be defined and contain a list of collection name'", ")", "for", "collection", "in", "collections", ":", "if", "(", "not", "isinstance", "(", "collection", ",", "six", ".", "string_types", ")", ")", ":", "raise", "ValueError", "(", "'Collection name must be a string'", ")", "return", "_query", "(", "'admin/collections?action=CREATEALIAS&name={alias}&wt=json&collections={collections}'", ".", "format", "(", "alias", "=", "alias_name", ",", "collections", "=", "', '", ".", "join", "(", "collections", ")", ")", ",", "**", "kwargs", ")" ]
define an alias additional parameters may be passed .
train
true
15,978
def transform_array_to_list(array): if ((array.dtype.kind in ('u', 'i', 'f')) and (~ np.isfinite(array)).any()): transformed = array.astype('object') transformed[np.isnan(array)] = 'NaN' transformed[np.isposinf(array)] = 'Infinity' transformed[np.isneginf(array)] = '-Infinity' return transformed.tolist() return array.tolist()
[ "def", "transform_array_to_list", "(", "array", ")", ":", "if", "(", "(", "array", ".", "dtype", ".", "kind", "in", "(", "'u'", ",", "'i'", ",", "'f'", ")", ")", "and", "(", "~", "np", ".", "isfinite", "(", "array", ")", ")", ".", "any", "(", ")", ")", ":", "transformed", "=", "array", ".", "astype", "(", "'object'", ")", "transformed", "[", "np", ".", "isnan", "(", "array", ")", "]", "=", "'NaN'", "transformed", "[", "np", ".", "isposinf", "(", "array", ")", "]", "=", "'Infinity'", "transformed", "[", "np", ".", "isneginf", "(", "array", ")", "]", "=", "'-Infinity'", "return", "transformed", ".", "tolist", "(", ")", "return", "array", ".", "tolist", "(", ")" ]
transforms a numpy array into a list of values args: array : the numpy array series to transform returns: list or dict .
train
false
15,979
def _pwdGetByName(username): if (pwd is None): return None return pwd.getpwnam(username)
[ "def", "_pwdGetByName", "(", "username", ")", ":", "if", "(", "pwd", "is", "None", ")", ":", "return", "None", "return", "pwd", ".", "getpwnam", "(", "username", ")" ]
look up a user in the /etc/passwd database using the pwd module .
train
false
15,980
@handle_response_format @treeio_login_required def project_delete(request, project_id, response_format='html'): project = get_object_or_404(Project, pk=project_id) if (not request.user.profile.has_permission(project, mode='w')): return user_denied(request, message="You don't have access to this Project") if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): project.trash = True project.save() else: project.delete() return HttpResponseRedirect(reverse('projects_index')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('projects_project_view', args=[project.id])) context = _get_default_context(request) context.update({'project': project}) return render_to_response('projects/project_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "project_delete", "(", "request", ",", "project_id", ",", "response_format", "=", "'html'", ")", ":", "project", "=", "get_object_or_404", "(", "Project", ",", "pk", "=", "project_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "project", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Project\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "project", ".", "trash", "=", "True", "project", ".", "save", "(", ")", "else", ":", "project", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_index'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'projects_project_view'", ",", "args", "=", "[", "project", ".", "id", "]", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'project'", ":", "project", "}", ")", "return", "render_to_response", "(", "'projects/project_delete'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
project delete confirmation view make a project as deleted on post .
train
false
15,981
def supported_uri_schemes(uri_schemes): supported_schemes = set() registry = Gst.Registry.get() for factory in registry.get_feature_list(Gst.ElementFactory): for uri in factory.get_uri_protocols(): if (uri in uri_schemes): supported_schemes.add(uri) return supported_schemes
[ "def", "supported_uri_schemes", "(", "uri_schemes", ")", ":", "supported_schemes", "=", "set", "(", ")", "registry", "=", "Gst", ".", "Registry", ".", "get", "(", ")", "for", "factory", "in", "registry", ".", "get_feature_list", "(", "Gst", ".", "ElementFactory", ")", ":", "for", "uri", "in", "factory", ".", "get_uri_protocols", "(", ")", ":", "if", "(", "uri", "in", "uri_schemes", ")", ":", "supported_schemes", ".", "add", "(", "uri", ")", "return", "supported_schemes" ]
determine which uris we can actually support from provided whitelist .
train
false
15,983
def get_deployment_engine_class(provider): engine_path = PROVIDER_MAPPING.get(provider, None) logger.debug('Loading Deployment Engine for %s: %s', provider, engine_path) if (engine_path is None): return None (module, engine) = engine_path.rsplit('.', 1) try: _mod = __import__(module, fromlist=[engine]) except ImportError as e: logger.error('Unable to import requested engine (%s) for provider %s', engine, provider) logger.error('A required library was missing: %s', e.message) logger.error('Please install the library and try again') else: return getattr(_mod, engine)
[ "def", "get_deployment_engine_class", "(", "provider", ")", ":", "engine_path", "=", "PROVIDER_MAPPING", ".", "get", "(", "provider", ",", "None", ")", "logger", ".", "debug", "(", "'Loading Deployment Engine for %s: %s'", ",", "provider", ",", "engine_path", ")", "if", "(", "engine_path", "is", "None", ")", ":", "return", "None", "(", "module", ",", "engine", ")", "=", "engine_path", ".", "rsplit", "(", "'.'", ",", "1", ")", "try", ":", "_mod", "=", "__import__", "(", "module", ",", "fromlist", "=", "[", "engine", "]", ")", "except", "ImportError", "as", "e", ":", "logger", ".", "error", "(", "'Unable to import requested engine (%s) for provider %s'", ",", "engine", ",", "provider", ")", "logger", ".", "error", "(", "'A required library was missing: %s'", ",", "e", ".", "message", ")", "logger", ".", "error", "(", "'Please install the library and try again'", ")", "else", ":", "return", "getattr", "(", "_mod", ",", "engine", ")" ]
import an engine by name :provider: the provider you want to deploy to :type provider: str :returns: the deployment engine :rtype: cactus .
train
false
15,984
def logging_level(runlevel): if runlevel: return LOG_VALUES[runlevel] else: return (logging.FATAL + 5)
[ "def", "logging_level", "(", "runlevel", ")", ":", "if", "runlevel", ":", "return", "LOG_VALUES", "[", "runlevel", "]", "else", ":", "return", "(", "logging", ".", "FATAL", "+", "5", ")" ]
translates a runlevel into the value expected by the logging module .
train
false
15,988
def get_rules(): cmd = '{0} -S -n'.format(__detect_os()) ret = __salt__['cmd.run'](cmd, python_shell=False) return ret
[ "def", "get_rules", "(", ")", ":", "cmd", "=", "'{0} -S -n'", ".", "format", "(", "__detect_os", "(", ")", ")", "ret", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "return", "ret" ]
return a data structure of the current .
train
true
15,990
@environmentfilter def do_first(environment, seq): try: return next(iter(seq)) except StopIteration: return environment.undefined('No first item, sequence was empty.')
[ "@", "environmentfilter", "def", "do_first", "(", "environment", ",", "seq", ")", ":", "try", ":", "return", "next", "(", "iter", "(", "seq", ")", ")", "except", "StopIteration", ":", "return", "environment", ".", "undefined", "(", "'No first item, sequence was empty.'", ")" ]
return the first item of a sequence .
train
false
15,992
def _ar_invtransparams(params): tmp = params.copy() for j in range((len(params) - 1), 0, (-1)): a = params[j] for kiter in range(j): tmp[kiter] = ((params[kiter] + (a * params[((j - kiter) - 1)])) / (1 - (a ** 2))) params[:j] = tmp[:j] invarcoefs = (- np.log(((1 - params) / (1 + params)))) return invarcoefs
[ "def", "_ar_invtransparams", "(", "params", ")", ":", "tmp", "=", "params", ".", "copy", "(", ")", "for", "j", "in", "range", "(", "(", "len", "(", "params", ")", "-", "1", ")", ",", "0", ",", "(", "-", "1", ")", ")", ":", "a", "=", "params", "[", "j", "]", "for", "kiter", "in", "range", "(", "j", ")", ":", "tmp", "[", "kiter", "]", "=", "(", "(", "params", "[", "kiter", "]", "+", "(", "a", "*", "params", "[", "(", "(", "j", "-", "kiter", ")", "-", "1", ")", "]", ")", ")", "/", "(", "1", "-", "(", "a", "**", "2", ")", ")", ")", "params", "[", ":", "j", "]", "=", "tmp", "[", ":", "j", "]", "invarcoefs", "=", "(", "-", "np", ".", "log", "(", "(", "(", "1", "-", "params", ")", "/", "(", "1", "+", "params", ")", ")", ")", ")", "return", "invarcoefs" ]
inverse of the jones reparameterization parameters params : array the transformed ar coefficients .
train
false
15,993
def require_remote_ref_path(func): def wrapper(self, *args): if (not self.is_remote()): raise ValueError(('ref path does not point to a remote reference: %s' % self.path)) return func(self, *args) wrapper.__name__ = func.__name__ return wrapper
[ "def", "require_remote_ref_path", "(", "func", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ")", ":", "if", "(", "not", "self", ".", "is_remote", "(", ")", ")", ":", "raise", "ValueError", "(", "(", "'ref path does not point to a remote reference: %s'", "%", "self", ".", "path", ")", ")", "return", "func", "(", "self", ",", "*", "args", ")", "wrapper", ".", "__name__", "=", "func", ".", "__name__", "return", "wrapper" ]
a decorator raising a typeerror if we are not a valid remote .
train
true
15,994
def norm_flat(a, p=2): if (p == u'Infinity'): return np.max(np.abs(a)) else: return (np.sum((np.abs(a) ** p)) ** (1 / p))
[ "def", "norm_flat", "(", "a", ",", "p", "=", "2", ")", ":", "if", "(", "p", "==", "u'Infinity'", ")", ":", "return", "np", ".", "max", "(", "np", ".", "abs", "(", "a", ")", ")", "else", ":", "return", "(", "np", ".", "sum", "(", "(", "np", ".", "abs", "(", "a", ")", "**", "p", ")", ")", "**", "(", "1", "/", "p", ")", ")" ]
norm -> l-p norm of a .
train
false
15,996
def is_ignored(hass, name): for prefix in hass.data['litejet_config'].get(CONF_EXCLUDE_NAMES, []): if name.startswith(prefix): return True return False
[ "def", "is_ignored", "(", "hass", ",", "name", ")", ":", "for", "prefix", "in", "hass", ".", "data", "[", "'litejet_config'", "]", ".", "get", "(", "CONF_EXCLUDE_NAMES", ",", "[", "]", ")", ":", "if", "name", ".", "startswith", "(", "prefix", ")", ":", "return", "True", "return", "False" ]
helper function to check if the given path should be ignored or not .
train
false
15,997
@hook.command(permissions=['op_quiet', 'op']) def quiet(text, conn, chan, notice): mode_cmd('+q', 'quiet', text, chan, conn, notice)
[ "@", "hook", ".", "command", "(", "permissions", "=", "[", "'op_quiet'", ",", "'op'", "]", ")", "def", "quiet", "(", "text", ",", "conn", ",", "chan", ",", "notice", ")", ":", "mode_cmd", "(", "'+q'", ",", "'quiet'", ",", "text", ",", "chan", ",", "conn", ",", "notice", ")" ]
this gives admins the ability to quiet a user .
train
false
15,999
def test_class_injection_does_not_break_collection(testdir): testdir.makeconftest('\n from test_inject import TestClass\n def pytest_generate_tests(metafunc):\n TestClass.changed_var = {}\n ') testdir.makepyfile(test_inject='\n class TestClass(object):\n def test_injection(self):\n """Test being parametrized."""\n pass\n ') result = testdir.runpytest() assert ('RuntimeError: dictionary changed size during iteration' not in result.stdout.str()) result.stdout.fnmatch_lines(['*1 passed*'])
[ "def", "test_class_injection_does_not_break_collection", "(", "testdir", ")", ":", "testdir", ".", "makeconftest", "(", "'\\n from test_inject import TestClass\\n def pytest_generate_tests(metafunc):\\n TestClass.changed_var = {}\\n '", ")", "testdir", ".", "makepyfile", "(", "test_inject", "=", "'\\n class TestClass(object):\\n def test_injection(self):\\n \"\"\"Test being parametrized.\"\"\"\\n pass\\n '", ")", "result", "=", "testdir", ".", "runpytest", "(", ")", "assert", "(", "'RuntimeError: dictionary changed size during iteration'", "not", "in", "result", ".", "stdout", ".", "str", "(", ")", ")", "result", ".", "stdout", ".", "fnmatch_lines", "(", "[", "'*1 passed*'", "]", ")" ]
tests whether injection during collection time will terminate testing .
train
false
16,003
def p_inclusive_or_expression_1(t): pass
[ "def", "p_inclusive_or_expression_1", "(", "t", ")", ":", "pass" ]
inclusive_or_expression : exclusive_or_expression .
train
false
16,004
def deduplicate(seq): seen = set() return [x for x in seq if ((x not in seen) and (not seen.add(x)))]
[ "def", "deduplicate", "(", "seq", ")", ":", "seen", "=", "set", "(", ")", "return", "[", "x", "for", "x", "in", "seq", "if", "(", "(", "x", "not", "in", "seen", ")", "and", "(", "not", "seen", ".", "add", "(", "x", ")", ")", ")", "]" ]
remove duplicates from sequence wile preserving order .
train
false
16,006
def pprint_nodes(subtrees): def indent(s, type=1): x = s.split('\n') r = ('+-%s\n' % x[0]) for a in x[1:]: if (a == ''): continue if (type == 1): r += ('| %s\n' % a) else: r += (' %s\n' % a) return r if (len(subtrees) == 0): return '' f = '' for a in subtrees[:(-1)]: f += indent(a) f += indent(subtrees[(-1)], 2) return f
[ "def", "pprint_nodes", "(", "subtrees", ")", ":", "def", "indent", "(", "s", ",", "type", "=", "1", ")", ":", "x", "=", "s", ".", "split", "(", "'\\n'", ")", "r", "=", "(", "'+-%s\\n'", "%", "x", "[", "0", "]", ")", "for", "a", "in", "x", "[", "1", ":", "]", ":", "if", "(", "a", "==", "''", ")", ":", "continue", "if", "(", "type", "==", "1", ")", ":", "r", "+=", "(", "'| %s\\n'", "%", "a", ")", "else", ":", "r", "+=", "(", "' %s\\n'", "%", "a", ")", "return", "r", "if", "(", "len", "(", "subtrees", ")", "==", "0", ")", ":", "return", "''", "f", "=", "''", "for", "a", "in", "subtrees", "[", ":", "(", "-", "1", ")", "]", ":", "f", "+=", "indent", "(", "a", ")", "f", "+=", "indent", "(", "subtrees", "[", "(", "-", "1", ")", "]", ",", "2", ")", "return", "f" ]
prettyprints systems of nodes .
train
false
16,008
def _calculate_overquota(project_quotas, user_quotas, deltas, project_usages, user_usages): overs = [] for (res, delta) in deltas.items(): if (delta >= 0): if (0 <= project_quotas[res] < (delta + project_usages[res]['total'])): LOG.debug('Request is over project quota for resource "%(res)s". Project limit: %(limit)s, delta: %(delta)s, current total project usage: %(total)s', {'res': res, 'limit': project_quotas[res], 'delta': delta, 'total': project_usages[res]['total']}) overs.append(res) elif (0 <= user_quotas[res] < (delta + user_usages[res]['total'])): LOG.debug('Request is over user quota for resource "%(res)s". User limit: %(limit)s, delta: %(delta)s, current total user usage: %(total)s', {'res': res, 'limit': user_quotas[res], 'delta': delta, 'total': user_usages[res]['total']}) overs.append(res) return overs
[ "def", "_calculate_overquota", "(", "project_quotas", ",", "user_quotas", ",", "deltas", ",", "project_usages", ",", "user_usages", ")", ":", "overs", "=", "[", "]", "for", "(", "res", ",", "delta", ")", "in", "deltas", ".", "items", "(", ")", ":", "if", "(", "delta", ">=", "0", ")", ":", "if", "(", "0", "<=", "project_quotas", "[", "res", "]", "<", "(", "delta", "+", "project_usages", "[", "res", "]", "[", "'total'", "]", ")", ")", ":", "LOG", ".", "debug", "(", "'Request is over project quota for resource \"%(res)s\". Project limit: %(limit)s, delta: %(delta)s, current total project usage: %(total)s'", ",", "{", "'res'", ":", "res", ",", "'limit'", ":", "project_quotas", "[", "res", "]", ",", "'delta'", ":", "delta", ",", "'total'", ":", "project_usages", "[", "res", "]", "[", "'total'", "]", "}", ")", "overs", ".", "append", "(", "res", ")", "elif", "(", "0", "<=", "user_quotas", "[", "res", "]", "<", "(", "delta", "+", "user_usages", "[", "res", "]", "[", "'total'", "]", ")", ")", ":", "LOG", ".", "debug", "(", "'Request is over user quota for resource \"%(res)s\". User limit: %(limit)s, delta: %(delta)s, current total user usage: %(total)s'", ",", "{", "'res'", ":", "res", ",", "'limit'", ":", "user_quotas", "[", "res", "]", ",", "'delta'", ":", "delta", ",", "'total'", ":", "user_usages", "[", "res", "]", "[", "'total'", "]", "}", ")", "overs", ".", "append", "(", "res", ")", "return", "overs" ]
checks if any resources will go over quota based on the request .
train
false
16,009
def LoadBackendEntry(backend_entry): builder = yaml_object.ObjectBuilder(BackendEntry) handler = yaml_builder.BuilderHandler(builder) listener = yaml_listener.EventListener(handler) listener.Parse(backend_entry) entries = handler.GetResults() if (len(entries) < 1): raise BadConfig('Empty backend configuration.') if (len(entries) > 1): raise BadConfig('Multiple backend entries were found in configuration.') return entries[0].Init()
[ "def", "LoadBackendEntry", "(", "backend_entry", ")", ":", "builder", "=", "yaml_object", ".", "ObjectBuilder", "(", "BackendEntry", ")", "handler", "=", "yaml_builder", ".", "BuilderHandler", "(", "builder", ")", "listener", "=", "yaml_listener", ".", "EventListener", "(", "handler", ")", "listener", ".", "Parse", "(", "backend_entry", ")", "entries", "=", "handler", ".", "GetResults", "(", ")", "if", "(", "len", "(", "entries", ")", "<", "1", ")", ":", "raise", "BadConfig", "(", "'Empty backend configuration.'", ")", "if", "(", "len", "(", "entries", ")", ">", "1", ")", ":", "raise", "BadConfig", "(", "'Multiple backend entries were found in configuration.'", ")", "return", "entries", "[", "0", "]", ".", "Init", "(", ")" ]
parses a backendentry object from a string .
train
false
16,010
def list_may_enroll(course_key, features): may_enroll_and_unenrolled = CourseEnrollmentAllowed.may_enroll_and_unenrolled(course_key) def extract_student(student, features): '\n Build dict containing information about a single student.\n ' return dict(((feature, getattr(student, feature)) for feature in features)) return [extract_student(student, features) for student in may_enroll_and_unenrolled]
[ "def", "list_may_enroll", "(", "course_key", ",", "features", ")", ":", "may_enroll_and_unenrolled", "=", "CourseEnrollmentAllowed", ".", "may_enroll_and_unenrolled", "(", "course_key", ")", "def", "extract_student", "(", "student", ",", "features", ")", ":", "return", "dict", "(", "(", "(", "feature", ",", "getattr", "(", "student", ",", "feature", ")", ")", "for", "feature", "in", "features", ")", ")", "return", "[", "extract_student", "(", "student", ",", "features", ")", "for", "student", "in", "may_enroll_and_unenrolled", "]" ]
return info about students who may enroll in a course as a dict .
train
false
16,011
def test_rus_init(): ratio = 'auto' rus = RandomUnderSampler(ratio=ratio, random_state=RND_SEED) assert_equal(rus.random_state, RND_SEED)
[ "def", "test_rus_init", "(", ")", ":", "ratio", "=", "'auto'", "rus", "=", "RandomUnderSampler", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ")", "assert_equal", "(", "rus", ".", "random_state", ",", "RND_SEED", ")" ]
test the initialisation of the object .
train
false
16,012
def strip_remotes(remote_branches): branches = [utils.strip_one(branch) for branch in remote_branches] return [branch for branch in branches if (branch != u'HEAD')]
[ "def", "strip_remotes", "(", "remote_branches", ")", ":", "branches", "=", "[", "utils", ".", "strip_one", "(", "branch", ")", "for", "branch", "in", "remote_branches", "]", "return", "[", "branch", "for", "branch", "in", "branches", "if", "(", "branch", "!=", "u'HEAD'", ")", "]" ]
strip the <remote>/ prefixes from branch names , e . g . origin/master .
train
false
16,014
def expand_env_var(env_var): if (not env_var): return env_var while True: interpolated = os.path.expanduser(os.path.expandvars(str(env_var))) if (interpolated == env_var): return interpolated else: env_var = interpolated
[ "def", "expand_env_var", "(", "env_var", ")", ":", "if", "(", "not", "env_var", ")", ":", "return", "env_var", "while", "True", ":", "interpolated", "=", "os", ".", "path", ".", "expanduser", "(", "os", ".", "path", ".", "expandvars", "(", "str", "(", "env_var", ")", ")", ")", "if", "(", "interpolated", "==", "env_var", ")", ":", "return", "interpolated", "else", ":", "env_var", "=", "interpolated" ]
expands env vars by repeatedly applying expandvars and expanduser until interpolation stops having any effect .
train
true
16,017
def ledoit_wolf(X, assume_centered=False, block_size=1000): X = np.asarray(X) if ((len(X.shape) == 2) and (X.shape[1] == 1)): if (not assume_centered): X = (X - X.mean()) return (np.atleast_2d((X ** 2).mean()), 0.0) if (X.ndim == 1): X = np.reshape(X, (1, (-1))) warnings.warn('Only one sample available. You may want to reshape your data array') n_samples = 1 n_features = X.size else: (n_samples, n_features) = X.shape shrinkage = ledoit_wolf_shrinkage(X, assume_centered=assume_centered, block_size=block_size) emp_cov = empirical_covariance(X, assume_centered=assume_centered) mu = (np.sum(np.trace(emp_cov)) / n_features) shrunk_cov = ((1.0 - shrinkage) * emp_cov) shrunk_cov.flat[::(n_features + 1)] += (shrinkage * mu) return (shrunk_cov, shrinkage)
[ "def", "ledoit_wolf", "(", "X", ",", "assume_centered", "=", "False", ",", "block_size", "=", "1000", ")", ":", "X", "=", "np", ".", "asarray", "(", "X", ")", "if", "(", "(", "len", "(", "X", ".", "shape", ")", "==", "2", ")", "and", "(", "X", ".", "shape", "[", "1", "]", "==", "1", ")", ")", ":", "if", "(", "not", "assume_centered", ")", ":", "X", "=", "(", "X", "-", "X", ".", "mean", "(", ")", ")", "return", "(", "np", ".", "atleast_2d", "(", "(", "X", "**", "2", ")", ".", "mean", "(", ")", ")", ",", "0.0", ")", "if", "(", "X", ".", "ndim", "==", "1", ")", ":", "X", "=", "np", ".", "reshape", "(", "X", ",", "(", "1", ",", "(", "-", "1", ")", ")", ")", "warnings", ".", "warn", "(", "'Only one sample available. You may want to reshape your data array'", ")", "n_samples", "=", "1", "n_features", "=", "X", ".", "size", "else", ":", "(", "n_samples", ",", "n_features", ")", "=", "X", ".", "shape", "shrinkage", "=", "ledoit_wolf_shrinkage", "(", "X", ",", "assume_centered", "=", "assume_centered", ",", "block_size", "=", "block_size", ")", "emp_cov", "=", "empirical_covariance", "(", "X", ",", "assume_centered", "=", "assume_centered", ")", "mu", "=", "(", "np", ".", "sum", "(", "np", ".", "trace", "(", "emp_cov", ")", ")", "/", "n_features", ")", "shrunk_cov", "=", "(", "(", "1.0", "-", "shrinkage", ")", "*", "emp_cov", ")", "shrunk_cov", ".", "flat", "[", ":", ":", "(", "n_features", "+", "1", ")", "]", "+=", "(", "shrinkage", "*", "mu", ")", "return", "(", "shrunk_cov", ",", "shrinkage", ")" ]
estimates the shrunk ledoit-wolf covariance matrix .
train
false
16,019
@pytest.fixture(params=('browse', 'translate')) def view_types(request): return request.param
[ "@", "pytest", ".", "fixture", "(", "params", "=", "(", "'browse'", ",", "'translate'", ")", ")", "def", "view_types", "(", "request", ")", ":", "return", "request", ".", "param" ]
list of possible view types .
train
false