Columns:
  id_within_dataset     int64    values 1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 distinct value
  is_duplicated         bool     2 classes
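Each record below carries these six fields. As a minimal sketch of how such a dump might be consumed (the dataset's published name is not stated here, so the load_dataset argument is a hypothetical placeholder), a Hugging Face datasets loader would expose the columns like this:

from datasets import load_dataset

# "org/code-nl-pairs" is a hypothetical identifier; the real dataset
# name is not given in this dump.
ds = load_dataset("org/code-nl-pairs", split="train")

row = ds[0]
print(row["id_within_dataset"])  # e.g. 7411
print(row["snippet"][:60])       # raw source of one Python function
print(row["tokens"][:8])         # lexical tokens of that snippet
print(row["nl"])                 # one-line natural-language summary
print(row["is_duplicated"])      # True if the snippet recurs in the corpus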
7,411
def _escalation_rules_to_string(escalation_rules): result = '' for rule in escalation_rules: result += 'escalation_delay_in_minutes: {0} '.format(rule['escalation_delay_in_minutes']) for target in rule['targets']: result += '{0}:{1} '.format(target['type'], target['id']) return result
[ "def", "_escalation_rules_to_string", "(", "escalation_rules", ")", ":", "result", "=", "''", "for", "rule", "in", "escalation_rules", ":", "result", "+=", "'escalation_delay_in_minutes: {0} '", ".", "format", "(", "rule", "[", "'escalation_delay_in_minutes'", "]", ")", "for", "target", "in", "rule", "[", "'targets'", "]", ":", "result", "+=", "'{0}:{1} '", ".", "format", "(", "target", "[", "'type'", "]", ",", "target", "[", "'id'", "]", ")", "return", "result" ]
convert escalation_rules dict to a string for comparison .
train
true
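The tokens column tracks the snippet token for token: keywords, identifiers, operators, and whole string literals. The tokenizer used to build the dataset is not specified; a rough sketch with Python's standard tokenize module comes close, and reproduces row 7,414's one-liner exactly:

import io
import tokenize

def rough_tokens(snippet):
    # Lex the snippet and keep the token strings, dropping whitespace and
    # structural markers -- an approximation of the tokens column.
    skip = {tokenize.NEWLINE, tokenize.NL, tokenize.INDENT,
            tokenize.DEDENT, tokenize.ENDMARKER, tokenize.COMMENT}
    return [tok.string
            for tok in tokenize.generate_tokens(io.StringIO(snippet).readline)
            if tok.type not in skip]

print(rough_tokens("def getNewRepository(): return ExportRepository()"))
# ['def', 'getNewRepository', '(', ')', ':', 'return', 'ExportRepository', '(', ')']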
7,412
def start_tag(doc, name, attr=None, body=None, namespace=None): if (attr is None): attr = {} attr_vals = {} attr_keys = {} for (key, val) in attr.items(): key_tuple = (namespace, key) attr_vals[key_tuple] = val attr_keys[key_tuple] = key attr2 = AttributesNSImpl(attr_vals, attr_keys) doc.startElementNS((namespace, name), name, attr2) if body: doc.characters(body)
[ "def", "start_tag", "(", "doc", ",", "name", ",", "attr", "=", "None", ",", "body", "=", "None", ",", "namespace", "=", "None", ")", ":", "if", "(", "attr", "is", "None", ")", ":", "attr", "=", "{", "}", "attr_vals", "=", "{", "}", "attr_keys", "=", "{", "}", "for", "(", "key", ",", "val", ")", "in", "attr", ".", "items", "(", ")", ":", "key_tuple", "=", "(", "namespace", ",", "key", ")", "attr_vals", "[", "key_tuple", "]", "=", "val", "attr_keys", "[", "key_tuple", "]", "=", "key", "attr2", "=", "AttributesNSImpl", "(", "attr_vals", ",", "attr_keys", ")", "doc", ".", "startElementNS", "(", "(", "namespace", ",", "name", ")", ",", "name", ",", "attr2", ")", "if", "body", ":", "doc", ".", "characters", "(", "body", ")" ]
the text representation of the start tag for a tag .
train
false
7,413
def CheckCommaSpacing(filename, clean_lines, linenum, error): raw = clean_lines.lines_without_raw_strings line = clean_lines.elided[linenum] if (Search(',[^,\\s]', ReplaceAll('\\boperator\\s*,\\s*\\(', 'F(', line)) and Search(',[^,\\s]', raw[linenum])): error(filename, linenum, 'whitespace/comma', 3, 'Missing space after ,') if Search(';[^\\s};\\\\)/]', line): error(filename, linenum, 'whitespace/semicolon', 3, 'Missing space after ;')
[ "def", "CheckCommaSpacing", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "raw", "=", "clean_lines", ".", "lines_without_raw_strings", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "(", "Search", "(", "',[^,\\\\s]'", ",", "ReplaceAll", "(", "'\\\\boperator\\\\s*,\\\\s*\\\\('", ",", "'F('", ",", "line", ")", ")", "and", "Search", "(", "',[^,\\\\s]'", ",", "raw", "[", "linenum", "]", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/comma'", ",", "3", ",", "'Missing space after ,'", ")", "if", "Search", "(", "';[^\\\\s};\\\\\\\\)/]'", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/semicolon'", ",", "3", ",", "'Missing space after ;'", ")" ]
checks for horizontal spacing near commas and semicolons .
train
false
7,414
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
7,415
def run_one_command(*args): cmd_line = (('"' + executable) + '" '.join(args)) (inp, out, err) = os.popen3(cmd_line) print cmd_line (output, err) = multireader(out, err) res = out.close() if res: print ('%d running %s failed' % (res, cmd_line)) print 'output was', output print 'err', err return (output, err, res)
[ "def", "run_one_command", "(", "*", "args", ")", ":", "cmd_line", "=", "(", "(", "'\"'", "+", "executable", ")", "+", "'\" '", ".", "join", "(", "args", ")", ")", "(", "inp", ",", "out", ",", "err", ")", "=", "os", ".", "popen3", "(", "cmd_line", ")", "print", "cmd_line", "(", "output", ",", "err", ")", "=", "multireader", "(", "out", ",", "err", ")", "res", "=", "out", ".", "close", "(", ")", "if", "res", ":", "print", "(", "'%d running %s failed'", "%", "(", "res", ",", "cmd_line", ")", ")", "print", "'output was'", ",", "output", "print", "'err'", ",", "err", "return", "(", "output", ",", "err", ",", "res", ")" ]
runs a single command .
train
false
7,416
def _send_new_pending_email(instance): context = {'approval_url': urlunsplit((('https' if (settings.HTTPS == 'on') else 'http'), instance.site.domain, reverse('admin:api_admin_apiaccessrequest_change', args=(instance.id,)), '', '')), 'api_request': instance} message = render_to_string('api_admin/api_access_request_email_new_request.txt', context) try: send_mail(_('API access request from {company}').format(company=instance.company_name), message, settings.API_ACCESS_FROM_EMAIL, [settings.API_ACCESS_MANAGER_EMAIL], fail_silently=False) except SMTPException: log.exception('Error sending API user notification email for request [%s].', instance.id)
[ "def", "_send_new_pending_email", "(", "instance", ")", ":", "context", "=", "{", "'approval_url'", ":", "urlunsplit", "(", "(", "(", "'https'", "if", "(", "settings", ".", "HTTPS", "==", "'on'", ")", "else", "'http'", ")", ",", "instance", ".", "site", ".", "domain", ",", "reverse", "(", "'admin:api_admin_apiaccessrequest_change'", ",", "args", "=", "(", "instance", ".", "id", ",", ")", ")", ",", "''", ",", "''", ")", ")", ",", "'api_request'", ":", "instance", "}", "message", "=", "render_to_string", "(", "'api_admin/api_access_request_email_new_request.txt'", ",", "context", ")", "try", ":", "send_mail", "(", "_", "(", "'API access request from {company}'", ")", ".", "format", "(", "company", "=", "instance", ".", "company_name", ")", ",", "message", ",", "settings", ".", "API_ACCESS_FROM_EMAIL", ",", "[", "settings", ".", "API_ACCESS_MANAGER_EMAIL", "]", ",", "fail_silently", "=", "False", ")", "except", "SMTPException", ":", "log", ".", "exception", "(", "'Error sending API user notification email for request [%s].'", ",", "instance", ".", "id", ")" ]
send an email to settings .
train
false
7,418
def get_oauth_keys(provider): keys = frappe.conf.get(u'{provider}_login'.format(provider=provider)) if (not keys): social = frappe.get_doc(u'Social Login Keys', u'Social Login Keys') keys = {} for fieldname in (u'client_id', u'client_secret'): value = social.get(u'{provider}_{fieldname}'.format(provider=provider, fieldname=fieldname)) if (not value): keys = {} break keys[fieldname] = value return keys else: return {u'client_id': keys[u'client_id'], u'client_secret': keys[u'client_secret']}
[ "def", "get_oauth_keys", "(", "provider", ")", ":", "keys", "=", "frappe", ".", "conf", ".", "get", "(", "u'{provider}_login'", ".", "format", "(", "provider", "=", "provider", ")", ")", "if", "(", "not", "keys", ")", ":", "social", "=", "frappe", ".", "get_doc", "(", "u'Social Login Keys'", ",", "u'Social Login Keys'", ")", "keys", "=", "{", "}", "for", "fieldname", "in", "(", "u'client_id'", ",", "u'client_secret'", ")", ":", "value", "=", "social", ".", "get", "(", "u'{provider}_{fieldname}'", ".", "format", "(", "provider", "=", "provider", ",", "fieldname", "=", "fieldname", ")", ")", "if", "(", "not", "value", ")", ":", "keys", "=", "{", "}", "break", "keys", "[", "fieldname", "]", "=", "value", "return", "keys", "else", ":", "return", "{", "u'client_id'", ":", "keys", "[", "u'client_id'", "]", ",", "u'client_secret'", ":", "keys", "[", "u'client_secret'", "]", "}" ]
get client_id and client_secret from database or conf .
train
false
7,420
def delete_collection(committer_id, collection_id, force_deletion=False): collection_rights_model = collection_models.CollectionRightsModel.get(collection_id) collection_rights_model.delete(committer_id, '', force_deletion=force_deletion) collection_model = collection_models.CollectionModel.get(collection_id) collection_model.delete(committer_id, feconf.COMMIT_MESSAGE_COLLECTION_DELETED, force_deletion=force_deletion) collection_memcache_key = _get_collection_memcache_key(collection_id) memcache_services.delete(collection_memcache_key) delete_documents_from_search_index([collection_id]) delete_collection_summary(collection_id) activity_services.remove_featured_activity(feconf.ACTIVITY_TYPE_COLLECTION, collection_id)
[ "def", "delete_collection", "(", "committer_id", ",", "collection_id", ",", "force_deletion", "=", "False", ")", ":", "collection_rights_model", "=", "collection_models", ".", "CollectionRightsModel", ".", "get", "(", "collection_id", ")", "collection_rights_model", ".", "delete", "(", "committer_id", ",", "''", ",", "force_deletion", "=", "force_deletion", ")", "collection_model", "=", "collection_models", ".", "CollectionModel", ".", "get", "(", "collection_id", ")", "collection_model", ".", "delete", "(", "committer_id", ",", "feconf", ".", "COMMIT_MESSAGE_COLLECTION_DELETED", ",", "force_deletion", "=", "force_deletion", ")", "collection_memcache_key", "=", "_get_collection_memcache_key", "(", "collection_id", ")", "memcache_services", ".", "delete", "(", "collection_memcache_key", ")", "delete_documents_from_search_index", "(", "[", "collection_id", "]", ")", "delete_collection_summary", "(", "collection_id", ")", "activity_services", ".", "remove_featured_activity", "(", "feconf", ".", "ACTIVITY_TYPE_COLLECTION", ",", "collection_id", ")" ]
deletes the collection with the given collection_id .
train
false
7,421
def simple_read_words(filename='nietzsche.txt'): with open('nietzsche.txt', 'r') as f: words = f.read() return words
[ "def", "simple_read_words", "(", "filename", "=", "'nietzsche.txt'", ")", ":", "with", "open", "(", "'nietzsche.txt'", ",", "'r'", ")", "as", "f", ":", "words", "=", "f", ".", "read", "(", ")", "return", "words" ]
read context from file without any preprocessing .
train
false
7,422
def generate_include_search_paths(hdr, extra_dirs): if (os.sep in hdr): paths = [hdr] else: arg_paths = itertools.chain(COMMON_HEADER_PATHS, extra_dirs) search_paths = unique_not_false_list(arg_paths) paths = path_joiner(hdr, search_paths) return paths
[ "def", "generate_include_search_paths", "(", "hdr", ",", "extra_dirs", ")", ":", "if", "(", "os", ".", "sep", "in", "hdr", ")", ":", "paths", "=", "[", "hdr", "]", "else", ":", "arg_paths", "=", "itertools", ".", "chain", "(", "COMMON_HEADER_PATHS", ",", "extra_dirs", ")", "search_paths", "=", "unique_not_false_list", "(", "arg_paths", ")", "paths", "=", "path_joiner", "(", "hdr", ",", "search_paths", ")", "return", "paths" ]
generate full paths of potential locations of a given header file based on common_header_paths .
train
false
7,424
def make_member_list(members, **attr_map): def _fetch_memb(memb, attr_map): return dict([(k, memb[v]) for (k, v) in attr_map.items() if (v in memb.keys())]) return [_fetch_memb(memb, attr_map) for memb in members]
[ "def", "make_member_list", "(", "members", ",", "**", "attr_map", ")", ":", "def", "_fetch_memb", "(", "memb", ",", "attr_map", ")", ":", "return", "dict", "(", "[", "(", "k", ",", "memb", "[", "v", "]", ")", "for", "(", "k", ",", "v", ")", "in", "attr_map", ".", "items", "(", ")", "if", "(", "v", "in", "memb", ".", "keys", "(", ")", ")", "]", ")", "return", "[", "_fetch_memb", "(", "memb", ",", "attr_map", ")", "for", "memb", "in", "members", "]" ]
create a dict representation of a list of members which we can use to serialize the members list .
train
false
7,425
def list_from_child_node(xpath, suppress_blank=False): def list_from_child_node_lambda(unused_value, bulkload_state): result = [] for node in bulkload_state.current_dictionary['__node__'].findall(xpath): if node.text: result.append(node.text) elif (not suppress_blank): result.append('') return result return list_from_child_node_lambda
[ "def", "list_from_child_node", "(", "xpath", ",", "suppress_blank", "=", "False", ")", ":", "def", "list_from_child_node_lambda", "(", "unused_value", ",", "bulkload_state", ")", ":", "result", "=", "[", "]", "for", "node", "in", "bulkload_state", ".", "current_dictionary", "[", "'__node__'", "]", ".", "findall", "(", "xpath", ")", ":", "if", "node", ".", "text", ":", "result", ".", "append", "(", "node", ".", "text", ")", "elif", "(", "not", "suppress_blank", ")", ":", "result", ".", "append", "(", "''", ")", "return", "result", "return", "list_from_child_node_lambda" ]
return a list property from child nodes of the current xml node .
train
false
7,426
def _validate_api_params(params): return (isinstance(params['api_page_id'], (six.string_types, six.text_type)) and isinstance(params['api_key'], (six.string_types, six.text_type)))
[ "def", "_validate_api_params", "(", "params", ")", ":", "return", "(", "isinstance", "(", "params", "[", "'api_page_id'", "]", ",", "(", "six", ".", "string_types", ",", "six", ".", "text_type", ")", ")", "and", "isinstance", "(", "params", "[", "'api_key'", "]", ",", "(", "six", ".", "string_types", ",", "six", ".", "text_type", ")", ")", ")" ]
validate the api params as specified in the config file .
train
true
7,427
def release(): return uname()[2]
[ "def", "release", "(", ")", ":", "return", "uname", "(", ")", "[", "2", "]" ]
return the system release, i.e. the third element of uname() .
train
false
7,428
def _to_volume_name(dataset_id): return VolumeName(namespace=u'default', dataset_id=dataset_id)
[ "def", "_to_volume_name", "(", "dataset_id", ")", ":", "return", "VolumeName", "(", "namespace", "=", "u'default'", ",", "dataset_id", "=", "dataset_id", ")" ]
convert dataset id to volumename with u"default" namespace .
train
false
7,429
def banner(): _ = BANNER if (not getattr(LOGGER_HANDLER, 'is_tty', False)): _ = re.sub('\x1b.+?m', '', _) dataToStdout(_, forceOutput=True)
[ "def", "banner", "(", ")", ":", "_", "=", "BANNER", "if", "(", "not", "getattr", "(", "LOGGER_HANDLER", ",", "'is_tty'", ",", "False", ")", ")", ":", "_", "=", "re", ".", "sub", "(", "'\\x1b.+?m'", ",", "''", ",", "_", ")", "dataToStdout", "(", "_", ",", "forceOutput", "=", "True", ")" ]
function prints pocsuite banner with its version .
train
false
7,430
def params_to_strings(params, param_values, app): rval = dict() for (key, value) in param_values.items(): if (key in params): value = params[key].value_to_basic(value, app) rval[key] = str(dumps(value)) return rval
[ "def", "params_to_strings", "(", "params", ",", "param_values", ",", "app", ")", ":", "rval", "=", "dict", "(", ")", "for", "(", "key", ",", "value", ")", "in", "param_values", ".", "items", "(", ")", ":", "if", "(", "key", "in", "params", ")", ":", "value", "=", "params", "[", "key", "]", ".", "value_to_basic", "(", "value", ",", "app", ")", "rval", "[", "key", "]", "=", "str", "(", "dumps", "(", "value", ")", ")", "return", "rval" ]
convert a dictionary of parameter values to a dictionary of strings suitable for persisting .
train
false
7,431
def is_kernel_thread(proc): try: return (os.getpgid(proc.pid) == 0) except OSError: return False
[ "def", "is_kernel_thread", "(", "proc", ")", ":", "try", ":", "return", "(", "os", ".", "getpgid", "(", "proc", ".", "pid", ")", "==", "0", ")", "except", "OSError", ":", "return", "False" ]
return true if proc is a kernel thread .
train
false
7,432
def is_old_code(): code_exists = exists('~/viewfinder') if (not code_exists): return False with settings(warn_only=True): is_link = run('readlink ~/viewfinder') if (is_link.return_code == 0): return False return True
[ "def", "is_old_code", "(", ")", ":", "code_exists", "=", "exists", "(", "'~/viewfinder'", ")", "if", "(", "not", "code_exists", ")", ":", "return", "False", "with", "settings", "(", "warn_only", "=", "True", ")", ":", "is_link", "=", "run", "(", "'readlink ~/viewfinder'", ")", "if", "(", "is_link", ".", "return_code", "==", "0", ")", ":", "return", "False", "return", "True" ]
return true if ~/viewfinder is old-style or false if new style .
train
false
7,433
def parse_time_string(arg, freq=None, dayfirst=None, yearfirst=None): from pandas.core.config import get_option if (not isinstance(arg, compat.string_types)): return arg from pandas.tseries.offsets import DateOffset if isinstance(freq, DateOffset): freq = freq.rule_code if (dayfirst is None): dayfirst = get_option('display.date_dayfirst') if (yearfirst is None): yearfirst = get_option('display.date_yearfirst') return tslib.parse_datetime_string_with_reso(arg, freq=freq, dayfirst=dayfirst, yearfirst=yearfirst)
[ "def", "parse_time_string", "(", "arg", ",", "freq", "=", "None", ",", "dayfirst", "=", "None", ",", "yearfirst", "=", "None", ")", ":", "from", "pandas", ".", "core", ".", "config", "import", "get_option", "if", "(", "not", "isinstance", "(", "arg", ",", "compat", ".", "string_types", ")", ")", ":", "return", "arg", "from", "pandas", ".", "tseries", ".", "offsets", "import", "DateOffset", "if", "isinstance", "(", "freq", ",", "DateOffset", ")", ":", "freq", "=", "freq", ".", "rule_code", "if", "(", "dayfirst", "is", "None", ")", ":", "dayfirst", "=", "get_option", "(", "'display.date_dayfirst'", ")", "if", "(", "yearfirst", "is", "None", ")", ":", "yearfirst", "=", "get_option", "(", "'display.date_yearfirst'", ")", "return", "tslib", ".", "parse_datetime_string_with_reso", "(", "arg", ",", "freq", "=", "freq", ",", "dayfirst", "=", "dayfirst", ",", "yearfirst", "=", "yearfirst", ")" ]
try hard to parse datetime string .
train
false
7,434
def instance_uri(request, resource_name, **params): return strip_uri_prefix(request.route_path(('%s-record' % resource_name), **params))
[ "def", "instance_uri", "(", "request", ",", "resource_name", ",", "**", "params", ")", ":", "return", "strip_uri_prefix", "(", "request", ".", "route_path", "(", "(", "'%s-record'", "%", "resource_name", ")", ",", "**", "params", ")", ")" ]
return the uri for the given resource .
train
false
7,436
def warn_cxx(val): if ((sys.platform == 'darwin') and ('clang++' not in val)): _logger.warning('Only clang++ is supported. With g++, we end up with strange g++/OSX bugs.') return True
[ "def", "warn_cxx", "(", "val", ")", ":", "if", "(", "(", "sys", ".", "platform", "==", "'darwin'", ")", "and", "(", "'clang++'", "not", "in", "val", ")", ")", ":", "_logger", ".", "warning", "(", "'Only clang++ is supported. With g++, we end up with strange g++/OSX bugs.'", ")", "return", "True" ]
we only support clang++ as otherwise we hit strange g++/osx bugs .
train
false
7,437
def _visible_fields(user_profile, user, configuration=None): if (not configuration): configuration = settings.ACCOUNT_VISIBILITY_CONFIGURATION profile_visibility = get_profile_visibility(user_profile, user, configuration) if (profile_visibility == ALL_USERS_VISIBILITY): return configuration.get('shareable_fields') else: return configuration.get('public_fields')
[ "def", "_visible_fields", "(", "user_profile", ",", "user", ",", "configuration", "=", "None", ")", ":", "if", "(", "not", "configuration", ")", ":", "configuration", "=", "settings", ".", "ACCOUNT_VISIBILITY_CONFIGURATION", "profile_visibility", "=", "get_profile_visibility", "(", "user_profile", ",", "user", ",", "configuration", ")", "if", "(", "profile_visibility", "==", "ALL_USERS_VISIBILITY", ")", ":", "return", "configuration", ".", "get", "(", "'shareable_fields'", ")", "else", ":", "return", "configuration", ".", "get", "(", "'public_fields'", ")" ]
return what fields should be visible based on user settings .
train
false
7,438
def CreateCookieData(email, admin): nickname = email.split('@')[0] if admin: app_list = os.environ['APPLICATION_ID'] else: app_list = '' secret = os.environ['COOKIE_SECRET'] hashed = sha.new((((email + nickname) + app_list) + secret)).hexdigest() return urllib.quote_plus('{0}:{1}:{2}:{3}'.format(email, nickname, app_list, hashed))
[ "def", "CreateCookieData", "(", "email", ",", "admin", ")", ":", "nickname", "=", "email", ".", "split", "(", "'@'", ")", "[", "0", "]", "if", "admin", ":", "app_list", "=", "os", ".", "environ", "[", "'APPLICATION_ID'", "]", "else", ":", "app_list", "=", "''", "secret", "=", "os", ".", "environ", "[", "'COOKIE_SECRET'", "]", "hashed", "=", "sha", ".", "new", "(", "(", "(", "(", "email", "+", "nickname", ")", "+", "app_list", ")", "+", "secret", ")", ")", ".", "hexdigest", "(", ")", "return", "urllib", ".", "quote_plus", "(", "'{0}:{1}:{2}:{3}'", ".", "format", "(", "email", ",", "nickname", ",", "app_list", ",", "hashed", ")", ")" ]
creates cookie payload data .
train
false
7,439
def _GetSupportedApiVersions(versions, runtime): if ('supported_api_versions' in versions): return versions['supported_api_versions'].get(runtime, versions)['api_versions'] return versions['api_versions']
[ "def", "_GetSupportedApiVersions", "(", "versions", ",", "runtime", ")", ":", "if", "(", "'supported_api_versions'", "in", "versions", ")", ":", "return", "versions", "[", "'supported_api_versions'", "]", ".", "get", "(", "runtime", ",", "versions", ")", "[", "'api_versions'", "]", "return", "versions", "[", "'api_versions'", "]" ]
returns the runtime-specific or general list of supported api versions .
train
false
7,440
def rtrimTerminus(command, terminator=None): if terminator: pieces = command.split(terminator) if (len(pieces) > 1): command = (terminator.join(pieces[:(-1)]) + terminator) return command
[ "def", "rtrimTerminus", "(", "command", ",", "terminator", "=", "None", ")", ":", "if", "terminator", ":", "pieces", "=", "command", ".", "split", "(", "terminator", ")", "if", "(", "len", "(", "pieces", ")", ">", "1", ")", ":", "command", "=", "(", "terminator", ".", "join", "(", "pieces", "[", ":", "(", "-", "1", ")", "]", ")", "+", "terminator", ")", "return", "command" ]
return command minus anything that follows the final terminator .
train
false
7,444
def signed_serialize(data, secret): pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL) try: secret = bytes_(secret) except UnicodeEncodeError: secret = bytes_(secret, 'utf-8') sig = hmac.new(secret, pickled, hashlib.sha1).hexdigest() return (sig + native_(base64.b64encode(pickled)))
[ "def", "signed_serialize", "(", "data", ",", "secret", ")", ":", "pickled", "=", "pickle", ".", "dumps", "(", "data", ",", "pickle", ".", "HIGHEST_PROTOCOL", ")", "try", ":", "secret", "=", "bytes_", "(", "secret", ")", "except", "UnicodeEncodeError", ":", "secret", "=", "bytes_", "(", "secret", ",", "'utf-8'", ")", "sig", "=", "hmac", ".", "new", "(", "secret", ",", "pickled", ",", "hashlib", ".", "sha1", ")", ".", "hexdigest", "(", ")", "return", "(", "sig", "+", "native_", "(", "base64", ".", "b64encode", "(", "pickled", ")", ")", ")" ]
serialize any pickleable structure and sign it using the secret .
train
false
7,445
def getInsetLoopsFromLoop(loop, radius, thresholdRatio=0.9): if (radius == 0.0): return [loop] isInset = (radius > 0) insetLoops = [] isLoopWiddershins = euclidean.isWiddershins(loop) arounds = getAroundsFromLoop(loop, radius, thresholdRatio) for around in arounds: leftPoint = euclidean.getLeftPoint(around) shouldBeWithin = (isInset == isLoopWiddershins) if (euclidean.isPointInsideLoop(loop, leftPoint) == shouldBeWithin): if (isLoopWiddershins != euclidean.isWiddershins(around)): around.reverse() insetLoops.append(around) return insetLoops
[ "def", "getInsetLoopsFromLoop", "(", "loop", ",", "radius", ",", "thresholdRatio", "=", "0.9", ")", ":", "if", "(", "radius", "==", "0.0", ")", ":", "return", "[", "loop", "]", "isInset", "=", "(", "radius", ">", "0", ")", "insetLoops", "=", "[", "]", "isLoopWiddershins", "=", "euclidean", ".", "isWiddershins", "(", "loop", ")", "arounds", "=", "getAroundsFromLoop", "(", "loop", ",", "radius", ",", "thresholdRatio", ")", "for", "around", "in", "arounds", ":", "leftPoint", "=", "euclidean", ".", "getLeftPoint", "(", "around", ")", "shouldBeWithin", "=", "(", "isInset", "==", "isLoopWiddershins", ")", "if", "(", "euclidean", ".", "isPointInsideLoop", "(", "loop", ",", "leftPoint", ")", "==", "shouldBeWithin", ")", ":", "if", "(", "isLoopWiddershins", "!=", "euclidean", ".", "isWiddershins", "(", "around", ")", ")", ":", "around", ".", "reverse", "(", ")", "insetLoops", ".", "append", "(", "around", ")", "return", "insetLoops" ]
get the inset loops .
train
false
7,446
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
7,447
def getargspecfromtext(text): blocks = text.split('\n\n') first_block = blocks[0].strip() return getsignaturefromtext(first_block, '')
[ "def", "getargspecfromtext", "(", "text", ")", ":", "blocks", "=", "text", ".", "split", "(", "'\\n\\n'", ")", "first_block", "=", "blocks", "[", "0", "]", ".", "strip", "(", ")", "return", "getsignaturefromtext", "(", "first_block", ",", "''", ")" ]
try to get the formatted argspec of a callable from the first block of its docstring this will return something like .
train
true
7,449
def _is_valid_sub_path(path, parent_paths): if (not parent_paths): return True for parent_path in parent_paths: if (path[:len(parent_path)] == parent_path): return True return False
[ "def", "_is_valid_sub_path", "(", "path", ",", "parent_paths", ")", ":", "if", "(", "not", "parent_paths", ")", ":", "return", "True", "for", "parent_path", "in", "parent_paths", ":", "if", "(", "path", "[", ":", "len", "(", "parent_path", ")", "]", "==", "parent_path", ")", ":", "return", "True", "return", "False" ]
check if a sub path is valid given an iterable of parent paths .
train
false
7,450
def sort_against(list1, list2, reverse=False): try: return [item for (_, item) in sorted(zip(list2, list1), key=(lambda x: x[0]), reverse=reverse)] except: return list1
[ "def", "sort_against", "(", "list1", ",", "list2", ",", "reverse", "=", "False", ")", ":", "try", ":", "return", "[", "item", "for", "(", "_", ",", "item", ")", "in", "sorted", "(", "zip", "(", "list2", ",", "list1", ")", ",", "key", "=", "(", "lambda", "x", ":", "x", "[", "0", "]", ")", ",", "reverse", "=", "reverse", ")", "]", "except", ":", "return", "list1" ]
arrange items of list1 in the same order as sorted .
train
true
7,452
def post_delete_site(instance, sender, **kwargs): SiteResources.objects.filter(site=instance).delete() SitePeople.objects.filter(site=instance).delete()
[ "def", "post_delete_site", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "SiteResources", ".", "objects", ".", "filter", "(", "site", "=", "instance", ")", ".", "delete", "(", ")", "SitePeople", ".", "objects", ".", "filter", "(", "site", "=", "instance", ")", ".", "delete", "(", ")" ]
signal to delete the siteresources on site delete .
train
false
7,453
def download_encrypted_blob(bucket_name, source_blob_name, destination_file_name, base64_encryption_key): storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) encryption_key = base64.b64decode(base64_encryption_key) blob = Blob(source_blob_name, bucket, encryption_key=encryption_key) blob.download_to_filename(destination_file_name) print 'Blob {} downloaded to {}.'.format(source_blob_name, destination_file_name)
[ "def", "download_encrypted_blob", "(", "bucket_name", ",", "source_blob_name", ",", "destination_file_name", ",", "base64_encryption_key", ")", ":", "storage_client", "=", "storage", ".", "Client", "(", ")", "bucket", "=", "storage_client", ".", "get_bucket", "(", "bucket_name", ")", "encryption_key", "=", "base64", ".", "b64decode", "(", "base64_encryption_key", ")", "blob", "=", "Blob", "(", "source_blob_name", ",", "bucket", ",", "encryption_key", "=", "encryption_key", ")", "blob", ".", "download_to_filename", "(", "destination_file_name", ")", "print", "'Blob {} downloaded to {}.'", ".", "format", "(", "source_blob_name", ",", "destination_file_name", ")" ]
downloads a previously-encrypted blob from google cloud storage .
train
false
7,454
def _doc_components_from_url(url, required_locale=None, check_host=True): parsed = urlparse(url) if (check_host and parsed.netloc): return False (locale, path) = split_path(parsed.path) if (required_locale and (locale != required_locale)): return False path = ('/' + path) try: (view, view_args, view_kwargs) = resolve(path) except Http404: return False import kitsune.wiki.views if (view != kitsune.wiki.views.document): raise _NotDocumentView return (locale, path, view_kwargs['document_slug'])
[ "def", "_doc_components_from_url", "(", "url", ",", "required_locale", "=", "None", ",", "check_host", "=", "True", ")", ":", "parsed", "=", "urlparse", "(", "url", ")", "if", "(", "check_host", "and", "parsed", ".", "netloc", ")", ":", "return", "False", "(", "locale", ",", "path", ")", "=", "split_path", "(", "parsed", ".", "path", ")", "if", "(", "required_locale", "and", "(", "locale", "!=", "required_locale", ")", ")", ":", "return", "False", "path", "=", "(", "'/'", "+", "path", ")", "try", ":", "(", "view", ",", "view_args", ",", "view_kwargs", ")", "=", "resolve", "(", "path", ")", "except", "Http404", ":", "return", "False", "import", "kitsune", ".", "wiki", ".", "views", "if", "(", "view", "!=", "kitsune", ".", "wiki", ".", "views", ".", "document", ")", ":", "raise", "_NotDocumentView", "return", "(", "locale", ",", "path", ",", "view_kwargs", "[", "'document_slug'", "]", ")" ]
return the locale, path, and document slug if url is a document, false otherwise .
train
false
7,457
def clear_bad_addresses(store): bad_tx = ['a288fec5559c3f73fd3d93db8e8460562ebfe2fcf04a5114e8d0f2920a6270dc', '2a0597e665ac3d1cabeede95cedf907934db7f639e477b3c77b242140d8cf728', 'e411dbebd2f7d64dafeef9b14b5c59ec60c36779d43f850e5e347abee1e1a455'] for tx_hash in bad_tx: row = store.selectrow('\n SELECT tx_id FROM tx WHERE tx_hash = ?', (store.hashin_hex(tx_hash),)) if row: store.sql('\n UPDATE txout SET pubkey_id = NULL\n WHERE tx_id = ? AND txout_pos = 1 AND pubkey_id IS NOT NULL', (row[0],)) if store.rowcount(): store.log.info('Cleared txout %s', tx_hash)
[ "def", "clear_bad_addresses", "(", "store", ")", ":", "bad_tx", "=", "[", "'a288fec5559c3f73fd3d93db8e8460562ebfe2fcf04a5114e8d0f2920a6270dc'", ",", "'2a0597e665ac3d1cabeede95cedf907934db7f639e477b3c77b242140d8cf728'", ",", "'e411dbebd2f7d64dafeef9b14b5c59ec60c36779d43f850e5e347abee1e1a455'", "]", "for", "tx_hash", "in", "bad_tx", ":", "row", "=", "store", ".", "selectrow", "(", "'\\n SELECT tx_id FROM tx WHERE tx_hash = ?'", ",", "(", "store", ".", "hashin_hex", "(", "tx_hash", ")", ",", ")", ")", "if", "row", ":", "store", ".", "sql", "(", "'\\n UPDATE txout SET pubkey_id = NULL\\n WHERE tx_id = ? AND txout_pos = 1 AND pubkey_id IS NOT NULL'", ",", "(", "row", "[", "0", "]", ",", ")", ")", "if", "store", ".", "rowcount", "(", ")", ":", "store", ".", "log", ".", "info", "(", "'Cleared txout %s'", ",", "tx_hash", ")" ]
set address=unknown for the bogus outputs in bitcoin 71036 .
train
false
7,458
def _get_course_credit_requirements(course_key): credit_xblock_requirements = _get_credit_course_requirement_xblocks(course_key) min_grade_requirement = _get_min_grade_requirement(course_key) proctored_exams_requirements = _get_proctoring_requirements(course_key) block_requirements = (credit_xblock_requirements + proctored_exams_requirements) sorted_block_requirements = sorted(block_requirements, key=(lambda x: ((x['start_date'] is None), x['start_date'], x['display_name']))) credit_requirements = (min_grade_requirement + sorted_block_requirements) return credit_requirements
[ "def", "_get_course_credit_requirements", "(", "course_key", ")", ":", "credit_xblock_requirements", "=", "_get_credit_course_requirement_xblocks", "(", "course_key", ")", "min_grade_requirement", "=", "_get_min_grade_requirement", "(", "course_key", ")", "proctored_exams_requirements", "=", "_get_proctoring_requirements", "(", "course_key", ")", "block_requirements", "=", "(", "credit_xblock_requirements", "+", "proctored_exams_requirements", ")", "sorted_block_requirements", "=", "sorted", "(", "block_requirements", ",", "key", "=", "(", "lambda", "x", ":", "(", "(", "x", "[", "'start_date'", "]", "is", "None", ")", ",", "x", "[", "'start_date'", "]", ",", "x", "[", "'display_name'", "]", ")", ")", ")", "credit_requirements", "=", "(", "min_grade_requirement", "+", "sorted_block_requirements", ")", "return", "credit_requirements" ]
returns the list of credit requirements for the given course .
train
false
7,459
@contextmanager def augment_usage_errors(ctx, param=None): try: (yield) except BadParameter as e: if (e.ctx is None): e.ctx = ctx if ((param is not None) and (e.param is None)): e.param = param raise except UsageError as e: if (e.ctx is None): e.ctx = ctx raise
[ "@", "contextmanager", "def", "augment_usage_errors", "(", "ctx", ",", "param", "=", "None", ")", ":", "try", ":", "(", "yield", ")", "except", "BadParameter", "as", "e", ":", "if", "(", "e", ".", "ctx", "is", "None", ")", ":", "e", ".", "ctx", "=", "ctx", "if", "(", "(", "param", "is", "not", "None", ")", "and", "(", "e", ".", "param", "is", "None", ")", ")", ":", "e", ".", "param", "=", "param", "raise", "except", "UsageError", "as", "e", ":", "if", "(", "e", ".", "ctx", "is", "None", ")", ":", "e", ".", "ctx", "=", "ctx", "raise" ]
context manager that attaches extra information to exceptions that fly .
train
true
7,460
def _BytesForNonRepeatedElement(value, field_number, field_type): try: fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] return fn(field_number, value) except KeyError: raise message_mod.EncodeError(('Unrecognized field type: %d' % field_type))
[ "def", "_BytesForNonRepeatedElement", "(", "value", ",", "field_number", ",", "field_type", ")", ":", "try", ":", "fn", "=", "type_checkers", ".", "TYPE_TO_BYTE_SIZE_FN", "[", "field_type", "]", "return", "fn", "(", "field_number", ",", "value", ")", "except", "KeyError", ":", "raise", "message_mod", ".", "EncodeError", "(", "(", "'Unrecognized field type: %d'", "%", "field_type", ")", ")" ]
returns the number of bytes needed to serialize a non-repeated element .
train
true
7,461
def test_resize_icon_list(): resize_size = [32, 82, 100] final_size = [(32, 12), (82, 30), (100, 37)] _uploader(resize_size, final_size)
[ "def", "test_resize_icon_list", "(", ")", ":", "resize_size", "=", "[", "32", ",", "82", ",", "100", "]", "final_size", "=", "[", "(", "32", ",", "12", ")", ",", "(", "82", ",", "30", ")", ",", "(", "100", ",", "37", ")", "]", "_uploader", "(", "resize_size", ",", "final_size", ")" ]
resize multiple images at once .
train
false
7,462
def get_pasted_lines(sentinel, l_input=py3compat.input, quiet=False): if (not quiet): print ("Pasting code; enter '%s' alone on the line to stop or use Ctrl-D." % sentinel) prompt = ':' else: prompt = '' while True: try: l = l_input(prompt) if (l == sentinel): return else: (yield l) except EOFError: print '<EOF>' return
[ "def", "get_pasted_lines", "(", "sentinel", ",", "l_input", "=", "py3compat", ".", "input", ",", "quiet", "=", "False", ")", ":", "if", "(", "not", "quiet", ")", ":", "print", "(", "\"Pasting code; enter '%s' alone on the line to stop or use Ctrl-D.\"", "%", "sentinel", ")", "prompt", "=", "':'", "else", ":", "prompt", "=", "''", "while", "True", ":", "try", ":", "l", "=", "l_input", "(", "prompt", ")", "if", "(", "l", "==", "sentinel", ")", ":", "return", "else", ":", "(", "yield", "l", ")", "except", "EOFError", ":", "print", "'<EOF>'", "return" ]
yield pasted lines until the user enters the given sentinel value .
train
false
7,463
def has_app(id): return (_app_id(id) in apps()['apps'])
[ "def", "has_app", "(", "id", ")", ":", "return", "(", "_app_id", "(", "id", ")", "in", "apps", "(", ")", "[", "'apps'", "]", ")" ]
return whether the given app id is currently configured .
train
false
7,465
def find_git_http_backend(): if hasattr(find_git_http_backend, 'result'): return find_git_http_backend.result try: path = subprocess.check_output(['git', '--exec-path']).decode('utf-8') if path: GIT_PATHS.insert(0, path) except OSError: pass for path in GIT_PATHS: name = os.path.join(path, 'git-http-backend') if os.path.exists(name): find_git_http_backend.result = name return name
[ "def", "find_git_http_backend", "(", ")", ":", "if", "hasattr", "(", "find_git_http_backend", ",", "'result'", ")", ":", "return", "find_git_http_backend", ".", "result", "try", ":", "path", "=", "subprocess", ".", "check_output", "(", "[", "'git'", ",", "'--exec-path'", "]", ")", ".", "decode", "(", "'utf-8'", ")", "if", "path", ":", "GIT_PATHS", ".", "insert", "(", "0", ",", "path", ")", "except", "OSError", ":", "pass", "for", "path", "in", "GIT_PATHS", ":", "name", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'git-http-backend'", ")", "if", "os", ".", "path", ".", "exists", "(", "name", ")", ":", "find_git_http_backend", ".", "result", "=", "name", "return", "name" ]
finds git http backend .
train
false
7,466
def _get_time_zone_dictionary(time_zone_name): return {'time_zone': time_zone_name, 'description': get_display_time_zone(time_zone_name)}
[ "def", "_get_time_zone_dictionary", "(", "time_zone_name", ")", ":", "return", "{", "'time_zone'", ":", "time_zone_name", ",", "'description'", ":", "get_display_time_zone", "(", "time_zone_name", ")", "}" ]
returns a dictionary of time zone information: * time_zone: name of pytz time zone * description: display version of time zone [e .
train
false
7,468
def find_test_driver(): test_driver = DefaultGalaxyTestDriver for key in TEST_DRIVERS.keys(): if _check_arg(key): test_driver = TEST_DRIVERS[key] return test_driver
[ "def", "find_test_driver", "(", ")", ":", "test_driver", "=", "DefaultGalaxyTestDriver", "for", "key", "in", "TEST_DRIVERS", ".", "keys", "(", ")", ":", "if", "_check_arg", "(", "key", ")", ":", "test_driver", "=", "TEST_DRIVERS", "[", "key", "]", "return", "test_driver" ]
look at command-line args and find the correct galaxy test driver .
train
false
7,471
def tear_down_repo(repo): repo.close() temp_dir = os.path.dirname(repo.path.rstrip(os.sep)) shutil.rmtree(temp_dir)
[ "def", "tear_down_repo", "(", "repo", ")", ":", "repo", ".", "close", "(", ")", "temp_dir", "=", "os", ".", "path", ".", "dirname", "(", "repo", ".", "path", ".", "rstrip", "(", "os", ".", "sep", ")", ")", "shutil", ".", "rmtree", "(", "temp_dir", ")" ]
tear down a test repository .
train
false
7,472
def test_masking_regression_1795(): t = Table.read(os.path.join(DATA, 'tb.fits')) assert np.all((t['c1'].mask == np.array([False, False]))) assert np.all((t['c2'].mask == np.array([False, False]))) assert np.all((t['c3'].mask == np.array([False, False]))) assert np.all((t['c4'].mask == np.array([False, False]))) assert np.all((t['c1'].data == np.array([1, 2]))) assert np.all((t['c2'].data == np.array(['abc', 'xy ']))) assert_allclose(t['c3'].data, np.array([3.70000007153, 6.6999997139])) assert np.all((t['c4'].data == np.array([False, True])))
[ "def", "test_masking_regression_1795", "(", ")", ":", "t", "=", "Table", ".", "read", "(", "os", ".", "path", ".", "join", "(", "DATA", ",", "'tb.fits'", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c1'", "]", ".", "mask", "==", "np", ".", "array", "(", "[", "False", ",", "False", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c2'", "]", ".", "mask", "==", "np", ".", "array", "(", "[", "False", ",", "False", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c3'", "]", ".", "mask", "==", "np", ".", "array", "(", "[", "False", ",", "False", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c4'", "]", ".", "mask", "==", "np", ".", "array", "(", "[", "False", ",", "False", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c1'", "]", ".", "data", "==", "np", ".", "array", "(", "[", "1", ",", "2", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c2'", "]", ".", "data", "==", "np", ".", "array", "(", "[", "'abc'", ",", "'xy '", "]", ")", ")", ")", "assert_allclose", "(", "t", "[", "'c3'", "]", ".", "data", ",", "np", ".", "array", "(", "[", "3.70000007153", ",", "6.6999997139", "]", ")", ")", "assert", "np", ".", "all", "(", "(", "t", "[", "'c4'", "]", ".", "data", "==", "np", ".", "array", "(", "[", "False", ",", "True", "]", ")", ")", ")" ]
regression test for #1795 - this bug originally caused columns where tnull was not defined to have their first element masked .
train
false
7,473
def CleanFemPreg(df): df.agepreg /= 100.0 df.birthwgt_lb[(df.birthwgt_lb > 20)] = np.nan na_vals = [97, 98, 99] df.birthwgt_lb.replace(na_vals, np.nan, inplace=True) df.birthwgt_oz.replace(na_vals, np.nan, inplace=True) df.hpagelb.replace(na_vals, np.nan, inplace=True) df.babysex.replace([7, 9], np.nan, inplace=True) df.nbrnaliv.replace([9], np.nan, inplace=True) df['totalwgt_lb'] = (df.birthwgt_lb + (df.birthwgt_oz / 16.0)) df.cmintvw = np.nan
[ "def", "CleanFemPreg", "(", "df", ")", ":", "df", ".", "agepreg", "/=", "100.0", "df", ".", "birthwgt_lb", "[", "(", "df", ".", "birthwgt_lb", ">", "20", ")", "]", "=", "np", ".", "nan", "na_vals", "=", "[", "97", ",", "98", ",", "99", "]", "df", ".", "birthwgt_lb", ".", "replace", "(", "na_vals", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "df", ".", "birthwgt_oz", ".", "replace", "(", "na_vals", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "df", ".", "hpagelb", ".", "replace", "(", "na_vals", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "df", ".", "babysex", ".", "replace", "(", "[", "7", ",", "9", "]", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "df", ".", "nbrnaliv", ".", "replace", "(", "[", "9", "]", ",", "np", ".", "nan", ",", "inplace", "=", "True", ")", "df", "[", "'totalwgt_lb'", "]", "=", "(", "df", ".", "birthwgt_lb", "+", "(", "df", ".", "birthwgt_oz", "/", "16.0", ")", ")", "df", ".", "cmintvw", "=", "np", ".", "nan" ]
recodes variables from the pregnancy frame .
train
false
7,474
def solve_biquadratic(f, g, opt): G = groebner([f, g]) if ((len(G) == 1) and G[0].is_ground): return None if (len(G) != 2): raise SolveFailed (p, q) = G (x, y) = opt.gens p = Poly(p, x, expand=False) q = q.ltrim((-1)) p_roots = [rcollect(expr, y) for expr in roots(p).keys()] q_roots = list(roots(q).keys()) solutions = [] for q_root in q_roots: for p_root in p_roots: solution = (p_root.subs(y, q_root), q_root) solutions.append(solution) return sorted(solutions, key=default_sort_key)
[ "def", "solve_biquadratic", "(", "f", ",", "g", ",", "opt", ")", ":", "G", "=", "groebner", "(", "[", "f", ",", "g", "]", ")", "if", "(", "(", "len", "(", "G", ")", "==", "1", ")", "and", "G", "[", "0", "]", ".", "is_ground", ")", ":", "return", "None", "if", "(", "len", "(", "G", ")", "!=", "2", ")", ":", "raise", "SolveFailed", "(", "p", ",", "q", ")", "=", "G", "(", "x", ",", "y", ")", "=", "opt", ".", "gens", "p", "=", "Poly", "(", "p", ",", "x", ",", "expand", "=", "False", ")", "q", "=", "q", ".", "ltrim", "(", "(", "-", "1", ")", ")", "p_roots", "=", "[", "rcollect", "(", "expr", ",", "y", ")", "for", "expr", "in", "roots", "(", "p", ")", ".", "keys", "(", ")", "]", "q_roots", "=", "list", "(", "roots", "(", "q", ")", ".", "keys", "(", ")", ")", "solutions", "=", "[", "]", "for", "q_root", "in", "q_roots", ":", "for", "p_root", "in", "p_roots", ":", "solution", "=", "(", "p_root", ".", "subs", "(", "y", ",", "q_root", ")", ",", "q_root", ")", "solutions", ".", "append", "(", "solution", ")", "return", "sorted", "(", "solutions", ",", "key", "=", "default_sort_key", ")" ]
solve a system of two bivariate quadratic polynomial equations .
train
false
7,476
def test_tilde(data): session = PipSession() with patch('pip.index.os.path.exists', return_value=True): finder = PackageFinder(['~/python-pkgs'], [], session=session) req = InstallRequirement.from_line('gmpy') with pytest.raises(DistributionNotFound): finder.find_requirement(req, False)
[ "def", "test_tilde", "(", "data", ")", ":", "session", "=", "PipSession", "(", ")", "with", "patch", "(", "'pip.index.os.path.exists'", ",", "return_value", "=", "True", ")", ":", "finder", "=", "PackageFinder", "(", "[", "'~/python-pkgs'", "]", ",", "[", "]", ",", "session", "=", "session", ")", "req", "=", "InstallRequirement", ".", "from_line", "(", "'gmpy'", ")", "with", "pytest", ".", "raises", "(", "DistributionNotFound", ")", ":", "finder", ".", "find_requirement", "(", "req", ",", "False", ")" ]
finder can accept a path with ~ in it and will normalize it .
train
false
7,477
def check(actions, request, target=None): policy_check = utils_settings.import_setting('POLICY_CHECK_FUNCTION') if policy_check: return policy_check(actions, request, target) return True
[ "def", "check", "(", "actions", ",", "request", ",", "target", "=", "None", ")", ":", "policy_check", "=", "utils_settings", ".", "import_setting", "(", "'POLICY_CHECK_FUNCTION'", ")", "if", "policy_check", ":", "return", "policy_check", "(", "actions", ",", "request", ",", "target", ")", "return", "True" ]
check the given actions against the configured policy check function .
train
true
7,478
def sendline(command): if (ping() is False): init() (out, err) = DETAILS[_worker_name()].sendline(command) (_, out) = out.split('\n', 1) (out, _, _) = out.rpartition('\n') return out
[ "def", "sendline", "(", "command", ")", ":", "if", "(", "ping", "(", ")", "is", "False", ")", ":", "init", "(", ")", "(", "out", ",", "err", ")", "=", "DETAILS", "[", "_worker_name", "(", ")", "]", ".", "sendline", "(", "command", ")", "(", "_", ",", "out", ")", "=", "out", ".", "split", "(", "'\\n'", ",", "1", ")", "(", "out", ",", "_", ",", "_", ")", "=", "out", ".", "rpartition", "(", "'\\n'", ")", "return", "out" ]
run command through switchs cli .
train
false
7,479
def remove_container(container, force=False, v=False): client = _get_client() status = base_status.copy() status['id'] = container dcontainer = None try: dcontainer = _get_container_infos(container)['Id'] if is_running(dcontainer): if (not force): _invalid(status, id_=container, out=None, comment="Container {0} is running, won't remove it".format(container)) __salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True) return status else: kill(dcontainer) client.remove_container(dcontainer, v=v) try: _get_container_infos(dcontainer) _invalid(status, comment='Container was not removed: {0}'.format(container)) except Exception: status['status'] = True status['comment'] = 'Container {0} was removed'.format(container) except Exception: _invalid(status, id_=container, out=traceback.format_exc()) __salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True) return status
[ "def", "remove_container", "(", "container", ",", "force", "=", "False", ",", "v", "=", "False", ")", ":", "client", "=", "_get_client", "(", ")", "status", "=", "base_status", ".", "copy", "(", ")", "status", "[", "'id'", "]", "=", "container", "dcontainer", "=", "None", "try", ":", "dcontainer", "=", "_get_container_infos", "(", "container", ")", "[", "'Id'", "]", "if", "is_running", "(", "dcontainer", ")", ":", "if", "(", "not", "force", ")", ":", "_invalid", "(", "status", ",", "id_", "=", "container", ",", "out", "=", "None", ",", "comment", "=", "\"Container {0} is running, won't remove it\"", ".", "format", "(", "container", ")", ")", "__salt__", "[", "'mine.send'", "]", "(", "'dockerng.ps'", ",", "verbose", "=", "True", ",", "all", "=", "True", ",", "host", "=", "True", ")", "return", "status", "else", ":", "kill", "(", "dcontainer", ")", "client", ".", "remove_container", "(", "dcontainer", ",", "v", "=", "v", ")", "try", ":", "_get_container_infos", "(", "dcontainer", ")", "_invalid", "(", "status", ",", "comment", "=", "'Container was not removed: {0}'", ".", "format", "(", "container", ")", ")", "except", "Exception", ":", "status", "[", "'status'", "]", "=", "True", "status", "[", "'comment'", "]", "=", "'Container {0} was removed'", ".", "format", "(", "container", ")", "except", "Exception", ":", "_invalid", "(", "status", ",", "id_", "=", "container", ",", "out", "=", "traceback", ".", "format_exc", "(", ")", ")", "__salt__", "[", "'mine.send'", "]", "(", "'dockerng.ps'", ",", "verbose", "=", "True", ",", "all", "=", "True", ",", "host", "=", "True", ")", "return", "status" ]
remove a container from a docker installation container container id force remove a running container .
train
false
7,480
def compile_template_str(template, renderers, default, blacklist, whitelist): fn_ = salt.utils.files.mkstemp() with salt.utils.fopen(fn_, 'wb') as ofile: ofile.write(SLS_ENCODER(template)[0]) return compile_template(fn_, renderers, default, blacklist, whitelist)
[ "def", "compile_template_str", "(", "template", ",", "renderers", ",", "default", ",", "blacklist", ",", "whitelist", ")", ":", "fn_", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "fn_", ",", "'wb'", ")", "as", "ofile", ":", "ofile", ".", "write", "(", "SLS_ENCODER", "(", "template", ")", "[", "0", "]", ")", "return", "compile_template", "(", "fn_", ",", "renderers", ",", "default", ",", "blacklist", ",", "whitelist", ")" ]
take template as a string and return the high data structure derived from the template .
train
true
7,482
def getGeometryOutputByArguments(arguments, elementNode): evaluate.setAttributesByArguments(['sides', 'radius'], arguments, elementNode) return getGeometryOutput(None, elementNode)
[ "def", "getGeometryOutputByArguments", "(", "arguments", ",", "elementNode", ")", ":", "evaluate", ".", "setAttributesByArguments", "(", "[", "'sides'", ",", "'radius'", "]", ",", "arguments", ",", "elementNode", ")", "return", "getGeometryOutput", "(", "None", ",", "elementNode", ")" ]
get vector3 vertexes from attribute dictionary by arguments .
train
false
7,483
def _get_form_descriptions(request): return {'login': _local_server_get('/user_api/v1/account/login_session/', request.session), 'registration': _local_server_get('/user_api/v1/account/registration/', request.session), 'password_reset': _local_server_get('/user_api/v1/account/password_reset/', request.session)}
[ "def", "_get_form_descriptions", "(", "request", ")", ":", "return", "{", "'login'", ":", "_local_server_get", "(", "'/user_api/v1/account/login_session/'", ",", "request", ".", "session", ")", ",", "'registration'", ":", "_local_server_get", "(", "'/user_api/v1/account/registration/'", ",", "request", ".", "session", ")", ",", "'password_reset'", ":", "_local_server_get", "(", "'/user_api/v1/account/password_reset/'", ",", "request", ".", "session", ")", "}" ]
retrieve form descriptions from the user api .
train
false
7,484
def test_low_gamma(): win = visual.Window([600, 600], gamma=0.5, autoLog=False) for n in range(5): win.flip() assert (win.useNativeGamma == False) win.close()
[ "def", "test_low_gamma", "(", ")", ":", "win", "=", "visual", ".", "Window", "(", "[", "600", ",", "600", "]", ",", "gamma", "=", "0.5", ",", "autoLog", "=", "False", ")", "for", "n", "in", "range", "(", "5", ")", ":", "win", ".", "flip", "(", ")", "assert", "(", "win", ".", "useNativeGamma", "==", "False", ")", "win", ".", "close", "(", ")" ]
setting gamma low .
train
false
7,485
def safe_string(value): if isinstance(value, NUMERIC_TYPES): value = ('%.16g' % value) elif (value is None): value = 'none' elif (not isinstance(value, basestring)): value = str(value) return value
[ "def", "safe_string", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "NUMERIC_TYPES", ")", ":", "value", "=", "(", "'%.16g'", "%", "value", ")", "elif", "(", "value", "is", "None", ")", ":", "value", "=", "'none'", "elif", "(", "not", "isinstance", "(", "value", ",", "basestring", ")", ")", ":", "value", "=", "str", "(", "value", ")", "return", "value" ]
safely and consistently format numeric values .
train
false
7,486
def triangulate(vertices): n = len(vertices) vertices = np.asarray(vertices) zmean = vertices[:, 2].mean() vertices_2d = vertices[:, :2] segments = np.repeat(np.arange((n + 1)), 2)[1:(-1)] segments[(-2):] = ((n - 1), 0) if _TRIANGLE_AVAILABLE: (vertices_2d, triangles) = _triangulate_cpp(vertices_2d, segments) else: (vertices_2d, triangles) = _triangulate_python(vertices_2d, segments) vertices = np.empty((len(vertices_2d), 3)) vertices[:, :2] = vertices_2d vertices[:, 2] = zmean return (vertices, triangles)
[ "def", "triangulate", "(", "vertices", ")", ":", "n", "=", "len", "(", "vertices", ")", "vertices", "=", "np", ".", "asarray", "(", "vertices", ")", "zmean", "=", "vertices", "[", ":", ",", "2", "]", ".", "mean", "(", ")", "vertices_2d", "=", "vertices", "[", ":", ",", ":", "2", "]", "segments", "=", "np", ".", "repeat", "(", "np", ".", "arange", "(", "(", "n", "+", "1", ")", ")", ",", "2", ")", "[", "1", ":", "(", "-", "1", ")", "]", "segments", "[", "(", "-", "2", ")", ":", "]", "=", "(", "(", "n", "-", "1", ")", ",", "0", ")", "if", "_TRIANGLE_AVAILABLE", ":", "(", "vertices_2d", ",", "triangles", ")", "=", "_triangulate_cpp", "(", "vertices_2d", ",", "segments", ")", "else", ":", "(", "vertices_2d", ",", "triangles", ")", "=", "_triangulate_python", "(", "vertices_2d", ",", "segments", ")", "vertices", "=", "np", ".", "empty", "(", "(", "len", "(", "vertices_2d", ")", ",", "3", ")", ")", "vertices", "[", ":", ",", ":", "2", "]", "=", "vertices_2d", "vertices", "[", ":", ",", "2", "]", "=", "zmean", "return", "(", "vertices", ",", "triangles", ")" ]
triangulate a polygon defined by an ordered list of vertices .
train
true
7,489
def _UTC(): return utc
[ "def", "_UTC", "(", ")", ":", "return", "utc" ]
factory function for utc unpickling .
train
false
7,490
def assert_clock_idle(case, clock): case.assertEqual(clock.getDelayedCalls(), [])
[ "def", "assert_clock_idle", "(", "case", ",", "clock", ")", ":", "case", ".", "assertEqual", "(", "clock", ".", "getDelayedCalls", "(", ")", ",", "[", "]", ")" ]
assert that the given clock doesnt have any pending delayed calls .
train
false
7,493
def pathsplit(path): result = [] (head, tail) = _os.path.split(path) if ((not head) or (head == path)): pass else: result = pathsplit(head) if tail: result += [tail] return result
[ "def", "pathsplit", "(", "path", ")", ":", "result", "=", "[", "]", "(", "head", ",", "tail", ")", "=", "_os", ".", "path", ".", "split", "(", "path", ")", "if", "(", "(", "not", "head", ")", "or", "(", "head", "==", "path", ")", ")", ":", "pass", "else", ":", "result", "=", "pathsplit", "(", "head", ")", "if", "tail", ":", "result", "+=", "[", "tail", "]", "return", "result" ]
split a path into a list of elements of the file system hierarchy .
train
false
7,495
def S_crosssection(x, group): x_group_sums = group_sums(x, group).T return S_white_simple(x_group_sums)
[ "def", "S_crosssection", "(", "x", ",", "group", ")", ":", "x_group_sums", "=", "group_sums", "(", "x", ",", "group", ")", ".", "T", "return", "S_white_simple", "(", "x_group_sums", ")" ]
inner covariance matrix for white on group sums sandwich i guess for a single categorical group only .
train
false
7,496
def read_cert_from_file(cert_file, cert_type): if (not cert_file): return '' if (cert_type == 'pem'): line = open(cert_file).read().replace('\r\n', '\n').split('\n') if (line[0] == '-----BEGIN CERTIFICATE-----'): line = line[1:] elif (line[0] == '-----BEGIN PUBLIC KEY-----'): line = line[1:] else: raise CertificateError('Strange beginning of PEM file') while (line[(-1)] == ''): line = line[:(-1)] if (line[(-1)] == '-----END CERTIFICATE-----'): line = line[:(-1)] elif (line[(-1)] == '-----END PUBLIC KEY-----'): line = line[:(-1)] else: raise CertificateError('Strange end of PEM file') return ''.join(line) if (cert_type in ['der', 'cer', 'crt']): data = read_file(cert_file) return base64.b64encode(str(data))
[ "def", "read_cert_from_file", "(", "cert_file", ",", "cert_type", ")", ":", "if", "(", "not", "cert_file", ")", ":", "return", "''", "if", "(", "cert_type", "==", "'pem'", ")", ":", "line", "=", "open", "(", "cert_file", ")", ".", "read", "(", ")", ".", "replace", "(", "'\\r\\n'", ",", "'\\n'", ")", ".", "split", "(", "'\\n'", ")", "if", "(", "line", "[", "0", "]", "==", "'-----BEGIN CERTIFICATE-----'", ")", ":", "line", "=", "line", "[", "1", ":", "]", "elif", "(", "line", "[", "0", "]", "==", "'-----BEGIN PUBLIC KEY-----'", ")", ":", "line", "=", "line", "[", "1", ":", "]", "else", ":", "raise", "CertificateError", "(", "'Strange beginning of PEM file'", ")", "while", "(", "line", "[", "(", "-", "1", ")", "]", "==", "''", ")", ":", "line", "=", "line", "[", ":", "(", "-", "1", ")", "]", "if", "(", "line", "[", "(", "-", "1", ")", "]", "==", "'-----END CERTIFICATE-----'", ")", ":", "line", "=", "line", "[", ":", "(", "-", "1", ")", "]", "elif", "(", "line", "[", "(", "-", "1", ")", "]", "==", "'-----END PUBLIC KEY-----'", ")", ":", "line", "=", "line", "[", ":", "(", "-", "1", ")", "]", "else", ":", "raise", "CertificateError", "(", "'Strange end of PEM file'", ")", "return", "''", ".", "join", "(", "line", ")", "if", "(", "cert_type", "in", "[", "'der'", ",", "'cer'", ",", "'crt'", "]", ")", ":", "data", "=", "read_file", "(", "cert_file", ")", "return", "base64", ".", "b64encode", "(", "str", "(", "data", ")", ")" ]
reads a certificate from a file .
train
false
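A hedged usage sketch for read_cert_from_file; 'server.pem' is a hypothetical path to a PEM-encoded certificate:
body = read_cert_from_file('server.pem', 'pem')
# `body` is the base64 payload with the BEGIN/END markers and line breaks
# stripped; a file with unexpected markers raises CertificateError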
7,499
def purestr(x): if (not isinstance(x, Basic)): return str(x) if (type(x) in slotClasses): args = [getattr(x, slot) for slot in x.__slots__] elif (type(x) in sort_classes): args = sorted(x.args, key=default_sort_key) else: args = x.args return ('%s(%s)' % (type(x).__name__, ', '.join(map(purestr, args))))
[ "def", "purestr", "(", "x", ")", ":", "if", "(", "not", "isinstance", "(", "x", ",", "Basic", ")", ")", ":", "return", "str", "(", "x", ")", "if", "(", "type", "(", "x", ")", "in", "slotClasses", ")", ":", "args", "=", "[", "getattr", "(", "x", ",", "slot", ")", "for", "slot", "in", "x", ".", "__slots__", "]", "elif", "(", "type", "(", "x", ")", "in", "sort_classes", ")", ":", "args", "=", "sorted", "(", "x", ".", "args", ",", "key", "=", "default_sort_key", ")", "else", ":", "args", "=", "x", ".", "args", "return", "(", "'%s(%s)'", "%", "(", "type", "(", "x", ")", ".", "__name__", ",", "', '", ".", "join", "(", "map", "(", "purestr", ",", "args", ")", ")", ")", ")" ]
a string representation that follows obj = type(obj)(*obj.args) exactly .
train
false
7,500
@verbose def spatial_src_connectivity(src, dist=None, verbose=None): return spatio_temporal_src_connectivity(src, 1, dist)
[ "@", "verbose", "def", "spatial_src_connectivity", "(", "src", ",", "dist", "=", "None", ",", "verbose", "=", "None", ")", ":", "return", "spatio_temporal_src_connectivity", "(", "src", ",", "1", ",", "dist", ")" ]
compute connectivity for a source space activation .
train
false
7,501
def _have_bug17666(): if (os.name == 'java'): return False import gzip bgzf_eof = ('\x1f\x8b\x08\x04\x00\x00\x00\x00\x00\xff\x06\x00BC' + '\x02\x00\x1b\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00') if (sys.version_info[0] >= 3): import codecs bgzf_eof = codecs.latin_1_encode(bgzf_eof)[0] h = gzip.GzipFile(fileobj=BytesIO(bgzf_eof)) try: data = h.read() h.close() assert (not data), ('Should be zero length, not %i' % len(data)) return False except TypeError as err: h.close() return True
[ "def", "_have_bug17666", "(", ")", ":", "if", "(", "os", ".", "name", "==", "'java'", ")", ":", "return", "False", "import", "gzip", "bgzf_eof", "=", "(", "'\\x1f\\x8b\\x08\\x04\\x00\\x00\\x00\\x00\\x00\\xff\\x06\\x00BC'", "+", "'\\x02\\x00\\x1b\\x00\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00'", ")", "if", "(", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ")", ":", "import", "codecs", "bgzf_eof", "=", "codecs", ".", "latin_1_encode", "(", "bgzf_eof", ")", "[", "0", "]", "h", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "BytesIO", "(", "bgzf_eof", ")", ")", "try", ":", "data", "=", "h", ".", "read", "(", ")", "h", ".", "close", "(", ")", "assert", "(", "not", "data", ")", ",", "(", "'Should be zero length, not %i'", "%", "len", "(", "data", ")", ")", "return", "False", "except", "TypeError", "as", "err", ":", "h", ".", "close", "(", ")", "return", "True" ]
debug function to check if python's gzip is broken .
train
false
7,502
def new_document(source_path, settings=None): from docutils import frontend if (settings is None): settings = frontend.OptionParser().get_default_values() source_path = decode_path(source_path) reporter = new_reporter(source_path, settings) document = nodes.document(settings, reporter, source=source_path) document.note_source(source_path, (-1)) return document
[ "def", "new_document", "(", "source_path", ",", "settings", "=", "None", ")", ":", "from", "docutils", "import", "frontend", "if", "(", "settings", "is", "None", ")", ":", "settings", "=", "frontend", ".", "OptionParser", "(", ")", ".", "get_default_values", "(", ")", "source_path", "=", "decode_path", "(", "source_path", ")", "reporter", "=", "new_reporter", "(", "source_path", ",", "settings", ")", "document", "=", "nodes", ".", "document", "(", "settings", ",", "reporter", ",", "source", "=", "source_path", ")", "document", ".", "note_source", "(", "source_path", ",", "(", "-", "1", ")", ")", "return", "document" ]
return a new empty document object .
train
false
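A hedged sketch using new_document with the public docutils node API (nodes.paragraph and pformat are real docutils calls):
from docutils import nodes
doc = new_document('<string>')          # settings default to OptionParser values
doc += nodes.paragraph(text='Hello')    # the document acts as a container node
print(doc.pformat())                    # pretty-prints the node tree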
7,503
def is_raising(body): for node in body: if isinstance(node, astroid.Raise): return True return False
[ "def", "is_raising", "(", "body", ")", ":", "for", "node", "in", "body", ":", "if", "isinstance", "(", "node", ",", "astroid", ".", "Raise", ")", ":", "return", "True", "return", "False" ]
return true if the given statement node raises an exception .
train
false
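A hedged sketch for is_raising; astroid.parse is part of astroid's public API:
import astroid
print(is_raising(astroid.parse("raise ValueError('boom')").body))  # -> True
print(is_raising(astroid.parse('x = 1').body))                     # -> False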
7,504
def get_versions_for_exploration_stats(exploration_id): return stats_models.ExplorationAnnotationsModel.get_versions(exploration_id)
[ "def", "get_versions_for_exploration_stats", "(", "exploration_id", ")", ":", "return", "stats_models", ".", "ExplorationAnnotationsModel", ".", "get_versions", "(", "exploration_id", ")" ]
returns a list of versions for this exploration .
train
false
7,505
def version_info(): Version = namedtuple('Version', 'major, minor, micro') from jedi import __version__ tupl = re.findall('[a-z]+|\\d+', __version__) return Version(*[(x if (i == 3) else int(x)) for (i, x) in enumerate(tupl)])
[ "def", "version_info", "(", ")", ":", "Version", "=", "namedtuple", "(", "'Version'", ",", "'major, minor, micro'", ")", "from", "jedi", "import", "__version__", "tupl", "=", "re", ".", "findall", "(", "'[a-z]+|\\\\d+'", ",", "__version__", ")", "return", "Version", "(", "*", "[", "(", "x", "if", "(", "i", "==", "3", ")", "else", "int", "(", "x", ")", ")", "for", "(", "i", ",", "x", ")", "in", "enumerate", "(", "tupl", ")", "]", ")" ]
returns a namedtuple of jedi's version .
train
false
7,507
def _failureOldStyle(fail): import linecache tb = [] for f in fail.frames: tb.append((f[1], f[2], f[0], linecache.getline(f[1], f[2]))) return {'traceback': tb, 'exception': traceback.format_exception_only(fail.type, fail.value)}
[ "def", "_failureOldStyle", "(", "fail", ")", ":", "import", "linecache", "tb", "=", "[", "]", "for", "f", "in", "fail", ".", "frames", ":", "tb", ".", "append", "(", "(", "f", "[", "1", "]", ",", "f", "[", "2", "]", ",", "f", "[", "0", "]", ",", "linecache", ".", "getline", "(", "f", "[", "1", "]", ",", "f", "[", "2", "]", ")", ")", ")", "return", "{", "'traceback'", ":", "tb", ",", "'exception'", ":", "traceback", ".", "format_exception_only", "(", "fail", ".", "type", ",", "fail", ".", "value", ")", "}" ]
pre-failure manhole representation of exceptions .
train
false
7,508
@treeio_login_required def widget_index_assigned(request, response_format='html'): context = _get_default_context(request) agent = context['agent'] if agent: tickets = Object.filter_by_request(request, Ticket.objects.filter(assigned=agent, status__hidden=False)) else: return user_denied(request, 'You are not a Service Support Agent.') context.update({'tickets': tickets}) return render_to_response('services/widgets/index_assigned', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "widget_index_assigned", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "context", "=", "_get_default_context", "(", "request", ")", "agent", "=", "context", "[", "'agent'", "]", "if", "agent", ":", "tickets", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Ticket", ".", "objects", ".", "filter", "(", "assigned", "=", "agent", ",", "status__hidden", "=", "False", ")", ")", "else", ":", "return", "user_denied", "(", "request", ",", "'You are not a Service Support Agent.'", ")", "context", ".", "update", "(", "{", "'tickets'", ":", "tickets", "}", ")", "return", "render_to_response", "(", "'services/widgets/index_assigned'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
tickets assigned to current user .
train
false
7,509
@nottest def make_test_handler(testcase, *args, **kwargs): handler = TestHandler(*args, **kwargs) testcase.addCleanup(handler.close) return handler
[ "@", "nottest", "def", "make_test_handler", "(", "testcase", ",", "*", "args", ",", "**", "kwargs", ")", ":", "handler", "=", "TestHandler", "(", "*", "args", ",", "**", "kwargs", ")", "testcase", ".", "addCleanup", "(", "handler", ".", "close", ")", "return", "handler" ]
returns a testhandler which will be used by the given testcase .
train
false
7,510
def _get_info(info, name): try: idx = info[name] except: idx = info[name] = dict() return idx
[ "def", "_get_info", "(", "info", ",", "name", ")", ":", "try", ":", "idx", "=", "info", "[", "name", "]", "except", ":", "idx", "=", "info", "[", "name", "]", "=", "dict", "(", ")", "return", "idx" ]
get/create the info for this name .
train
true
7,512
def get_new_obj(obj, klass, attributes): kwargs = {} for (key, value) in list(obj.__dict__.items()): if isinstance(value, dict): kwargs[key] = value.copy() elif isinstance(value, (tuple, list)): kwargs[key] = value[:] else: kwargs[key] = value for (key, value) in list(attributes.items()): if (value is None): continue if isinstance(value, dict): kwargs_value = kwargs.get(key, {}) for (key1, value2) in list(value.items()): if (value2 is None): continue kwargs_value[key1] = value2 kwargs[key] = kwargs_value else: kwargs[key] = value return klass(**kwargs)
[ "def", "get_new_obj", "(", "obj", ",", "klass", ",", "attributes", ")", ":", "kwargs", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "list", "(", "obj", ".", "__dict__", ".", "items", "(", ")", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "kwargs", "[", "key", "]", "=", "value", ".", "copy", "(", ")", "elif", "isinstance", "(", "value", ",", "(", "tuple", ",", "list", ")", ")", ":", "kwargs", "[", "key", "]", "=", "value", "[", ":", "]", "else", ":", "kwargs", "[", "key", "]", "=", "value", "for", "(", "key", ",", "value", ")", "in", "list", "(", "attributes", ".", "items", "(", ")", ")", ":", "if", "(", "value", "is", "None", ")", ":", "continue", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "kwargs_value", "=", "kwargs", ".", "get", "(", "key", ",", "{", "}", ")", "for", "(", "key1", ",", "value2", ")", "in", "list", "(", "value", ".", "items", "(", ")", ")", ":", "if", "(", "value2", "is", "None", ")", ":", "continue", "kwargs_value", "[", "key1", "]", "=", "value2", "kwargs", "[", "key", "]", "=", "kwargs_value", "else", ":", "kwargs", "[", "key", "]", "=", "value", "return", "klass", "(", "**", "kwargs", ")" ]
pass attributes from the existing object obj, overridden by the attributes dictionary, to a klass constructor .
train
false
7,514
def extractOne(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0): best_list = extractWithoutOrder(query, choices, processor, scorer, score_cutoff) try: return max(best_list, key=(lambda i: i[1])) except ValueError: return None
[ "def", "extractOne", "(", "query", ",", "choices", ",", "processor", "=", "default_processor", ",", "scorer", "=", "default_scorer", ",", "score_cutoff", "=", "0", ")", ":", "best_list", "=", "extractWithoutOrder", "(", "query", ",", "choices", ",", "processor", ",", "scorer", ",", "score_cutoff", ")", "try", ":", "return", "max", "(", "best_list", ",", "key", "=", "(", "lambda", "i", ":", "i", "[", "1", "]", ")", ")", "except", "ValueError", ":", "return", "None" ]
find the single best match above a score in a list of choices .
train
true
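A hedged usage sketch for extractOne, in the style of fuzzywuzzy's README; exact scores depend on the scorer in use:
choices = ['Atlanta Falcons', 'New York Jets', 'New York Giants', 'Dallas Cowboys']
extractOne('new york jets', choices)             # -> ('New York Jets', 100)
extractOne('cowboys', choices, score_cutoff=95)  # -> None when nothing scores >= 95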
7,515
def log1p(x): return Log1p()(x)
[ "def", "log1p", "(", "x", ")", ":", "return", "Log1p", "(", ")", "(", "x", ")" ]
elementwise natural logarithm of one plus the input, log(1 + x) .
train
false
7,516
def get_region_from_metadata(): global __Location__ if (__Location__ == 'do-not-get-from-metadata'): LOG.debug('Previously failed to get AWS region from metadata. Not trying again.') return None if (__Location__ != ''): return __Location__ try: result = requests.get('http://169.254.169.254/latest/dynamic/instance-identity/document', proxies={'http': ''}, timeout=AWS_METADATA_TIMEOUT) except requests.exceptions.RequestException: LOG.warning('Failed to get AWS region from instance metadata.', exc_info=True) __Location__ = 'do-not-get-from-metadata' return None try: region = result.json()['region'] __Location__ = region return __Location__ except (ValueError, KeyError): LOG.warning('Failed to decode JSON from instance metadata.') return None return None
[ "def", "get_region_from_metadata", "(", ")", ":", "global", "__Location__", "if", "(", "__Location__", "==", "'do-not-get-from-metadata'", ")", ":", "LOG", ".", "debug", "(", "'Previously failed to get AWS region from metadata. Not trying again.'", ")", "return", "None", "if", "(", "__Location__", "!=", "''", ")", ":", "return", "__Location__", "try", ":", "result", "=", "requests", ".", "get", "(", "'http://169.254.169.254/latest/dynamic/instance-identity/document'", ",", "proxies", "=", "{", "'http'", ":", "''", "}", ",", "timeout", "=", "AWS_METADATA_TIMEOUT", ")", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "LOG", ".", "warning", "(", "'Failed to get AWS region from instance metadata.'", ",", "exc_info", "=", "True", ")", "__Location__", "=", "'do-not-get-from-metadata'", "return", "None", "try", ":", "region", "=", "result", ".", "json", "(", ")", "[", "'region'", "]", "__Location__", "=", "region", "return", "__Location__", "except", "(", "ValueError", ",", "KeyError", ")", ":", "LOG", ".", "warning", "(", "'Failed to decode JSON from instance metadata.'", ")", "return", "None", "return", "None" ]
try to get region from instance identity document and cache it .
train
true
7,517
def _check_module(evaluator, module): def get_sys_path_powers(names): for name in names: power = name.parent.parent if tree.is_node(power, 'power', 'atom_expr'): c = power.children if (isinstance(c[0], tree.Name) and (c[0].value == 'sys') and tree.is_node(c[1], 'trailer')): n = c[1].children[1] if (isinstance(n, tree.Name) and (n.value == 'path')): (yield (name, power)) sys_path = list(evaluator.sys_path) try: possible_names = module.used_names['path'] except KeyError: pass else: for (name, power) in get_sys_path_powers(possible_names): stmt = name.get_definition() if (len(power.children) >= 4): sys_path.extend(_paths_from_list_modifications(module.path, *power.children[2:4])) elif (name.get_definition().type == 'expr_stmt'): sys_path.extend(_paths_from_assignment(evaluator, stmt)) return sys_path
[ "def", "_check_module", "(", "evaluator", ",", "module", ")", ":", "def", "get_sys_path_powers", "(", "names", ")", ":", "for", "name", "in", "names", ":", "power", "=", "name", ".", "parent", ".", "parent", "if", "tree", ".", "is_node", "(", "power", ",", "'power'", ",", "'atom_expr'", ")", ":", "c", "=", "power", ".", "children", "if", "(", "isinstance", "(", "c", "[", "0", "]", ",", "tree", ".", "Name", ")", "and", "(", "c", "[", "0", "]", ".", "value", "==", "'sys'", ")", "and", "tree", ".", "is_node", "(", "c", "[", "1", "]", ",", "'trailer'", ")", ")", ":", "n", "=", "c", "[", "1", "]", ".", "children", "[", "1", "]", "if", "(", "isinstance", "(", "n", ",", "tree", ".", "Name", ")", "and", "(", "n", ".", "value", "==", "'path'", ")", ")", ":", "(", "yield", "(", "name", ",", "power", ")", ")", "sys_path", "=", "list", "(", "evaluator", ".", "sys_path", ")", "try", ":", "possible_names", "=", "module", ".", "used_names", "[", "'path'", "]", "except", "KeyError", ":", "pass", "else", ":", "for", "(", "name", ",", "power", ")", "in", "get_sys_path_powers", "(", "possible_names", ")", ":", "stmt", "=", "name", ".", "get_definition", "(", ")", "if", "(", "len", "(", "power", ".", "children", ")", ">=", "4", ")", ":", "sys_path", ".", "extend", "(", "_paths_from_list_modifications", "(", "module", ".", "path", ",", "*", "power", ".", "children", "[", "2", ":", "4", "]", ")", ")", "elif", "(", "name", ".", "get_definition", "(", ")", ".", "type", "==", "'expr_stmt'", ")", ":", "sys_path", ".", "extend", "(", "_paths_from_assignment", "(", "evaluator", ",", "stmt", ")", ")", "return", "sys_path" ]
detect sys.path modifications within a module .
train
false
7,518
def install_update(name): salt.utils.warn_until('Fluorine', "This function is replaced by 'install' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.") return install(name)
[ "def", "install_update", "(", "name", ")", ":", "salt", ".", "utils", ".", "warn_until", "(", "'Fluorine'", ",", "\"This function is replaced by 'install' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.\"", ")", "return", "install", "(", "name", ")" ]
install the named update; deprecated alias for 'install' .
train
false
7,519
@utils.arg('host', metavar='<host>', help='Name of host.') def do_host_servers_migrate(cs, args): hypervisors = cs.hypervisors.search(args.host, servers=True) response = [] for hyper in hypervisors: if hasattr(hyper, 'servers'): for server in hyper.servers: response.append(_server_migrate(cs, server)) utils.print_list(response, ['Server UUID', 'Migration Accepted', 'Error Message'])
[ "@", "utils", ".", "arg", "(", "'host'", ",", "metavar", "=", "'<host>'", ",", "help", "=", "'Name of host.'", ")", "def", "do_host_servers_migrate", "(", "cs", ",", "args", ")", ":", "hypervisors", "=", "cs", ".", "hypervisors", ".", "search", "(", "args", ".", "host", ",", "servers", "=", "True", ")", "response", "=", "[", "]", "for", "hyper", "in", "hypervisors", ":", "if", "hasattr", "(", "hyper", ",", "'servers'", ")", ":", "for", "server", "in", "hyper", ".", "servers", ":", "response", ".", "append", "(", "_server_migrate", "(", "cs", ",", "server", ")", ")", "utils", ".", "print_list", "(", "response", ",", "[", "'Server UUID'", ",", "'Migration Accepted'", ",", "'Error Message'", "]", ")" ]
cold migrate all instances off the specified host to other available hosts .
train
false
7,521
def pick_context_manager_writer(f): @functools.wraps(f) def wrapped(context, *args, **kwargs): ctxt_mgr = get_context_manager(context) with ctxt_mgr.writer.using(context): return f(context, *args, **kwargs) return wrapped
[ "def", "pick_context_manager_writer", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapped", "(", "context", ",", "*", "args", ",", "**", "kwargs", ")", ":", "ctxt_mgr", "=", "get_context_manager", "(", "context", ")", "with", "ctxt_mgr", ".", "writer", ".", "using", "(", "context", ")", ":", "return", "f", "(", "context", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped" ]
decorator to use a writer db context manager .
train
false
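A hedged sketch of applying pick_context_manager_writer; instance_update and its body are hypothetical:
@pick_context_manager_writer
def instance_update(context, instance_uuid, values):
    # the body runs inside ctxt_mgr.writer.using(context), so a writer
    # session/transaction is open for the duration of the call
    ...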
7,524
def say_text(text, priority='important'): mpstate.console.writeln(text)
[ "def", "say_text", "(", "text", ",", "priority", "=", "'important'", ")", ":", "mpstate", ".", "console", ".", "writeln", "(", "text", ")" ]
text output - default function for say() .
train
false
7,525
def _MapFileToMsBuildSourceType(source, rule_dependencies, extension_to_rule_name): (_, ext) = os.path.splitext(source) if (ext in extension_to_rule_name): group = 'rule' element = extension_to_rule_name[ext] elif (ext in ['.cc', '.cpp', '.c', '.cxx']): group = 'compile' element = 'ClCompile' elif (ext in ['.h', '.hxx']): group = 'include' element = 'ClInclude' elif (ext == '.rc'): group = 'resource' element = 'ResourceCompile' elif (ext == '.asm'): group = 'masm' element = 'MASM' elif (ext == '.idl'): group = 'midl' element = 'Midl' elif (source in rule_dependencies): group = 'rule_dependency' element = 'CustomBuild' else: group = 'none' element = 'None' return (group, element)
[ "def", "_MapFileToMsBuildSourceType", "(", "source", ",", "rule_dependencies", ",", "extension_to_rule_name", ")", ":", "(", "_", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "source", ")", "if", "(", "ext", "in", "extension_to_rule_name", ")", ":", "group", "=", "'rule'", "element", "=", "extension_to_rule_name", "[", "ext", "]", "elif", "(", "ext", "in", "[", "'.cc'", ",", "'.cpp'", ",", "'.c'", ",", "'.cxx'", "]", ")", ":", "group", "=", "'compile'", "element", "=", "'ClCompile'", "elif", "(", "ext", "in", "[", "'.h'", ",", "'.hxx'", "]", ")", ":", "group", "=", "'include'", "element", "=", "'ClInclude'", "elif", "(", "ext", "==", "'.rc'", ")", ":", "group", "=", "'resource'", "element", "=", "'ResourceCompile'", "elif", "(", "ext", "==", "'.asm'", ")", ":", "group", "=", "'masm'", "element", "=", "'MASM'", "elif", "(", "ext", "==", "'.idl'", ")", ":", "group", "=", "'midl'", "element", "=", "'Midl'", "elif", "(", "source", "in", "rule_dependencies", ")", ":", "group", "=", "'rule_dependency'", "element", "=", "'CustomBuild'", "else", ":", "group", "=", "'none'", "element", "=", "'None'", "return", "(", "group", ",", "element", ")" ]
returns the group and element type of the source file .
train
false
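A hedged sketch of the mapping for a few common extensions, with empty rule tables:
_MapFileToMsBuildSourceType('foo.cc', set(), {})   # -> ('compile', 'ClCompile')
_MapFileToMsBuildSourceType('foo.h', set(), {})    # -> ('include', 'ClInclude')
_MapFileToMsBuildSourceType('foo.txt', set(), {})  # -> ('none', 'None')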
7,527
def _get_mro(cls): if (not isinstance(cls, type)): class cls(cls, object, ): pass return cls.__mro__[1:] return cls.__mro__
[ "def", "_get_mro", "(", "cls", ")", ":", "if", "(", "not", "isinstance", "(", "cls", ",", "type", ")", ")", ":", "class", "cls", "(", "cls", ",", "object", ",", ")", ":", "pass", "return", "cls", ".", "__mro__", "[", "1", ":", "]", "return", "cls", ".", "__mro__" ]
get a reasonable method resolution order of a class and its superclasses for both old-style and new-style classes .
train
true
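A hedged sketch: on Python 3 every class is new-style, so _get_mro simply returns __mro__; the fallback branch only matters for Python 2 old-style classes:
_get_mro(bool)  # -> (<class 'bool'>, <class 'int'>, <class 'object'>)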
7,528
def silverman_transform(bw, M, RANGE): J = np.arange(((M / 2) + 1)) FAC1 = (2 * (((np.pi * bw) / RANGE) ** 2)) JFAC = ((J ** 2) * FAC1) BC = (1 - ((1.0 / 3) * ((((J * 1.0) / M) * np.pi) ** 2))) FAC = (np.exp((- JFAC)) / BC) kern_est = np.r_[(FAC, FAC[1:(-1)])] return kern_est
[ "def", "silverman_transform", "(", "bw", ",", "M", ",", "RANGE", ")", ":", "J", "=", "np", ".", "arange", "(", "(", "(", "M", "/", "2", ")", "+", "1", ")", ")", "FAC1", "=", "(", "2", "*", "(", "(", "(", "np", ".", "pi", "*", "bw", ")", "/", "RANGE", ")", "**", "2", ")", ")", "JFAC", "=", "(", "(", "J", "**", "2", ")", "*", "FAC1", ")", "BC", "=", "(", "1", "-", "(", "(", "1.0", "/", "3", ")", "*", "(", "(", "(", "(", "J", "*", "1.0", ")", "/", "M", ")", "*", "np", ".", "pi", ")", "**", "2", ")", ")", ")", "FAC", "=", "(", "np", ".", "exp", "(", "(", "-", "JFAC", ")", ")", "/", "BC", ")", "kern_est", "=", "np", ".", "r_", "[", "(", "FAC", ",", "FAC", "[", "1", ":", "(", "-", "1", ")", "]", ")", "]", "return", "kern_est" ]
fft of the gaussian kernel, following silverman (as 176) .
train
false
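A hedged sketch of the usual FFT smoothing step; `binned` (counts on a grid of M points spanning RANGE) is a hypothetical input:
import numpy as np
M, RANGE, bw = 256, 10.0, 0.5
binned = np.random.RandomState(0).poisson(1.0, M).astype(float)
kern = silverman_transform(bw, M, RANGE)             # length-M multiplier
smoothed = np.fft.ifft(np.fft.fft(binned) * kern).real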
7,531
def get_product_summary_serializer_class(): return _product_summary_serializer_class
[ "def", "get_product_summary_serializer_class", "(", ")", ":", "return", "_product_summary_serializer_class" ]
returns the common product summary serializer class .
train
false
7,532
def run_tasks(guest, instance, active_migrations, on_migration_failure, migration, is_post_copy_enabled): tasks = active_migrations.get(instance.uuid, deque()) while tasks: task = tasks.popleft() if (task == 'force-complete'): if (migration.status == 'running (post-copy)'): LOG.warning(_LW('Live-migration %s already switched to post-copy mode.'), instance=instance) elif is_post_copy_enabled: trigger_postcopy_switch(guest, instance, migration) else: try: guest.pause() on_migration_failure.append('unpause') except Exception as e: LOG.warning(_LW('Failed to pause instance during live-migration %s'), e, instance=instance) else: LOG.warning(_LW("Unknown migration task '%(task)s'"), {'task': task}, instance=instance)
[ "def", "run_tasks", "(", "guest", ",", "instance", ",", "active_migrations", ",", "on_migration_failure", ",", "migration", ",", "is_post_copy_enabled", ")", ":", "tasks", "=", "active_migrations", ".", "get", "(", "instance", ".", "uuid", ",", "deque", "(", ")", ")", "while", "tasks", ":", "task", "=", "tasks", ".", "popleft", "(", ")", "if", "(", "task", "==", "'force-complete'", ")", ":", "if", "(", "migration", ".", "status", "==", "'running (post-copy)'", ")", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Live-migration %s already switched to post-copy mode.'", ")", ",", "instance", "=", "instance", ")", "elif", "is_post_copy_enabled", ":", "trigger_postcopy_switch", "(", "guest", ",", "instance", ",", "migration", ")", "else", ":", "try", ":", "guest", ".", "pause", "(", ")", "on_migration_failure", ".", "append", "(", "'unpause'", ")", "except", "Exception", "as", "e", ":", "LOG", ".", "warning", "(", "_LW", "(", "'Failed to pause instance during live-migration %s'", ")", ",", "e", ",", "instance", "=", "instance", ")", "else", ":", "LOG", ".", "warning", "(", "_LW", "(", "\"Unknown migration task '%(task)s'\"", ")", ",", "{", "'task'", ":", "task", "}", ",", "instance", "=", "instance", ")" ]
run any pending migration tasks .
train
false
7,533
def assert_sp_frame_equal(left, right, check_dtype=True, exact_indices=True, check_frame_type=True, obj='SparseDataFrame'): assertIsInstance(left, pd.SparseDataFrame, '[SparseDataFrame]') assertIsInstance(right, pd.SparseDataFrame, '[SparseDataFrame]') if check_frame_type: assert_class_equal(left, right, obj=obj) assert_index_equal(left.index, right.index, obj='{0}.index'.format(obj)) assert_index_equal(left.columns, right.columns, obj='{0}.columns'.format(obj)) for (col, series) in compat.iteritems(left): assert (col in right) if exact_indices: assert_sp_series_equal(series, right[col], check_dtype=check_dtype) else: assert_series_equal(series.to_dense(), right[col].to_dense(), check_dtype=check_dtype) assert_attr_equal('default_fill_value', left, right, obj=obj) for col in right: assert (col in left)
[ "def", "assert_sp_frame_equal", "(", "left", ",", "right", ",", "check_dtype", "=", "True", ",", "exact_indices", "=", "True", ",", "check_frame_type", "=", "True", ",", "obj", "=", "'SparseDataFrame'", ")", ":", "assertIsInstance", "(", "left", ",", "pd", ".", "SparseDataFrame", ",", "'[SparseDataFrame]'", ")", "assertIsInstance", "(", "right", ",", "pd", ".", "SparseDataFrame", ",", "'[SparseDataFrame]'", ")", "if", "check_frame_type", ":", "assert_class_equal", "(", "left", ",", "right", ",", "obj", "=", "obj", ")", "assert_index_equal", "(", "left", ".", "index", ",", "right", ".", "index", ",", "obj", "=", "'{0}.index'", ".", "format", "(", "obj", ")", ")", "assert_index_equal", "(", "left", ".", "columns", ",", "right", ".", "columns", ",", "obj", "=", "'{0}.columns'", ".", "format", "(", "obj", ")", ")", "for", "(", "col", ",", "series", ")", "in", "compat", ".", "iteritems", "(", "left", ")", ":", "assert", "(", "col", "in", "right", ")", "if", "exact_indices", ":", "assert_sp_series_equal", "(", "series", ",", "right", "[", "col", "]", ",", "check_dtype", "=", "check_dtype", ")", "else", ":", "assert_series_equal", "(", "series", ".", "to_dense", "(", ")", ",", "right", "[", "col", "]", ".", "to_dense", "(", ")", ",", "check_dtype", "=", "check_dtype", ")", "assert_attr_equal", "(", "'default_fill_value'", ",", "left", ",", "right", ",", "obj", "=", "obj", ")", "for", "col", "in", "right", ":", "assert", "(", "col", "in", "left", ")" ]
check that the left and right sparsedataframes are equal .
train
false
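A hedged sketch; requires a pandas version that still ships SparseDataFrame (pre-1.0), where to_sparse() was available:
import pandas as pd
left = pd.DataFrame({'a': [1.0, None, 3.0]}).to_sparse()
right = pd.DataFrame({'a': [1.0, None, 3.0]}).to_sparse()
assert_sp_frame_equal(left, right)  # no exception means the frames match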
7,534
def is_mul(var): if (var.owner and (var.owner.op == tensor.mul)): return var.owner.inputs else: return None
[ "def", "is_mul", "(", "var", ")", ":", "if", "(", "var", ".", "owner", "and", "(", "var", ".", "owner", ".", "op", "==", "tensor", ".", "mul", ")", ")", ":", "return", "var", ".", "owner", ".", "inputs", "else", ":", "return", "None" ]
match a variable with x * y * z * .
train
false
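A hedged sketch with theano tensor variables; note that only the inputs of the outermost mul are returned, so nested products are not flattened:
import theano.tensor as tensor
x, y, z = tensor.scalars('x', 'y', 'z')
is_mul(x * y * z)  # -> the inputs of the top-level mul, e.g. [x * y, z]
is_mul(x + y)      # -> None (owner op is add, not mul)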
7,535
def get_attributes(path): err = '' if (not os.path.exists(path)): err += 'File not found\n' if err: return err attributes = {} intAttributes = win32file.GetFileAttributes(path) attributes['archive'] = ((intAttributes & 32) == 32) attributes['reparsePoint'] = ((intAttributes & 1024) == 1024) attributes['compressed'] = ((intAttributes & 2048) == 2048) attributes['directory'] = ((intAttributes & 16) == 16) attributes['encrypted'] = ((intAttributes & 16384) == 16384) attributes['hidden'] = ((intAttributes & 2) == 2) attributes['normal'] = ((intAttributes & 128) == 128) attributes['notIndexed'] = ((intAttributes & 8192) == 8192) attributes['offline'] = ((intAttributes & 4096) == 4096) attributes['readonly'] = ((intAttributes & 1) == 1) attributes['system'] = ((intAttributes & 4) == 4) attributes['temporary'] = ((intAttributes & 256) == 256) attributes['mountedVolume'] = False if ((attributes['reparsePoint'] is True) and (attributes['directory'] is True)): fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if (findDataTuple[6] == 2684354563): attributes['mountedVolume'] = True attributes['symbolicLink'] = False if (attributes['reparsePoint'] is True): fileIterator = win32file.FindFilesIterator(path) findDataTuple = next(fileIterator) if (findDataTuple[6] == 2684354572): attributes['symbolicLink'] = True return attributes
[ "def", "get_attributes", "(", "path", ")", ":", "err", "=", "''", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "err", "+=", "'File not found\\n'", "if", "err", ":", "return", "err", "attributes", "=", "{", "}", "intAttributes", "=", "win32file", ".", "GetFileAttributes", "(", "path", ")", "attributes", "[", "'archive'", "]", "=", "(", "(", "intAttributes", "&", "32", ")", "==", "32", ")", "attributes", "[", "'reparsePoint'", "]", "=", "(", "(", "intAttributes", "&", "1024", ")", "==", "1024", ")", "attributes", "[", "'compressed'", "]", "=", "(", "(", "intAttributes", "&", "2048", ")", "==", "2048", ")", "attributes", "[", "'directory'", "]", "=", "(", "(", "intAttributes", "&", "16", ")", "==", "16", ")", "attributes", "[", "'encrypted'", "]", "=", "(", "(", "intAttributes", "&", "16384", ")", "==", "16384", ")", "attributes", "[", "'hidden'", "]", "=", "(", "(", "intAttributes", "&", "2", ")", "==", "2", ")", "attributes", "[", "'normal'", "]", "=", "(", "(", "intAttributes", "&", "128", ")", "==", "128", ")", "attributes", "[", "'notIndexed'", "]", "=", "(", "(", "intAttributes", "&", "8192", ")", "==", "8192", ")", "attributes", "[", "'offline'", "]", "=", "(", "(", "intAttributes", "&", "4096", ")", "==", "4096", ")", "attributes", "[", "'readonly'", "]", "=", "(", "(", "intAttributes", "&", "1", ")", "==", "1", ")", "attributes", "[", "'system'", "]", "=", "(", "(", "intAttributes", "&", "4", ")", "==", "4", ")", "attributes", "[", "'temporary'", "]", "=", "(", "(", "intAttributes", "&", "256", ")", "==", "256", ")", "attributes", "[", "'mountedVolume'", "]", "=", "False", "if", "(", "(", "attributes", "[", "'reparsePoint'", "]", "is", "True", ")", "and", "(", "attributes", "[", "'directory'", "]", "is", "True", ")", ")", ":", "fileIterator", "=", "win32file", ".", "FindFilesIterator", "(", "path", ")", "findDataTuple", "=", "next", "(", "fileIterator", ")", "if", "(", "findDataTuple", "[", "6", "]", "==", "2684354563", ")", ":", "attributes", "[", "'mountedVolume'", "]", "=", "True", "attributes", "[", "'symbolicLink'", "]", "=", "False", "if", "(", "attributes", "[", "'reparsePoint'", "]", "is", "True", ")", ":", "fileIterator", "=", "win32file", ".", "FindFilesIterator", "(", "path", ")", "findDataTuple", "=", "next", "(", "fileIterator", ")", "if", "(", "findDataTuple", "[", "6", "]", "==", "2684354572", ")", ":", "attributes", "[", "'symbolicLink'", "]", "=", "True", "return", "attributes" ]
return a dict of windows file attributes for the given path .
train
true
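A hedged sketch; Windows-only, since the function relies on pywin32's win32file, and the path is hypothetical:
attrs = get_attributes(r'C:\Windows')
attrs['directory']  # -> True for an existing directory
attrs['hidden']     # -> False, typically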
7,536
def getAllPaths(paths, xmlObject): for archivableObject in xmlObject.archivableObjects: paths += archivableObject.getPaths() return paths
[ "def", "getAllPaths", "(", "paths", ",", "xmlObject", ")", ":", "for", "archivableObject", "in", "xmlObject", ".", "archivableObjects", ":", "paths", "+=", "archivableObject", ".", "getPaths", "(", ")", "return", "paths" ]
get all paths .
train
false
7,537
def _maketrans(complement_mapping): before = ''.join(complement_mapping.keys()) after = ''.join(complement_mapping.values()) before += before.lower() after += after.lower() if (sys.version_info[0] == 3): return str.maketrans(before, after) else: return string.maketrans(before, after)
[ "def", "_maketrans", "(", "complement_mapping", ")", ":", "before", "=", "''", ".", "join", "(", "complement_mapping", ".", "keys", "(", ")", ")", "after", "=", "''", ".", "join", "(", "complement_mapping", ".", "values", "(", ")", ")", "before", "+=", "before", ".", "lower", "(", ")", "after", "+=", "after", ".", "lower", "(", ")", "if", "(", "sys", ".", "version_info", "[", "0", "]", "==", "3", ")", ":", "return", "str", ".", "maketrans", "(", "before", ",", "after", ")", "else", ":", "return", "string", ".", "maketrans", "(", "before", ",", "after", ")" ]
makes a python string translation table .
train
false
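A hedged sketch: building a DNA complement table and applying it with str.translate on Python 3:
table = _maketrans({'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'})
'ACGTacgt'.translate(table)  # -> 'TGCAtgca' (lowercase handled by the helper)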
7,540
def reconstruct_matrix_from_id(B, idx, proj): if _is_real(B): return backend.idd_reconid(B, (idx + 1), proj) else: return backend.idz_reconid(B, (idx + 1), proj)
[ "def", "reconstruct_matrix_from_id", "(", "B", ",", "idx", ",", "proj", ")", ":", "if", "_is_real", "(", "B", ")", ":", "return", "backend", ".", "idd_reconid", "(", "B", ",", "(", "idx", "+", "1", ")", ",", "proj", ")", "else", ":", "return", "backend", ".", "idz_reconid", "(", "B", ",", "(", "idx", "+", "1", ")", ",", "proj", ")" ]
reconstruct a matrix from its interpolative decomposition (id) .
train
false
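A hedged sketch following the documented scipy.linalg.interpolative workflow (interp_decomp and reconstruct_skel_matrix are real API calls):
import numpy as np
import scipy.linalg.interpolative as sli
A = np.random.RandomState(0).randn(20, 20)
k, idx, proj = sli.interp_decomp(A, 1e-6)    # ID to relative precision 1e-6
B = sli.reconstruct_skel_matrix(A, k, idx)   # the k skeleton columns of A
A_approx = sli.reconstruct_matrix_from_id(B, idx, proj)
np.allclose(A, A_approx, atol=1e-5)          # -> True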
7,541
@handle_dashboard_error @require_POST @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_post_params('student', 'url', 'due_datetime') def change_due_date(request, course_id): course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) student = require_student_from_identifier(request.POST.get('student')) unit = find_unit(course, request.POST.get('url')) due_date = parse_datetime(request.POST.get('due_datetime')) set_due_date_extension(course, unit, student, due_date) return JsonResponse(_('Successfully changed due date for student {0} for {1} to {2}').format(student.profile.name, _display_unit(unit), due_date.strftime('%Y-%m-%d %H:%M')))
[ "@", "handle_dashboard_error", "@", "require_POST", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_level", "(", "'staff'", ")", "@", "require_post_params", "(", "'student'", ",", "'url'", ",", "'due_datetime'", ")", "def", "change_due_date", "(", "request", ",", "course_id", ")", ":", "course", "=", "get_course_by_id", "(", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", ")", "student", "=", "require_student_from_identifier", "(", "request", ".", "POST", ".", "get", "(", "'student'", ")", ")", "unit", "=", "find_unit", "(", "course", ",", "request", ".", "POST", ".", "get", "(", "'url'", ")", ")", "due_date", "=", "parse_datetime", "(", "request", ".", "POST", ".", "get", "(", "'due_datetime'", ")", ")", "set_due_date_extension", "(", "course", ",", "unit", ",", "student", ",", "due_date", ")", "return", "JsonResponse", "(", "_", "(", "'Successfully changed due date for student {0} for {1} to {2}'", ")", ".", "format", "(", "student", ".", "profile", ".", "name", ",", "_display_unit", "(", "unit", ")", ",", "due_date", ".", "strftime", "(", "'%Y-%m-%d %H:%M'", ")", ")", ")" ]
grants a due date extension to a student for a particular unit .
train
false
7,542
def hb_write(file, m, hb_info=None): if (hb_info is None): hb_info = HBInfo.from_data(m) def _set_matrix(fid): hb = HBFile(fid, hb_info) return hb.write_matrix(m) if isinstance(file, string_types): fid = open(file, 'w') try: return _set_matrix(fid) finally: fid.close() else: return _set_matrix(file)
[ "def", "hb_write", "(", "file", ",", "m", ",", "hb_info", "=", "None", ")", ":", "if", "(", "hb_info", "is", "None", ")", ":", "hb_info", "=", "HBInfo", ".", "from_data", "(", "m", ")", "def", "_set_matrix", "(", "fid", ")", ":", "hb", "=", "HBFile", "(", "fid", ",", "hb_info", ")", "return", "hb", ".", "write_matrix", "(", "m", ")", "if", "isinstance", "(", "file", ",", "string_types", ")", ":", "fid", "=", "open", "(", "file", ",", "'w'", ")", "try", ":", "return", "_set_matrix", "(", "fid", ")", "finally", ":", "fid", ".", "close", "(", ")", "else", ":", "return", "_set_matrix", "(", "file", ")" ]
write hb-format file .
train
false
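A hedged usage sketch; hb_write is exposed publicly as scipy.io.hb_write, and 'identity.hb' is a hypothetical output path:
import numpy as np
from scipy.sparse import csr_matrix
hb_write('identity.hb', csr_matrix(np.eye(3)))  # writes a 3x3 identity in HB format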
7,543
def simplecatcher(application): def simplecatcher_app(environ, start_response): try: return application(environ, start_response) except: out = StringIO() traceback.print_exc(file=out) start_response('500 Server Error', [('content-type', 'text/html')], sys.exc_info()) res = out.getvalue() return [('<h3>Error</h3><pre>%s</pre>' % html_quote(res))] return simplecatcher_app
[ "def", "simplecatcher", "(", "application", ")", ":", "def", "simplecatcher_app", "(", "environ", ",", "start_response", ")", ":", "try", ":", "return", "application", "(", "environ", ",", "start_response", ")", "except", ":", "out", "=", "StringIO", "(", ")", "traceback", ".", "print_exc", "(", "file", "=", "out", ")", "start_response", "(", "'500 Server Error'", ",", "[", "(", "'content-type'", ",", "'text/html'", ")", "]", ",", "sys", ".", "exc_info", "(", ")", ")", "res", "=", "out", ".", "getvalue", "(", ")", "return", "[", "(", "'<h3>Error</h3><pre>%s</pre>'", "%", "html_quote", "(", "res", ")", ")", "]", "return", "simplecatcher_app" ]
a simple middleware that catches errors and turns them into simple tracebacks .
train
false
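A hedged sketch: wrapping a deliberately broken WSGI app; broken_app is hypothetical:
def broken_app(environ, start_response):
    raise RuntimeError('boom')

app = simplecatcher(broken_app)
# serving `app` now returns a '500 Server Error' page containing the
# formatted traceback instead of letting the exception propagate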
7,544
def lookupAddress6(name, timeout=None): return getResolver().lookupAddress6(name, timeout)
[ "def", "lookupAddress6", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupAddress6", "(", "name", ",", "timeout", ")" ]
perform an a6 record lookup .
train
false
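A hedged sketch with Twisted's Deferred-based resolver API, run under the global reactor:
from twisted.internet import reactor

d = lookupAddress6('example.com')            # returns a Deferred
d.addCallback(lambda result: print(result))  # fires with the lookup result
d.addBoth(lambda _: reactor.stop())
reactor.run()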
7,545
def get_saml_client(): if settings.SAML_CALLBACK_SERVER_NAME: acs_url = (settings.SAML_CALLBACK_SERVER_NAME + url_for('saml_auth.idp_initiated')) else: acs_url = url_for('saml_auth.idp_initiated', _external=True) if (settings.SAML_METADATA_URL != ''): rv = requests.get(settings.SAML_METADATA_URL) import tempfile tmp = tempfile.NamedTemporaryFile() f = open(tmp.name, 'w') f.write(rv.text) f.close() metadata_path = tmp.name else: metadata_path = settings.SAML_LOCAL_METADATA_PATH saml_settings = {'metadata': {'local': [metadata_path]}, 'service': {'sp': {'endpoints': {'assertion_consumer_service': [(acs_url, BINDING_HTTP_REDIRECT), (acs_url, BINDING_HTTP_POST)]}, 'allow_unsolicited': True, 'authn_requests_signed': False, 'logout_requests_signed': True, 'want_assertions_signed': True, 'want_response_signed': False}}} if (settings.SAML_ENTITY_ID != ''): saml_settings['entityid'] = settings.SAML_ENTITY_ID spConfig = Saml2Config() spConfig.load(saml_settings) spConfig.allow_unknown_attributes = True saml_client = Saml2Client(config=spConfig) if (settings.SAML_METADATA_URL != ''): tmp.close() return saml_client
[ "def", "get_saml_client", "(", ")", ":", "if", "settings", ".", "SAML_CALLBACK_SERVER_NAME", ":", "acs_url", "=", "(", "settings", ".", "SAML_CALLBACK_SERVER_NAME", "+", "url_for", "(", "'saml_auth.idp_initiated'", ")", ")", "else", ":", "acs_url", "=", "url_for", "(", "'saml_auth.idp_initiated'", ",", "_external", "=", "True", ")", "if", "(", "settings", ".", "SAML_METADATA_URL", "!=", "''", ")", ":", "rv", "=", "requests", ".", "get", "(", "settings", ".", "SAML_METADATA_URL", ")", "import", "tempfile", "tmp", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "f", "=", "open", "(", "tmp", ".", "name", ",", "'w'", ")", "f", ".", "write", "(", "rv", ".", "text", ")", "f", ".", "close", "(", ")", "metadata_path", "=", "tmp", ".", "name", "else", ":", "metadata_path", "=", "settings", ".", "SAML_LOCAL_METADATA_PATH", "saml_settings", "=", "{", "'metadata'", ":", "{", "'local'", ":", "[", "metadata_path", "]", "}", ",", "'service'", ":", "{", "'sp'", ":", "{", "'endpoints'", ":", "{", "'assertion_consumer_service'", ":", "[", "(", "acs_url", ",", "BINDING_HTTP_REDIRECT", ")", ",", "(", "acs_url", ",", "BINDING_HTTP_POST", ")", "]", "}", ",", "'allow_unsolicited'", ":", "True", ",", "'authn_requests_signed'", ":", "False", ",", "'logout_requests_signed'", ":", "True", ",", "'want_assertions_signed'", ":", "True", ",", "'want_response_signed'", ":", "False", "}", "}", "}", "if", "(", "settings", ".", "SAML_ENTITY_ID", "!=", "''", ")", ":", "saml_settings", "[", "'entityid'", "]", "=", "settings", ".", "SAML_ENTITY_ID", "spConfig", "=", "Saml2Config", "(", ")", "spConfig", ".", "load", "(", "saml_settings", ")", "spConfig", ".", "allow_unknown_attributes", "=", "True", "saml_client", "=", "Saml2Client", "(", "config", "=", "spConfig", ")", "if", "(", "settings", ".", "SAML_METADATA_URL", "!=", "''", ")", ":", "tmp", ".", "close", "(", ")", "return", "saml_client" ]
return a saml client built from the saml configuration .
train
false