id_within_dataset: int64, values 1 to 55.5k
snippet: string, lengths 19 to 14.2k
tokens: list, lengths 6 to 1.63k
nl: string, lengths 6 to 352
split_within_dataset: string, 1 class
is_duplicated: bool, 2 classes
49,244
def get_email_preferences(user_id): email_preferences_model = user_models.UserEmailPreferencesModel.get(user_id, strict=False) if (email_preferences_model is None): return user_domain.UserGlobalPrefs.create_default_prefs() else: return user_domain.UserGlobalPrefs(email_preferences_model.site_updates, email_preferences_model.editor_role_notifications, email_preferences_model.feedback_message_notifications, email_preferences_model.subscription_notifications)
[ "def", "get_email_preferences", "(", "user_id", ")", ":", "email_preferences_model", "=", "user_models", ".", "UserEmailPreferencesModel", ".", "get", "(", "user_id", ",", "strict", "=", "False", ")", "if", "(", "email_preferences_model", "is", "None", ")", ":", "return", "user_domain", ".", "UserGlobalPrefs", ".", "create_default_prefs", "(", ")", "else", ":", "return", "user_domain", ".", "UserGlobalPrefs", "(", "email_preferences_model", ".", "site_updates", ",", "email_preferences_model", ".", "editor_role_notifications", ",", "email_preferences_model", ".", "feedback_message_notifications", ",", "email_preferences_model", ".", "subscription_notifications", ")" ]
gives email preferences of user with given user_id .
train
false
49,245
def _get_src(tree_base, source, saltenv='base'): parsed = _urlparse(source) sbase = os.path.basename(source) dest = os.path.join(tree_base, 'SOURCES', sbase) if parsed.scheme: lsrc = __salt__['cp.get_url'](source, dest, saltenv=saltenv) else: shutil.copy(source, dest)
[ "def", "_get_src", "(", "tree_base", ",", "source", ",", "saltenv", "=", "'base'", ")", ":", "parsed", "=", "_urlparse", "(", "source", ")", "sbase", "=", "os", ".", "path", ".", "basename", "(", "source", ")", "dest", "=", "os", ".", "path", ".", "join", "(", "tree_base", ",", "'SOURCES'", ",", "sbase", ")", "if", "parsed", ".", "scheme", ":", "lsrc", "=", "__salt__", "[", "'cp.get_url'", "]", "(", "source", ",", "dest", ",", "saltenv", "=", "saltenv", ")", "else", ":", "shutil", ".", "copy", "(", "source", ",", "dest", ")" ]
get the named sources and place them into the tree_base .
train
true
49,246
def pre_encrypt_assertion(response): assertion = response.assertion response.assertion = None response.encrypted_assertion = EncryptedAssertion() if isinstance(assertion, list): response.encrypted_assertion.add_extension_elements(assertion) else: response.encrypted_assertion.add_extension_element(assertion) return response
[ "def", "pre_encrypt_assertion", "(", "response", ")", ":", "assertion", "=", "response", ".", "assertion", "response", ".", "assertion", "=", "None", "response", ".", "encrypted_assertion", "=", "EncryptedAssertion", "(", ")", "if", "isinstance", "(", "assertion", ",", "list", ")", ":", "response", ".", "encrypted_assertion", ".", "add_extension_elements", "(", "assertion", ")", "else", ":", "response", ".", "encrypted_assertion", ".", "add_extension_element", "(", "assertion", ")", "return", "response" ]
move the assertion to within an encrypted_assertion .
train
true
49,247
def savepoint_commit(sid, using=None): if (using is None): using = DEFAULT_DB_ALIAS connection = connections[using] connection.savepoint_commit(sid)
[ "def", "savepoint_commit", "(", "sid", ",", "using", "=", "None", ")", ":", "if", "(", "using", "is", "None", ")", ":", "using", "=", "DEFAULT_DB_ALIAS", "connection", "=", "connections", "[", "using", "]", "connection", ".", "savepoint_commit", "(", "sid", ")" ]
commits the most recent savepoint .
train
false
49,250
def namespace_to_regex(namespace): (db_name, coll_name) = namespace.split('.', 1) db_regex = re.escape(db_name).replace('\\*', '([^.]*)') coll_regex = re.escape(coll_name).replace('\\*', '(.*)') return re.compile((((('\\A' + db_regex) + '\\.') + coll_regex) + '\\Z'))
[ "def", "namespace_to_regex", "(", "namespace", ")", ":", "(", "db_name", ",", "coll_name", ")", "=", "namespace", ".", "split", "(", "'.'", ",", "1", ")", "db_regex", "=", "re", ".", "escape", "(", "db_name", ")", ".", "replace", "(", "'\\\\*'", ",", "'([^.]*)'", ")", "coll_regex", "=", "re", ".", "escape", "(", "coll_name", ")", ".", "replace", "(", "'\\\\*'", ",", "'(.*)'", ")", "return", "re", ".", "compile", "(", "(", "(", "(", "(", "'\\\\A'", "+", "db_regex", ")", "+", "'\\\\.'", ")", "+", "coll_regex", ")", "+", "'\\\\Z'", ")", ")" ]
create a regexobject from a wildcard namespace .
train
false
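A minimal usage sketch for the namespace_to_regex snippet above (assuming the function is in scope): each '*' becomes a capture group, and the database wildcard deliberately cannot cross a dot.

regex = namespace_to_regex('db_*.coll_*')
print(regex.match('db_test.coll_users').groups())  # ('test', 'users')
print(regex.match('db_a.b.coll_x'))                # None: '([^.]*)' stops at the dot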
49,251
def retry_db_errors(f): @_tag_retriables_as_unretriable @_retry_db_errors @six.wraps(f) def wrapped(*args, **kwargs): try: dup_args = [_copy_if_lds(a) for a in args] dup_kwargs = {k: _copy_if_lds(v) for (k, v) in kwargs.items()} return f(*dup_args, **dup_kwargs) except Exception as e: with excutils.save_and_reraise_exception(): if is_retriable(e): LOG.debug('Retry wrapper got retriable exception: %s', traceback.format_exc()) return wrapped
[ "def", "retry_db_errors", "(", "f", ")", ":", "@", "_tag_retriables_as_unretriable", "@", "_retry_db_errors", "@", "six", ".", "wraps", "(", "f", ")", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "dup_args", "=", "[", "_copy_if_lds", "(", "a", ")", "for", "a", "in", "args", "]", "dup_kwargs", "=", "{", "k", ":", "_copy_if_lds", "(", "v", ")", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "items", "(", ")", "}", "return", "f", "(", "*", "dup_args", ",", "**", "dup_kwargs", ")", "except", "Exception", "as", "e", ":", "with", "excutils", ".", "save_and_reraise_exception", "(", ")", ":", "if", "is_retriable", "(", "e", ")", ":", "LOG", ".", "debug", "(", "'Retry wrapper got retriable exception: %s'", ",", "traceback", ".", "format_exc", "(", ")", ")", "return", "wrapped" ]
nesting-safe retry decorator with auto-arg-copy and logging .
train
false
49,253
def ascii85Encode(stream): encodedStream = '' return ((-1), 'Ascii85Encode not supported yet')
[ "def", "ascii85Encode", "(", "stream", ")", ":", "encodedStream", "=", "''", "return", "(", "(", "-", "1", ")", ",", "'Ascii85Encode not supported yet'", ")" ]
method to encode streams using ascii85 .
train
false
49,254
def generate_video(v, params=[]): sources = [] if v.webm: sources.append({'src': _get_video_url(v.webm), 'type': 'webm'}) if v.ogv: sources.append({'src': _get_video_url(v.ogv), 'type': 'ogg'}) data_fallback = '' if v.flv: data_fallback = _get_video_url(v.flv) return render_to_string('wikiparser/hook_video.html', {'fallback': data_fallback, 'sources': sources, 'params': params, 'video': v, 'height': settings.WIKI_VIDEO_HEIGHT, 'width': settings.WIKI_VIDEO_WIDTH})
[ "def", "generate_video", "(", "v", ",", "params", "=", "[", "]", ")", ":", "sources", "=", "[", "]", "if", "v", ".", "webm", ":", "sources", ".", "append", "(", "{", "'src'", ":", "_get_video_url", "(", "v", ".", "webm", ")", ",", "'type'", ":", "'webm'", "}", ")", "if", "v", ".", "ogv", ":", "sources", ".", "append", "(", "{", "'src'", ":", "_get_video_url", "(", "v", ".", "ogv", ")", ",", "'type'", ":", "'ogg'", "}", ")", "data_fallback", "=", "''", "if", "v", ".", "flv", ":", "data_fallback", "=", "_get_video_url", "(", "v", ".", "flv", ")", "return", "render_to_string", "(", "'wikiparser/hook_video.html'", ",", "{", "'fallback'", ":", "data_fallback", ",", "'sources'", ":", "sources", ",", "'params'", ":", "params", ",", "'video'", ":", "v", ",", "'height'", ":", "settings", ".", "WIKI_VIDEO_HEIGHT", ",", "'width'", ":", "settings", ".", "WIKI_VIDEO_WIDTH", "}", ")" ]
takes a video object and returns html markup for embedding it .
train
false
49,255
def _SendAuthRequest(tester, url, http_method, user_cookie=None, request_dict=None, allow_errors=None): headers = {'Content-Type': 'application/json', 'Content-Encoding': 'gzip'} if (user_cookie is not None): headers['Cookie'] = ('user=%s' % user_cookie) headers['X-Xsrftoken'] = 'fake_xsrf' headers['Cookie'] = ((headers['Cookie'] + ';_xsrf=fake_xsrf') if headers.has_key('Cookie') else '_xsrf=fake_xsrf') with mock.patch.object(FetchContactsOperation, '_SKIP_UPDATE_FOR_TEST', True): response = tester._RunAsync(tester.http_client.fetch, url, method=http_method, body=(None if (request_dict is None) else GzipEncode(json.dumps(request_dict))), headers=headers, follow_redirects=False) if (response.code >= 400): if ((allow_errors is None) or (response.code not in allow_errors)): response.rethrow() return response
[ "def", "_SendAuthRequest", "(", "tester", ",", "url", ",", "http_method", ",", "user_cookie", "=", "None", ",", "request_dict", "=", "None", ",", "allow_errors", "=", "None", ")", ":", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", ",", "'Content-Encoding'", ":", "'gzip'", "}", "if", "(", "user_cookie", "is", "not", "None", ")", ":", "headers", "[", "'Cookie'", "]", "=", "(", "'user=%s'", "%", "user_cookie", ")", "headers", "[", "'X-Xsrftoken'", "]", "=", "'fake_xsrf'", "headers", "[", "'Cookie'", "]", "=", "(", "(", "headers", "[", "'Cookie'", "]", "+", "';_xsrf=fake_xsrf'", ")", "if", "headers", ".", "has_key", "(", "'Cookie'", ")", "else", "'_xsrf=fake_xsrf'", ")", "with", "mock", ".", "patch", ".", "object", "(", "FetchContactsOperation", ",", "'_SKIP_UPDATE_FOR_TEST'", ",", "True", ")", ":", "response", "=", "tester", ".", "_RunAsync", "(", "tester", ".", "http_client", ".", "fetch", ",", "url", ",", "method", "=", "http_method", ",", "body", "=", "(", "None", "if", "(", "request_dict", "is", "None", ")", "else", "GzipEncode", "(", "json", ".", "dumps", "(", "request_dict", ")", ")", ")", ",", "headers", "=", "headers", ",", "follow_redirects", "=", "False", ")", "if", "(", "response", ".", "code", ">=", "400", ")", ":", "if", "(", "(", "allow_errors", "is", "None", ")", "or", "(", "response", ".", "code", "not", "in", "allow_errors", ")", ")", ":", "response", ".", "rethrow", "(", ")", "return", "response" ]
sends request to auth service .
train
false
49,256
def ParseAndSimplify(query): node = Parse(query).tree node = _ColonToEquals(node) node = SequenceToConjunction(node) node = SimplifyNode(node) return node
[ "def", "ParseAndSimplify", "(", "query", ")", ":", "node", "=", "Parse", "(", "query", ")", ".", "tree", "node", "=", "_ColonToEquals", "(", "node", ")", "node", "=", "SequenceToConjunction", "(", "node", ")", "node", "=", "SimplifyNode", "(", "node", ")", "return", "node" ]
parses a query and performs all necessary transformations on the tree .
train
false
49,258
@treeio_login_required @handle_response_format def asset_delete(request, asset_id, response_format='html'): asset = get_object_or_404(Asset, pk=asset_id) if (not request.user.profile.has_permission(asset, mode='w')): return user_denied(request, "You don't have access to this Asset", response_format) if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): asset.trash = True asset.save() else: asset.delete() return HttpResponseRedirect(reverse('finance_index_assets')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('finance_asset_view', args=[asset.id])) return render_to_response('finance/asset_delete', {'asset': asset}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "asset_delete", "(", "request", ",", "asset_id", ",", "response_format", "=", "'html'", ")", ":", "asset", "=", "get_object_or_404", "(", "Asset", ",", "pk", "=", "asset_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "asset", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Asset\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "asset", ".", "trash", "=", "True", "asset", ".", "save", "(", ")", "else", ":", "asset", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_index_assets'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_asset_view'", ",", "args", "=", "[", "asset", ".", "id", "]", ")", ")", "return", "render_to_response", "(", "'finance/asset_delete'", ",", "{", "'asset'", ":", "asset", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
asset delete .
train
false
49,259
def get_or_create_node(title, user): try: node = Node.find_one((Q('title', 'iexact', title) & Q('contributors', 'eq', user))) return (node, False) except ModularOdmException: node = new_node('project', title, user) return (node, True)
[ "def", "get_or_create_node", "(", "title", ",", "user", ")", ":", "try", ":", "node", "=", "Node", ".", "find_one", "(", "(", "Q", "(", "'title'", ",", "'iexact'", ",", "title", ")", "&", "Q", "(", "'contributors'", ",", "'eq'", ",", "user", ")", ")", ")", "return", "(", "node", ",", "False", ")", "except", "ModularOdmException", ":", "node", "=", "new_node", "(", "'project'", ",", "title", ",", "user", ")", "return", "(", "node", ",", "True", ")" ]
get or create node by title and creating user .
train
false
49,260
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise the serial module .
train
false
49,261
def humanize_speed(speed): if ((speed >= 1000000000) and ((speed % 1000000000) == 0)): return '{} Tbps'.format((speed / 1000000000)) elif ((speed >= 1000000) and ((speed % 1000000) == 0)): return '{} Gbps'.format((speed / 1000000)) elif ((speed >= 1000) and ((speed % 1000) == 0)): return '{} Mbps'.format((speed / 1000)) elif (speed >= 1000): return '{} Mbps'.format((float(speed) / 1000)) else: return '{} Kbps'.format(speed)
[ "def", "humanize_speed", "(", "speed", ")", ":", "if", "(", "(", "speed", ">=", "1000000000", ")", "and", "(", "(", "speed", "%", "1000000000", ")", "==", "0", ")", ")", ":", "return", "'{} Tbps'", ".", "format", "(", "(", "speed", "/", "1000000000", ")", ")", "elif", "(", "(", "speed", ">=", "1000000", ")", "and", "(", "(", "speed", "%", "1000000", ")", "==", "0", ")", ")", ":", "return", "'{} Gbps'", ".", "format", "(", "(", "speed", "/", "1000000", ")", ")", "elif", "(", "(", "speed", ">=", "1000", ")", "and", "(", "(", "speed", "%", "1000", ")", "==", "0", ")", ")", ":", "return", "'{} Mbps'", ".", "format", "(", "(", "speed", "/", "1000", ")", ")", "elif", "(", "speed", ">=", "1000", ")", ":", "return", "'{} Mbps'", ".", "format", "(", "(", "float", "(", "speed", ")", "/", "1000", ")", ")", "else", ":", "return", "'{} Kbps'", ".", "format", "(", "speed", ")" ]
humanize speeds given in kbps .
train
false
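A quick sketch of the thresholds in humanize_speed (note the exact-multiple branches rely on Python 2 integer division; under Python 3 the same calls return floats such as '1.0 Gbps'):

print(humanize_speed(100))       # '100 Kbps'
print(humanize_speed(1500))      # '1.5 Mbps' (not an exact multiple of 1000)
print(humanize_speed(1000000))   # '1 Gbps' on Python 2, '1.0 Gbps' on Python 3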
49,262
def pmf_hist(a, bins=10): (n, x) = np.histogram(a, bins) h = (n / n.sum()) w = (x[1] - x[0]) return (x[:(-1)], h, w)
[ "def", "pmf_hist", "(", "a", ",", "bins", "=", "10", ")", ":", "(", "n", ",", "x", ")", "=", "np", ".", "histogram", "(", "a", ",", "bins", ")", "h", "=", "(", "n", "/", "n", ".", "sum", "(", ")", ")", "w", "=", "(", "x", "[", "1", "]", "-", "x", "[", "0", "]", ")", "return", "(", "x", "[", ":", "(", "-", "1", ")", "]", ",", "h", ",", "w", ")" ]
return arguments to plt.bar for plotting a pmf histogram .
train
false
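A minimal sketch of the plt.bar usage the description refers to (assumes numpy and matplotlib are installed, and true division so that n / n.sum() yields fractions):

import numpy as np
import matplotlib.pyplot as plt

a = np.random.randn(1000)
x, heights, width = pmf_hist(a, bins=20)  # left edges, probability masses, bar width
plt.bar(x, heights, width)
plt.show()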
49,264
def test_ordered_dict_only_dict(): schema = vol.Schema(cv.ordered_dict(cv.match_all, cv.match_all)) for value in (None, [], 100, 'hello'): with pytest.raises(vol.MultipleInvalid): schema(value)
[ "def", "test_ordered_dict_only_dict", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "ordered_dict", "(", "cv", ".", "match_all", ",", "cv", ".", "match_all", ")", ")", "for", "value", "in", "(", "None", ",", "[", "]", ",", "100", ",", "'hello'", ")", ":", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "schema", "(", "value", ")" ]
test ordered_dict validator .
train
false
49,265
def get_filter_name(field_name, lookup_expr): filter_name = LOOKUP_SEP.join([field_name, lookup_expr]) _exact = (LOOKUP_SEP + u'exact') if filter_name.endswith(_exact): filter_name = filter_name[:(- len(_exact))] return filter_name
[ "def", "get_filter_name", "(", "field_name", ",", "lookup_expr", ")", ":", "filter_name", "=", "LOOKUP_SEP", ".", "join", "(", "[", "field_name", ",", "lookup_expr", "]", ")", "_exact", "=", "(", "LOOKUP_SEP", "+", "u'exact'", ")", "if", "filter_name", ".", "endswith", "(", "_exact", ")", ":", "filter_name", "=", "filter_name", "[", ":", "(", "-", "len", "(", "_exact", ")", ")", "]", "return", "filter_name" ]
combine a field name and lookup expression into a usable filter name .
train
false
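A sketch of get_filter_name's behaviour, assuming Django's LOOKUP_SEP (u'__') as used by django-filter:

LOOKUP_SEP = u'__'  # assumption: Django's lookup separator

print(get_filter_name('name', 'exact'))          # 'name' (the trailing '__exact' is stripped)
print(get_filter_name('published', 'year__gt'))  # 'published__year__gt'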
49,266
def code_version(version): if (not isinstance(version, tuple)): raise TypeError('version must be tuple', version) def deco(f): f.code_version = version return f return deco
[ "def", "code_version", "(", "version", ")", ":", "if", "(", "not", "isinstance", "(", "version", ",", "tuple", ")", ")", ":", "raise", "TypeError", "(", "'version must be tuple'", ",", "version", ")", "def", "deco", "(", "f", ")", ":", "f", ".", "code_version", "=", "version", "return", "f", "return", "deco" ]
decorator to support version-based cache mechanism .
train
false
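Usage of the code_version decorator is straightforward; it only attaches the tuple so a cache layer can compare versions later:

@code_version((1, 2))
def compute():
    return 42

print(compute.code_version)  # (1, 2)
# code_version([1, 2]) raises TypeError: the version must be a tuple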
49,268
def makeCgiPrintEncoding(encoding): def cgiPrint(s): ('Encode the given string using the %s encoding, and replace\n chars outside the given charset with XML char references.' % encoding) s = escape(s, quote=1) if isinstance(s, unicode): s = s.encode(encoding, 'xmlcharrefreplace') return s return cgiPrint
[ "def", "makeCgiPrintEncoding", "(", "encoding", ")", ":", "def", "cgiPrint", "(", "s", ")", ":", "(", "'Encode the given string using the %s encoding, and replace\\n chars outside the given charset with XML char references.'", "%", "encoding", ")", "s", "=", "escape", "(", "s", ",", "quote", "=", "1", ")", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "s", "=", "s", ".", "encode", "(", "encoding", ",", "'xmlcharrefreplace'", ")", "return", "s", "return", "cgiPrint" ]
make a function to pretty-print strings for the web .
train
false
49,269
def default_latex_engine(config): if (config.language == 'ja'): return 'platex' else: return 'pdflatex'
[ "def", "default_latex_engine", "(", "config", ")", ":", "if", "(", "config", ".", "language", "==", "'ja'", ")", ":", "return", "'platex'", "else", ":", "return", "'pdflatex'" ]
better default latex_engine settings for specific languages .
train
false
49,270
def _from_hass_brightness(brightness): return (brightness / 255)
[ "def", "_from_hass_brightness", "(", "brightness", ")", ":", "return", "(", "brightness", "/", "255", ")" ]
convert home assistant brightness units to percentage .
train
false
49,272
@task def request_ssl_cert(ctx, domain): generate_key(ctx, domain) generate_key_nopass(ctx, domain) generate_csr(ctx, domain)
[ "@", "task", "def", "request_ssl_cert", "(", "ctx", ",", "domain", ")", ":", "generate_key", "(", "ctx", ",", "domain", ")", "generate_key_nopass", "(", "ctx", ",", "domain", ")", "generate_csr", "(", "ctx", ",", "domain", ")" ]
generate a key , a password-less key , and a csr for the given domain .
train
false
49,274
def fake_loads_value_error(content, *args, **kwargs): raise ValueError('HAHAHA! Totally not simplejson & you gave me bad JSON.')
[ "def", "fake_loads_value_error", "(", "content", ",", "*", "args", ",", "**", "kwargs", ")", ":", "raise", "ValueError", "(", "'HAHAHA! Totally not simplejson & you gave me bad JSON.'", ")" ]
callable to generate a fake valueerror .
train
false
49,275
def read_definitions(fromfile): with open(fromfile, 'r') as stream: contents = yaml.safe_load(stream) return [dict({'api': k}, **v) for (k, v) in contents.iteritems()]
[ "def", "read_definitions", "(", "fromfile", ")", ":", "with", "open", "(", "fromfile", ",", "'r'", ")", "as", "stream", ":", "contents", "=", "yaml", ".", "safe_load", "(", "stream", ")", "return", "[", "dict", "(", "{", "'api'", ":", "k", "}", ",", "**", "v", ")", "for", "(", "k", ",", "v", ")", "in", "contents", ".", "iteritems", "(", ")", "]" ]
read api signatures from a file .
train
false
49,276
def msbuild(registry, xml_parent, data): msbuilder = XML.SubElement(xml_parent, 'hudson.plugins.msbuild.MsBuildBuilder') msbuilder.set('plugin', 'msbuild') mapping = [('msbuild-version', 'msBuildName', '(Default)'), ('solution-file', 'msBuildFile', None), ('extra-parameters', 'cmdLineArgs', ''), ('pass-build-variables', 'buildVariablesAsProperties', True), ('continue-on-build-failure', 'continueOnBuildFailure', False), ('unstable-if-warnings', 'unstableIfWarnings', False)] convert_mapping_to_xml(msbuilder, data, mapping, fail_required=True)
[ "def", "msbuild", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "msbuilder", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.msbuild.MsBuildBuilder'", ")", "msbuilder", ".", "set", "(", "'plugin'", ",", "'msbuild'", ")", "mapping", "=", "[", "(", "'msbuild-version'", ",", "'msBuildName'", ",", "'(Default)'", ")", ",", "(", "'solution-file'", ",", "'msBuildFile'", ",", "None", ")", ",", "(", "'extra-parameters'", ",", "'cmdLineArgs'", ",", "''", ")", ",", "(", "'pass-build-variables'", ",", "'buildVariablesAsProperties'", ",", "True", ")", ",", "(", "'continue-on-build-failure'", ",", "'continueOnBuildFailure'", ",", "False", ")", ",", "(", "'unstable-if-warnings'", ",", "'unstableIfWarnings'", ",", "False", ")", "]", "convert_mapping_to_xml", "(", "msbuilder", ",", "data", ",", "mapping", ",", "fail_required", "=", "True", ")" ]
yaml: msbuild build .
train
false
49,278
def set_file_position(body, pos): if (pos is not None): rewind_body(body, pos) elif (getattr(body, 'tell', None) is not None): try: pos = body.tell() except (IOError, OSError): pos = _FAILEDTELL return pos
[ "def", "set_file_position", "(", "body", ",", "pos", ")", ":", "if", "(", "pos", "is", "not", "None", ")", ":", "rewind_body", "(", "body", ",", "pos", ")", "elif", "(", "getattr", "(", "body", ",", "'tell'", ",", "None", ")", "is", "not", "None", ")", ":", "try", ":", "pos", "=", "body", ".", "tell", "(", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "pos", "=", "_FAILEDTELL", "return", "pos" ]
if a position is provided , move the file to that point ; otherwise , attempt to record a position for future use .
train
true
49,280
def info_parameter(): def prep(r): if (r.representation == 'json'): list_fields = ['id', 'alert_id', 'info_id', 'name', 'value', 'mobile'] s3db.configure('cap_info_parameter', list_fields=list_fields) else: return return True s3.prep = prep return s3_rest_controller()
[ "def", "info_parameter", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "(", "r", ".", "representation", "==", "'json'", ")", ":", "list_fields", "=", "[", "'id'", ",", "'alert_id'", ",", "'info_id'", ",", "'name'", ",", "'value'", ",", "'mobile'", "]", "s3db", ".", "configure", "(", "'cap_info_parameter'", ",", "list_fields", "=", "list_fields", ")", "else", ":", "return", "return", "True", "s3", ".", "prep", "=", "prep", "return", "s3_rest_controller", "(", ")" ]
restful crud controller ; should only be accessed from the mobile client .
train
false
49,281
def fix_id(ID): if re.match(u'^[A-Za-z_][A-Za-z0-9_\\.\\-]*$', ID): return ID if len(ID): corrected = ID if ((not len(corrected)) or re.match(u'^[^A-Za-z_]$', corrected[0])): corrected = (u'_' + corrected) corrected = (re.sub(u'[^A-Za-z_]', u'_', corrected[0]) + re.sub(u'[^A-Za-z0-9_\\.\\-]', u'_', corrected[1:])) return corrected return u''
[ "def", "fix_id", "(", "ID", ")", ":", "if", "re", ".", "match", "(", "u'^[A-Za-z_][A-Za-z0-9_\\\\.\\\\-]*$'", ",", "ID", ")", ":", "return", "ID", "if", "len", "(", "ID", ")", ":", "corrected", "=", "ID", "if", "(", "(", "not", "len", "(", "corrected", ")", ")", "or", "re", ".", "match", "(", "u'^[^A-Za-z_]$'", ",", "corrected", "[", "0", "]", ")", ")", ":", "corrected", "=", "(", "u'_'", "+", "corrected", ")", "corrected", "=", "(", "re", ".", "sub", "(", "u'[^A-Za-z_]'", ",", "u'_'", ",", "corrected", "[", "0", "]", ")", "+", "re", ".", "sub", "(", "u'[^A-Za-z0-9_\\\\.\\\\-]'", ",", "u'_'", ",", "corrected", "[", "1", ":", "]", ")", ")", "return", "corrected", "return", "u''" ]
given an arbitrary string , return a valid xml id derived from it .
train
false
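Illustrative inputs for fix_id (it prepends an underscore when the first character cannot start an id, then replaces remaining invalid characters with underscores):

print(fix_id(u'valid_id'))  # u'valid_id' (already well-formed, returned unchanged)
print(fix_id(u'123-abc'))   # u'_123-abc'
print(fix_id(u'my id!'))    # u'my_id_'
print(fix_id(u''))          # u''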
49,282
def getGraphicsControlExt(duration=0.1): bb = '!\xf9\x04' bb += '\x08' bb += intToBin(int((duration * 100))) bb += '\x00' bb += '\x00' return bb
[ "def", "getGraphicsControlExt", "(", "duration", "=", "0.1", ")", ":", "bb", "=", "'!\\xf9\\x04'", "bb", "+=", "'\\x08'", "bb", "+=", "intToBin", "(", "int", "(", "(", "duration", "*", "100", ")", ")", ")", "bb", "+=", "'\\x00'", "bb", "+=", "'\\x00'", "return", "bb" ]
graphics control extension .
train
false
49,283
@pytest.fixture def function_loader(): return loaders.FunctionLoader({'justfunction.html': 'FOO'}.get)
[ "@", "pytest", ".", "fixture", "def", "function_loader", "(", ")", ":", "return", "loaders", ".", "FunctionLoader", "(", "{", "'justfunction.html'", ":", "'FOO'", "}", ".", "get", ")" ]
returns a functionloader .
train
false
49,285
def dctEncode(stream, parameters): encodedStream = '' return ((-1), 'DctEncode not supported yet')
[ "def", "dctEncode", "(", "stream", ",", "parameters", ")", ":", "encodedStream", "=", "''", "return", "(", "(", "-", "1", ")", ",", "'DctEncode not supported yet'", ")" ]
method to encode streams using a dct technique based on the jpeg standard .
train
false
49,286
def github_signature(string, shared_secret, challenge_hmac): if six.PY3: msg = salt.utils.to_bytes(string) key = salt.utils.to_bytes(shared_secret) (hashtype, challenge) = salt.utils.to_bytes(challenge_hmac).split('=') else: msg = string key = shared_secret (hashtype, challenge) = challenge_hmac.split('=') hmac_hash = hmac.new(key, msg, getattr(hashlib, hashtype)) return (hmac_hash.hexdigest() == challenge)
[ "def", "github_signature", "(", "string", ",", "shared_secret", ",", "challenge_hmac", ")", ":", "if", "six", ".", "PY3", ":", "msg", "=", "salt", ".", "utils", ".", "to_bytes", "(", "string", ")", "key", "=", "salt", ".", "utils", ".", "to_bytes", "(", "shared_secret", ")", "(", "hashtype", ",", "challenge", ")", "=", "salt", ".", "utils", ".", "to_bytes", "(", "challenge_hmac", ")", ".", "split", "(", "'='", ")", "else", ":", "msg", "=", "string", "key", "=", "shared_secret", "(", "hashtype", ",", "challenge", ")", "=", "challenge_hmac", ".", "split", "(", "'='", ")", "hmac_hash", "=", "hmac", ".", "new", "(", "key", ",", "msg", ",", "getattr", "(", "hashlib", ",", "hashtype", ")", ")", "return", "(", "hmac_hash", ".", "hexdigest", "(", ")", "==", "challenge", ")" ]
verify a challenging hmac signature against a string / shared-secret for github webhooks .
train
false
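A hedged roundtrip for github_signature using the Python 2 branch of the snippet (GitHub's X-Hub-Signature header has the form 'sha1=<hexdigest>'):

import hashlib
import hmac

payload, secret = 'payload body', 'shared secret'
header = 'sha1=' + hmac.new(secret, payload, hashlib.sha1).hexdigest()
print(github_signature(payload, secret, header))          # True
print(github_signature(payload, 'wrong secret', header))  # False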
49,288
@register.filter def cache_tree_children(queryset): return get_cached_trees(queryset)
[ "@", "register", ".", "filter", "def", "cache_tree_children", "(", "queryset", ")", ":", "return", "get_cached_trees", "(", "queryset", ")" ]
alias to mptt.utils.get_cached_trees .
train
false
49,290
def test_count(objects): assert (objects.history.count() == len(ITEMS))
[ "def", "test_count", "(", "objects", ")", ":", "assert", "(", "objects", ".", "history", ".", "count", "(", ")", "==", "len", "(", "ITEMS", ")", ")" ]
check if the history's count was loaded correctly .
train
false
49,293
def discoverItems(disp, jid, node=None): ' According to JEP-0030:\n query MAY have node attribute\n item: MUST HAVE jid attribute and MAY HAVE name, node, action attributes.\n action attribute of item can be either of remove or update value.' ret = [] for i in _discover(disp, NS_DISCO_ITEMS, jid, node): if ((i.getName() == 'agent') and i.getTag('name')): i.setAttr('name', i.getTagData('name')) ret.append(i.attrs) return ret
[ "def", "discoverItems", "(", "disp", ",", "jid", ",", "node", "=", "None", ")", ":", "ret", "=", "[", "]", "for", "i", "in", "_discover", "(", "disp", ",", "NS_DISCO_ITEMS", ",", "jid", ",", "node", ")", ":", "if", "(", "(", "i", ".", "getName", "(", ")", "==", "'agent'", ")", "and", "i", ".", "getTag", "(", "'name'", ")", ")", ":", "i", ".", "setAttr", "(", "'name'", ",", "i", ".", "getTagData", "(", "'name'", ")", ")", "ret", ".", "append", "(", "i", ".", "attrs", ")", "return", "ret" ]
query remote object about any items that it contains .
train
false
49,294
def check_errcode(result, func, cargs, cpl=False): check_err(result, cpl=cpl)
[ "def", "check_errcode", "(", "result", ",", "func", ",", "cargs", ",", "cpl", "=", "False", ")", ":", "check_err", "(", "result", ",", "cpl", "=", "cpl", ")" ]
check the error code returned .
train
false
49,295
def RC4(data, key): y = 0 hash = {} box = {} ret = '' keyLength = len(key) dataLength = len(data) for x in range(256): hash[x] = ord(key[(x % keyLength)]) box[x] = x for x in range(256): y = (((y + int(box[x])) + int(hash[x])) % 256) tmp = box[x] box[x] = box[y] box[y] = tmp z = y = 0 for x in range(0, dataLength): z = ((z + 1) % 256) y = ((y + box[z]) % 256) tmp = box[z] box[z] = box[y] box[y] = tmp k = box[((box[z] + box[y]) % 256)] ret += chr((ord(data[x]) ^ k)) return ret
[ "def", "RC4", "(", "data", ",", "key", ")", ":", "y", "=", "0", "hash", "=", "{", "}", "box", "=", "{", "}", "ret", "=", "''", "keyLength", "=", "len", "(", "key", ")", "dataLength", "=", "len", "(", "data", ")", "for", "x", "in", "range", "(", "256", ")", ":", "hash", "[", "x", "]", "=", "ord", "(", "key", "[", "(", "x", "%", "keyLength", ")", "]", ")", "box", "[", "x", "]", "=", "x", "for", "x", "in", "range", "(", "256", ")", ":", "y", "=", "(", "(", "(", "y", "+", "int", "(", "box", "[", "x", "]", ")", ")", "+", "int", "(", "hash", "[", "x", "]", ")", ")", "%", "256", ")", "tmp", "=", "box", "[", "x", "]", "box", "[", "x", "]", "=", "box", "[", "y", "]", "box", "[", "y", "]", "=", "tmp", "z", "=", "y", "=", "0", "for", "x", "in", "range", "(", "0", ",", "dataLength", ")", ":", "z", "=", "(", "(", "z", "+", "1", ")", "%", "256", ")", "y", "=", "(", "(", "y", "+", "box", "[", "z", "]", ")", "%", "256", ")", "tmp", "=", "box", "[", "z", "]", "box", "[", "z", "]", "=", "box", "[", "y", "]", "box", "[", "y", "]", "=", "tmp", "k", "=", "box", "[", "(", "(", "box", "[", "z", "]", "+", "box", "[", "y", "]", ")", "%", "256", ")", "]", "ret", "+=", "chr", "(", "(", "ord", "(", "data", "[", "x", "]", ")", "^", "k", ")", ")", "return", "ret" ]
rc4 implementation .
train
false
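RC4 is a symmetric stream cipher, so applying the function twice with the same key is a roundtrip (RC4 is cryptographically broken, so treat this as legacy-format support only):

cipher = RC4('secret message', 'key')
print(RC4(cipher, 'key'))  # 'secret message'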
49,298
def test_train_cv(): skip_if_no_sklearn() (handle, layer0_filename) = tempfile.mkstemp() (handle, layer1_filename) = tempfile.mkstemp() (handle, layer2_filename) = tempfile.mkstemp() trainer = yaml_parse.load((test_yaml_layer0 % {'layer0_filename': layer0_filename})) trainer.main_loop() trainer = yaml_parse.load((test_yaml_layer1 % {'layer0_filename': layer0_filename, 'layer1_filename': layer1_filename})) trainer.main_loop() trainer = yaml_parse.load((test_yaml_layer2 % {'layer0_filename': layer0_filename, 'layer1_filename': layer1_filename, 'layer2_filename': layer2_filename})) trainer.main_loop() trainer = yaml_parse.load((test_yaml_layer3 % {'layer0_filename': layer0_filename, 'layer1_filename': layer1_filename, 'layer2_filename': layer2_filename})) trainer.main_loop() os.remove(layer0_filename) os.remove(layer1_filename)
[ "def", "test_train_cv", "(", ")", ":", "skip_if_no_sklearn", "(", ")", "(", "handle", ",", "layer0_filename", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "(", "handle", ",", "layer1_filename", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "(", "handle", ",", "layer2_filename", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "(", "test_yaml_layer0", "%", "{", "'layer0_filename'", ":", "layer0_filename", "}", ")", ")", "trainer", ".", "main_loop", "(", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "(", "test_yaml_layer1", "%", "{", "'layer0_filename'", ":", "layer0_filename", ",", "'layer1_filename'", ":", "layer1_filename", "}", ")", ")", "trainer", ".", "main_loop", "(", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "(", "test_yaml_layer2", "%", "{", "'layer0_filename'", ":", "layer0_filename", ",", "'layer1_filename'", ":", "layer1_filename", ",", "'layer2_filename'", ":", "layer2_filename", "}", ")", ")", "trainer", ".", "main_loop", "(", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "(", "test_yaml_layer3", "%", "{", "'layer0_filename'", ":", "layer0_filename", ",", "'layer1_filename'", ":", "layer1_filename", ",", "'layer2_filename'", ":", "layer2_filename", "}", ")", ")", "trainer", ".", "main_loop", "(", ")", "os", ".", "remove", "(", "layer0_filename", ")", "os", ".", "remove", "(", "layer1_filename", ")" ]
test traincv class .
train
false
49,299
def test_equalize_uint8_approx(): img_eq0 = exposure.equalize_hist(test_img_int) img_eq1 = exposure.equalize_hist(test_img_int, nbins=3) np.testing.assert_allclose(img_eq0, img_eq1)
[ "def", "test_equalize_uint8_approx", "(", ")", ":", "img_eq0", "=", "exposure", ".", "equalize_hist", "(", "test_img_int", ")", "img_eq1", "=", "exposure", ".", "equalize_hist", "(", "test_img_int", ",", "nbins", "=", "3", ")", "np", ".", "testing", ".", "assert_allclose", "(", "img_eq0", ",", "img_eq1", ")" ]
check integer bins used for uint8 images .
train
false
49,300
def quote_key(key, reverse=False): r = ((-1) if reverse else 1) for k in key.split('.')[::r]: if k.startswith('$'): k = parse.quote(k) (yield k)
[ "def", "quote_key", "(", "key", ",", "reverse", "=", "False", ")", ":", "r", "=", "(", "(", "-", "1", ")", "if", "reverse", "else", "1", ")", "for", "k", "in", "key", ".", "split", "(", "'.'", ")", "[", ":", ":", "r", "]", ":", "if", "k", ".", "startswith", "(", "'$'", ")", ":", "k", "=", "parse", ".", "quote", "(", "k", ")", "(", "yield", "k", ")" ]
prepare key for storage data in mongodb .
train
false
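quote_key is a generator, so callers typically rejoin the parts; a sketch assuming the snippet's parse is urllib.parse:

from urllib import parse  # assumption: source of the snippet's `parse`

print('.'.join(quote_key('a.$set.b')))                # 'a.%24set.b' ($-prefixed keys get quoted)
print('.'.join(quote_key('a.$set.b', reverse=True)))  # 'b.%24set.a'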
49,301
def relative_time(d, other=None, ndigits=0): (drt, unit) = decimal_relative_time(d, other, ndigits, cardinalize=True) phrase = 'ago' if (drt < 0): phrase = 'from now' return ('%g %s %s' % (abs(drt), unit, phrase))
[ "def", "relative_time", "(", "d", ",", "other", "=", "None", ",", "ndigits", "=", "0", ")", ":", "(", "drt", ",", "unit", ")", "=", "decimal_relative_time", "(", "d", ",", "other", ",", "ndigits", ",", "cardinalize", "=", "True", ")", "phrase", "=", "'ago'", "if", "(", "drt", "<", "0", ")", ":", "phrase", "=", "'from now'", "return", "(", "'%g %s %s'", "%", "(", "abs", "(", "drt", ")", ",", "unit", ",", "phrase", ")", ")" ]
get a string representation of the difference between two :class:~datetime.datetime objects .
train
true
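Assuming decimal_relative_time (from the same boltons-style module) defaults other to the current time and measures other minus d, usage looks like:

from datetime import datetime, timedelta

print(relative_time(datetime.now() - timedelta(days=2)))  # '2 days ago'
print(relative_time(datetime.now() + timedelta(days=2)))  # '2 days from now'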
49,302
def get_default_user(): superusers = get_user_model().objects.filter(is_superuser=True).order_by('id') if (superusers.count() > 0): return superusers[0] else: raise GeoNodeException('You must have an admin account configured before importing data. Try: django-admin.py createsuperuser')
[ "def", "get_default_user", "(", ")", ":", "superusers", "=", "get_user_model", "(", ")", ".", "objects", ".", "filter", "(", "is_superuser", "=", "True", ")", ".", "order_by", "(", "'id'", ")", "if", "(", "superusers", ".", "count", "(", ")", ">", "0", ")", ":", "return", "superusers", "[", "0", "]", "else", ":", "raise", "GeoNodeException", "(", "'You must have an admin account configured before importing data. Try: django-admin.py createsuperuser'", ")" ]
create a default user .
train
false
49,303
def shorten_comment(line, max_line_length, last_comment=False): assert (len(line) > max_line_length) line = line.rstrip() indentation = (_get_indentation(line) + u'# ') max_line_length = min(max_line_length, (len(indentation) + 72)) MIN_CHARACTER_REPEAT = 5 if (((len(line) - len(line.rstrip(line[(-1)]))) >= MIN_CHARACTER_REPEAT) and (not line[(-1)].isalnum())): return (line[:max_line_length] + u'\n') elif (last_comment and re.match(u'\\s*#+\\s*\\w+', line)): split_lines = textwrap.wrap(line.lstrip(u' DCTB #'), initial_indent=indentation, subsequent_indent=indentation, width=max_line_length, break_long_words=False, break_on_hyphens=False) return (u'\n'.join(split_lines) + u'\n') else: return (line + u'\n')
[ "def", "shorten_comment", "(", "line", ",", "max_line_length", ",", "last_comment", "=", "False", ")", ":", "assert", "(", "len", "(", "line", ")", ">", "max_line_length", ")", "line", "=", "line", ".", "rstrip", "(", ")", "indentation", "=", "(", "_get_indentation", "(", "line", ")", "+", "u'# '", ")", "max_line_length", "=", "min", "(", "max_line_length", ",", "(", "len", "(", "indentation", ")", "+", "72", ")", ")", "MIN_CHARACTER_REPEAT", "=", "5", "if", "(", "(", "(", "len", "(", "line", ")", "-", "len", "(", "line", ".", "rstrip", "(", "line", "[", "(", "-", "1", ")", "]", ")", ")", ")", ">=", "MIN_CHARACTER_REPEAT", ")", "and", "(", "not", "line", "[", "(", "-", "1", ")", "]", ".", "isalnum", "(", ")", ")", ")", ":", "return", "(", "line", "[", ":", "max_line_length", "]", "+", "u'\\n'", ")", "elif", "(", "last_comment", "and", "re", ".", "match", "(", "u'\\\\s*#+\\\\s*\\\\w+'", ",", "line", ")", ")", ":", "split_lines", "=", "textwrap", ".", "wrap", "(", "line", ".", "lstrip", "(", "u' DCTB #'", ")", ",", "initial_indent", "=", "indentation", ",", "subsequent_indent", "=", "indentation", ",", "width", "=", "max_line_length", ",", "break_long_words", "=", "False", ",", "break_on_hyphens", "=", "False", ")", "return", "(", "u'\\n'", ".", "join", "(", "split_lines", ")", "+", "u'\\n'", ")", "else", ":", "return", "(", "line", "+", "u'\\n'", ")" ]
return trimmed or split long comment line .
train
true
49,304
@require_admin @api_handle_error_with_json def delete_videos(request): paths = OrderedSet(json.loads((request.body or '{}')).get('paths', [])) lang = json.loads((request.body or '{}')).get('lang', 'en') youtube_ids = get_download_youtube_ids(paths, language=lang, downloaded=True) num_deleted = 0 for id in youtube_ids: if delete_downloaded_files(id): num_deleted += 1 annotate_content_models_by_youtube_id(youtube_ids=youtube_ids.keys(), language=lang) return JsonResponseMessageSuccess((_('Deleted %(num_videos)s video(s) successfully.') % {'num_videos': num_deleted}))
[ "@", "require_admin", "@", "api_handle_error_with_json", "def", "delete_videos", "(", "request", ")", ":", "paths", "=", "OrderedSet", "(", "json", ".", "loads", "(", "(", "request", ".", "body", "or", "'{}'", ")", ")", ".", "get", "(", "'paths'", ",", "[", "]", ")", ")", "lang", "=", "json", ".", "loads", "(", "(", "request", ".", "body", "or", "'{}'", ")", ")", ".", "get", "(", "'lang'", ",", "'en'", ")", "youtube_ids", "=", "get_download_youtube_ids", "(", "paths", ",", "language", "=", "lang", ",", "downloaded", "=", "True", ")", "num_deleted", "=", "0", "for", "id", "in", "youtube_ids", ":", "if", "delete_downloaded_files", "(", "id", ")", ":", "num_deleted", "+=", "1", "annotate_content_models_by_youtube_id", "(", "youtube_ids", "=", "youtube_ids", ".", "keys", "(", ")", ",", "language", "=", "lang", ")", "return", "JsonResponseMessageSuccess", "(", "(", "_", "(", "'Deleted %(num_videos)s video(s) successfully.'", ")", "%", "{", "'num_videos'", ":", "num_deleted", "}", ")", ")" ]
api endpoint for deleting videos .
train
false
49,305
def s3_rheader_resource(r): _vars = r.get_vars if ('viewing' in _vars): try: (tablename, record_id) = _vars.viewing.rsplit('.', 1) db = current.db record = db[tablename][record_id] except: tablename = r.tablename record = r.record else: tablename = r.tablename record = r.record return (tablename, record)
[ "def", "s3_rheader_resource", "(", "r", ")", ":", "_vars", "=", "r", ".", "get_vars", "if", "(", "'viewing'", "in", "_vars", ")", ":", "try", ":", "(", "tablename", ",", "record_id", ")", "=", "_vars", ".", "viewing", ".", "rsplit", "(", "'.'", ",", "1", ")", "db", "=", "current", ".", "db", "record", "=", "db", "[", "tablename", "]", "[", "record_id", "]", "except", ":", "tablename", "=", "r", ".", "tablename", "record", "=", "r", ".", "record", "else", ":", "tablename", "=", "r", ".", "tablename", "record", "=", "r", ".", "record", "return", "(", "tablename", ",", "record", ")" ]
identify the tablename and record id for the rheader .
train
false
49,306
@region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME) def get_series_episode(series_id, season, episode): result = tvdb_client.query_series_episodes(series_id, aired_season=season, aired_episode=episode) if result: return tvdb_client.get_episode(result['data'][0]['id'])
[ "@", "region", ".", "cache_on_arguments", "(", "expiration_time", "=", "REFINER_EXPIRATION_TIME", ")", "def", "get_series_episode", "(", "series_id", ",", "season", ",", "episode", ")", ":", "result", "=", "tvdb_client", ".", "query_series_episodes", "(", "series_id", ",", "aired_season", "=", "season", ",", "aired_episode", "=", "episode", ")", "if", "result", ":", "return", "tvdb_client", ".", "get_episode", "(", "result", "[", "'data'", "]", "[", "0", "]", "[", "'id'", "]", ")" ]
get an episode of a series .
train
false
49,307
def square_sort(arr): a_list = [(- i) for i in arr if (i < 0)] if a_list: b_list = arr[:(len(a_list) - 1):(-1)] res = [] while (a_list and b_list): if (a_list[(-1)] <= b_list[(-1)]): res.append((a_list.pop((-1)) ** 2)) else: res.append((b_list.pop((-1)) ** 2)) return (res + [(o ** 2) for o in (a_list[::(-1)] + b_list[::(-1)])]) else: return [(o ** 2) for o in arr]
[ "def", "square_sort", "(", "arr", ")", ":", "a_list", "=", "[", "(", "-", "i", ")", "for", "i", "in", "arr", "if", "(", "i", "<", "0", ")", "]", "if", "a_list", ":", "b_list", "=", "arr", "[", ":", "(", "len", "(", "a_list", ")", "-", "1", ")", ":", "(", "-", "1", ")", "]", "res", "=", "[", "]", "while", "(", "a_list", "and", "b_list", ")", ":", "if", "(", "a_list", "[", "(", "-", "1", ")", "]", "<=", "b_list", "[", "(", "-", "1", ")", "]", ")", ":", "res", ".", "append", "(", "(", "a_list", ".", "pop", "(", "(", "-", "1", ")", ")", "**", "2", ")", ")", "else", ":", "res", ".", "append", "(", "(", "b_list", ".", "pop", "(", "(", "-", "1", ")", ")", "**", "2", ")", ")", "return", "(", "res", "+", "[", "(", "o", "**", "2", ")", "for", "o", "in", "(", "a_list", "[", ":", ":", "(", "-", "1", ")", "]", "+", "b_list", "[", ":", ":", "(", "-", "1", ")", "]", ")", "]", ")", "else", ":", "return", "[", "(", "o", "**", "2", ")", "for", "o", "in", "arr", "]" ]
given a sorted array , return its elements squared , still in sorted order .
train
false
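A worked example of the merge in square_sort: negatives are peeled off and negated, then merged against the non-negative tail by absolute value, so the squares come out sorted in a single O(n) pass:

print(square_sort([-3, -1, 0, 2, 4]))  # [0, 1, 4, 9, 16]
print(square_sort([1, 2, 3]))          # [1, 4, 9] (no negatives: a plain squaring pass)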
49,308
def warp_coords(coord_map, shape, dtype=np.float64): shape = safe_as_int(shape) (rows, cols) = (shape[0], shape[1]) coords_shape = [len(shape), rows, cols] if (len(shape) == 3): coords_shape.append(shape[2]) coords = np.empty(coords_shape, dtype=dtype) tf_coords = np.indices((cols, rows), dtype=dtype).reshape(2, (-1)).T tf_coords = coord_map(tf_coords) tf_coords = tf_coords.T.reshape(((-1), cols, rows)).swapaxes(1, 2) _stackcopy(coords[1, ...], tf_coords[0, ...]) _stackcopy(coords[0, ...], tf_coords[1, ...]) if (len(shape) == 3): coords[2, ...] = range(shape[2]) return coords
[ "def", "warp_coords", "(", "coord_map", ",", "shape", ",", "dtype", "=", "np", ".", "float64", ")", ":", "shape", "=", "safe_as_int", "(", "shape", ")", "(", "rows", ",", "cols", ")", "=", "(", "shape", "[", "0", "]", ",", "shape", "[", "1", "]", ")", "coords_shape", "=", "[", "len", "(", "shape", ")", ",", "rows", ",", "cols", "]", "if", "(", "len", "(", "shape", ")", "==", "3", ")", ":", "coords_shape", ".", "append", "(", "shape", "[", "2", "]", ")", "coords", "=", "np", ".", "empty", "(", "coords_shape", ",", "dtype", "=", "dtype", ")", "tf_coords", "=", "np", ".", "indices", "(", "(", "cols", ",", "rows", ")", ",", "dtype", "=", "dtype", ")", ".", "reshape", "(", "2", ",", "(", "-", "1", ")", ")", ".", "T", "tf_coords", "=", "coord_map", "(", "tf_coords", ")", "tf_coords", "=", "tf_coords", ".", "T", ".", "reshape", "(", "(", "(", "-", "1", ")", ",", "cols", ",", "rows", ")", ")", ".", "swapaxes", "(", "1", ",", "2", ")", "_stackcopy", "(", "coords", "[", "1", ",", "...", "]", ",", "tf_coords", "[", "0", ",", "...", "]", ")", "_stackcopy", "(", "coords", "[", "0", ",", "...", "]", ",", "tf_coords", "[", "1", ",", "...", "]", ")", "if", "(", "len", "(", "shape", ")", "==", "3", ")", ":", "coords", "[", "2", ",", "...", "]", "=", "range", "(", "shape", "[", "2", "]", ")", "return", "coords" ]
build the source coordinates for the output of a 2-d image warp .
train
false
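A usage sketch in the style of the scikit-image docs (assumes the helpers safe_as_int and _stackcopy from skimage.transform are in scope; coord_map maps output coordinates to input coordinates in (col, row) order):

import numpy as np
from scipy.ndimage import map_coordinates

def shift_down_right(xy):
    return xy - 1  # each output pixel samples one pixel up and to the left

image = np.arange(25, dtype=np.float64).reshape(5, 5)
coords = warp_coords(shift_down_right, image.shape)
warped = map_coordinates(image, coords)  # out-of-range samples fill with 0 by default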
49,310
def follow_log(module, le_path, logs, name=None, logtype=None): followed_count = 0 for log in logs: if query_log_status(module, le_path, log): continue if module.check_mode: module.exit_json(changed=True) cmd = [le_path, 'follow', log] if name: cmd.extend(['--name', name]) if logtype: cmd.extend(['--type', logtype]) (rc, out, err) = module.run_command(' '.join(cmd)) if (not query_log_status(module, le_path, log)): module.fail_json(msg=("failed to follow '%s': %s" % (log, err.strip()))) followed_count += 1 if (followed_count > 0): module.exit_json(changed=True, msg=('followed %d log(s)' % (followed_count,))) module.exit_json(changed=False, msg='logs(s) already followed')
[ "def", "follow_log", "(", "module", ",", "le_path", ",", "logs", ",", "name", "=", "None", ",", "logtype", "=", "None", ")", ":", "followed_count", "=", "0", "for", "log", "in", "logs", ":", "if", "query_log_status", "(", "module", ",", "le_path", ",", "log", ")", ":", "continue", "if", "module", ".", "check_mode", ":", "module", ".", "exit_json", "(", "changed", "=", "True", ")", "cmd", "=", "[", "le_path", ",", "'follow'", ",", "log", "]", "if", "name", ":", "cmd", ".", "extend", "(", "[", "'--name'", ",", "name", "]", ")", "if", "logtype", ":", "cmd", ".", "extend", "(", "[", "'--type'", ",", "logtype", "]", ")", "(", "rc", ",", "out", ",", "err", ")", "=", "module", ".", "run_command", "(", "' '", ".", "join", "(", "cmd", ")", ")", "if", "(", "not", "query_log_status", "(", "module", ",", "le_path", ",", "log", ")", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "\"failed to follow '%s': %s\"", "%", "(", "log", ",", "err", ".", "strip", "(", ")", ")", ")", ")", "followed_count", "+=", "1", "if", "(", "followed_count", ">", "0", ")", ":", "module", ".", "exit_json", "(", "changed", "=", "True", ",", "msg", "=", "(", "'followed %d log(s)'", "%", "(", "followed_count", ",", ")", ")", ")", "module", ".", "exit_json", "(", "changed", "=", "False", ",", "msg", "=", "'logs(s) already followed'", ")" ]
follows one or more logs if not already followed .
train
false
49,312
def _ensure_inventory_uptodate(inventory, container_skel): host_vars = inventory['_meta']['hostvars'] for (hostname, _vars) in host_vars.items(): if ('container_name' not in _vars): _vars['container_name'] = hostname for rh in REQUIRED_HOSTVARS: if (rh not in _vars): _vars[rh] = None if (rh == 'container_networks'): _vars[rh] = {} for (container_type, type_vars) in container_skel.items(): item = inventory.get(container_type) hosts = item.get('hosts') if hosts: for host in hosts: container = host_vars[host] if ('properties' in type_vars): logger.debug('Copied propeties for %s from skeleton', container) container['properties'] = type_vars['properties']
[ "def", "_ensure_inventory_uptodate", "(", "inventory", ",", "container_skel", ")", ":", "host_vars", "=", "inventory", "[", "'_meta'", "]", "[", "'hostvars'", "]", "for", "(", "hostname", ",", "_vars", ")", "in", "host_vars", ".", "items", "(", ")", ":", "if", "(", "'container_name'", "not", "in", "_vars", ")", ":", "_vars", "[", "'container_name'", "]", "=", "hostname", "for", "rh", "in", "REQUIRED_HOSTVARS", ":", "if", "(", "rh", "not", "in", "_vars", ")", ":", "_vars", "[", "rh", "]", "=", "None", "if", "(", "rh", "==", "'container_networks'", ")", ":", "_vars", "[", "rh", "]", "=", "{", "}", "for", "(", "container_type", ",", "type_vars", ")", "in", "container_skel", ".", "items", "(", ")", ":", "item", "=", "inventory", ".", "get", "(", "container_type", ")", "hosts", "=", "item", ".", "get", "(", "'hosts'", ")", "if", "hosts", ":", "for", "host", "in", "hosts", ":", "container", "=", "host_vars", "[", "host", "]", "if", "(", "'properties'", "in", "type_vars", ")", ":", "logger", ".", "debug", "(", "'Copied propeties for %s from skeleton'", ",", "container", ")", "container", "[", "'properties'", "]", "=", "type_vars", "[", "'properties'", "]" ]
update inventory if needed .
train
false
49,313
@register.function def services_url(viewname, *args, **kwargs): kwargs.update({'host': settings.SERVICES_URL}) return url(viewname, *args, **kwargs)
[ "@", "register", ".", "function", "def", "services_url", "(", "viewname", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", ".", "update", "(", "{", "'host'", ":", "settings", ".", "SERVICES_URL", "}", ")", "return", "url", "(", "viewname", ",", "*", "args", ",", "**", "kwargs", ")" ]
helper for url with host=services_url .
train
false
49,315
def trace_view(request): if (request.method.upper() != 'TRACE'): return HttpResponseNotAllowed('TRACE') elif request.body: return HttpResponseBadRequest('TRACE requests MUST NOT include an entity') else: protocol = request.META['SERVER_PROTOCOL'] t = Template('{{ method }} {{ uri }} {{ version }}', name='TRACE Template') c = Context({'method': request.method, 'uri': request.path, 'version': protocol}) return HttpResponse(t.render(c))
[ "def", "trace_view", "(", "request", ")", ":", "if", "(", "request", ".", "method", ".", "upper", "(", ")", "!=", "'TRACE'", ")", ":", "return", "HttpResponseNotAllowed", "(", "'TRACE'", ")", "elif", "request", ".", "body", ":", "return", "HttpResponseBadRequest", "(", "'TRACE requests MUST NOT include an entity'", ")", "else", ":", "protocol", "=", "request", ".", "META", "[", "'SERVER_PROTOCOL'", "]", "t", "=", "Template", "(", "'{{ method }} {{ uri }} {{ version }}'", ",", "name", "=", "'TRACE Template'", ")", "c", "=", "Context", "(", "{", "'method'", ":", "request", ".", "method", ",", "'uri'", ":", "request", ".", "path", ",", "'version'", ":", "protocol", "}", ")", "return", "HttpResponse", "(", "t", ".", "render", "(", "c", ")", ")" ]
a simple view that expects a trace request and echoes its status line .
train
false
49,316
def htmlentityreplace_errors(ex): if isinstance(ex, UnicodeEncodeError): bad_text = ex.object[ex.start:ex.end] text = _html_entities_escaper.escape(bad_text) return (unicode(text), ex.end) raise ex
[ "def", "htmlentityreplace_errors", "(", "ex", ")", ":", "if", "isinstance", "(", "ex", ",", "UnicodeEncodeError", ")", ":", "bad_text", "=", "ex", ".", "object", "[", "ex", ".", "start", ":", "ex", ".", "end", "]", "text", "=", "_html_entities_escaper", ".", "escape", "(", "bad_text", ")", "return", "(", "unicode", "(", "text", ")", ",", "ex", ".", "end", ")", "raise", "ex" ]
an encoding error handler .
train
false
49,317
def _defined_characters_in_range(range_str): characters = set() for code in _range_string_to_set(range_str): if (unicode_data.is_defined(code) and (unicode_data.age(code) is not None)): characters.add(code) return characters
[ "def", "_defined_characters_in_range", "(", "range_str", ")", ":", "characters", "=", "set", "(", ")", "for", "code", "in", "_range_string_to_set", "(", "range_str", ")", ":", "if", "(", "unicode_data", ".", "is_defined", "(", "code", ")", "and", "(", "unicode_data", ".", "age", "(", "code", ")", "is", "not", "None", ")", ")", ":", "characters", ".", "add", "(", "code", ")", "return", "characters" ]
given a range string , return the set of defined characters within it .
train
false
49,318
def check_parallel(name, X, y): ForestEstimator = FOREST_ESTIMATORS[name] forest = ForestEstimator(n_estimators=10, n_jobs=3, random_state=0) forest.fit(X, y) assert_equal(len(forest), 10) forest.set_params(n_jobs=1) y1 = forest.predict(X) forest.set_params(n_jobs=2) y2 = forest.predict(X) assert_array_almost_equal(y1, y2, 3)
[ "def", "check_parallel", "(", "name", ",", "X", ",", "y", ")", ":", "ForestEstimator", "=", "FOREST_ESTIMATORS", "[", "name", "]", "forest", "=", "ForestEstimator", "(", "n_estimators", "=", "10", ",", "n_jobs", "=", "3", ",", "random_state", "=", "0", ")", "forest", ".", "fit", "(", "X", ",", "y", ")", "assert_equal", "(", "len", "(", "forest", ")", ",", "10", ")", "forest", ".", "set_params", "(", "n_jobs", "=", "1", ")", "y1", "=", "forest", ".", "predict", "(", "X", ")", "forest", ".", "set_params", "(", "n_jobs", "=", "2", ")", "y2", "=", "forest", ".", "predict", "(", "X", ")", "assert_array_almost_equal", "(", "y1", ",", "y2", ",", "3", ")" ]
check parallel computations in classification .
train
false
49,319
def xml_escape(text): return escape(text, entities={"'": '&apos;', '"': '&quot;', '|': '&#124;', '[': '&#91;', ']': '&#93;'})
[ "def", "xml_escape", "(", "text", ")", ":", "return", "escape", "(", "text", ",", "entities", "=", "{", "\"'\"", ":", "'&apos;'", ",", "'\"'", ":", "'&quot;'", ",", "'|'", ":", "'&#124;'", ",", "'['", ":", "'&#91;'", ",", "']'", ":", "'&#93;'", "}", ")" ]
this function transforms the input text into an "escaped" version suitable for well-formed xml formatting .
train
false
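Assuming the snippet's escape is xml.sax.saxutils.escape (which accepts an extra entities mapping), a quick check:

from xml.sax.saxutils import escape  # assumption: source of the snippet's `escape`

print(xml_escape('a < b & "c" [d|e]'))
# a &lt; b &amp; &quot;c&quot; &#91;d&#124;e&#93;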
49,321
def requires_basic_auth(func): @wraps(func) def auth_wrapper(self, *args, **kwargs): if (hasattr(self, 'session') and self.session.auth): return func(self, *args, **kwargs) else: from .exceptions import error_for r = generate_fake_error_response('{"message": "Requires username/password authentication"}') raise error_for(r) return auth_wrapper
[ "def", "requires_basic_auth", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "auth_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "hasattr", "(", "self", ",", "'session'", ")", "and", "self", ".", "session", ".", "auth", ")", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "from", ".", "exceptions", "import", "error_for", "r", "=", "generate_fake_error_response", "(", "'{\"message\": \"Requires username/password authentication\"}'", ")", "raise", "error_for", "(", "r", ")", "return", "auth_wrapper" ]
specific authentication decorator .
train
false
49,322
def test_no_dupes(): request = HttpRequest() setattr(request, '_messages', default_storage(request)) info(request, 'Title', 'Body') info(request, 'Title', 'Body') info(request, 'Another Title', 'Another Body') storage = django_messages.get_messages(request) assert (len(storage) == 2), 'Too few or too many messages recorded.'
[ "def", "test_no_dupes", "(", ")", ":", "request", "=", "HttpRequest", "(", ")", "setattr", "(", "request", ",", "'_messages'", ",", "default_storage", "(", "request", ")", ")", "info", "(", "request", ",", "'Title'", ",", "'Body'", ")", "info", "(", "request", ",", "'Title'", ",", "'Body'", ")", "info", "(", "request", ",", "'Another Title'", ",", "'Another Body'", ")", "storage", "=", "django_messages", ".", "get_messages", "(", "request", ")", "assert", "(", "len", "(", "storage", ")", "==", "2", ")", ",", "'Too few or too many messages recorded.'" ]
test that duplicate messages aren't saved .
train
false
49,323
def get_compute_capability(device_id=None, verbose=False): try: import pycuda import pycuda.driver as drv except ImportError: if verbose: neon_logger.display('PyCUDA module not found') return 0 try: drv.init() except pycuda._driver.RuntimeError as e: neon_logger.display('PyCUDA Runtime error: {0}'.format(str(e))) return 0 major_string = pycuda._driver.device_attribute.COMPUTE_CAPABILITY_MAJOR minor_string = pycuda._driver.device_attribute.COMPUTE_CAPABILITY_MINOR full_version = [] if (device_id is None): device_id = list(range(drv.Device.count())) elif isinstance(device_id, int): device_id = [device_id] for i in device_id: major = drv.Device(i).get_attribute(major_string) minor = drv.Device(i).get_attribute(minor_string) full_version += [(major + (minor / 10.0))] if verbose: neon_logger.display('Found GPU(s) with compute capability: {}'.format(full_version)) return max(full_version)
[ "def", "get_compute_capability", "(", "device_id", "=", "None", ",", "verbose", "=", "False", ")", ":", "try", ":", "import", "pycuda", "import", "pycuda", ".", "driver", "as", "drv", "except", "ImportError", ":", "if", "verbose", ":", "neon_logger", ".", "display", "(", "'PyCUDA module not found'", ")", "return", "0", "try", ":", "drv", ".", "init", "(", ")", "except", "pycuda", ".", "_driver", ".", "RuntimeError", "as", "e", ":", "neon_logger", ".", "display", "(", "'PyCUDA Runtime error: {0}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "return", "0", "major_string", "=", "pycuda", ".", "_driver", ".", "device_attribute", ".", "COMPUTE_CAPABILITY_MAJOR", "minor_string", "=", "pycuda", ".", "_driver", ".", "device_attribute", ".", "COMPUTE_CAPABILITY_MINOR", "full_version", "=", "[", "]", "if", "(", "device_id", "is", "None", ")", ":", "device_id", "=", "list", "(", "range", "(", "drv", ".", "Device", ".", "count", "(", ")", ")", ")", "elif", "isinstance", "(", "device_id", ",", "int", ")", ":", "device_id", "=", "[", "device_id", "]", "for", "i", "in", "device_id", ":", "major", "=", "drv", ".", "Device", "(", "i", ")", ".", "get_attribute", "(", "major_string", ")", "minor", "=", "drv", ".", "Device", "(", "i", ")", ".", "get_attribute", "(", "minor_string", ")", "full_version", "+=", "[", "(", "major", "+", "(", "minor", "/", "10.0", ")", ")", "]", "if", "verbose", ":", "neon_logger", ".", "display", "(", "'Found GPU(s) with compute capability: {}'", ".", "format", "(", "full_version", ")", ")", "return", "max", "(", "full_version", ")" ]
query compute capability through pycuda and check whether it is 5.0 or greater .
train
false
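A minimal usage sketch for get_compute_capability above; PyCUDA, a visible CUDA device, and the surrounding neon_logger setup are all assumptions here.
cc = get_compute_capability(verbose=True)  # probe every visible device
if cc >= 5.0:
    print('found a Maxwell-class or newer GPU (compute %.1f)' % cc)
else:
    print('no suitable GPU detected; cc == 0 also means PyCUDA is missing')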
49,324
def sorted_fields(fields): recursed = [dict(field, fields=sorted_fields(field['fields'])) for field in fields] return sorted(recursed, key=(lambda field: field['id']))
[ "def", "sorted_fields", "(", "fields", ")", ":", "recursed", "=", "[", "dict", "(", "field", ",", "fields", "=", "sorted_fields", "(", "field", "[", "'fields'", "]", ")", ")", "for", "field", "in", "fields", "]", "return", "sorted", "(", "recursed", ",", "key", "=", "(", "lambda", "field", ":", "field", "[", "'id'", "]", ")", ")" ]
recursively sort field lists to ease comparison .
train
false
49,325
def abort_merge(): title = N_(u'Abort Merge...') txt = N_(u'Aborting the current merge will cause *ALL* uncommitted changes to be lost.\nRecovering uncommitted changes is not possible.') info_txt = N_(u'Aborting the current merge?') ok_txt = N_(u'Abort Merge') if qtutils.confirm(title, txt, info_txt, ok_txt, default=False, icon=icons.undo()): gitcmds.abort_merge()
[ "def", "abort_merge", "(", ")", ":", "title", "=", "N_", "(", "u'Abort Merge...'", ")", "txt", "=", "N_", "(", "u'Aborting the current merge will cause *ALL* uncommitted changes to be lost.\\nRecovering uncommitted changes is not possible.'", ")", "info_txt", "=", "N_", "(", "u'Aborting the current merge?'", ")", "ok_txt", "=", "N_", "(", "u'Abort Merge'", ")", "if", "qtutils", ".", "confirm", "(", "title", ",", "txt", ",", "info_txt", ",", "ok_txt", ",", "default", "=", "False", ",", "icon", "=", "icons", ".", "undo", "(", ")", ")", ":", "gitcmds", ".", "abort_merge", "(", ")" ]
abort a merge by reading the tree at head .
train
false
49,326
def ServerEnum(): resume = 0 while 1: (data, total, resume) = win32net.NetServerEnum(server, 100, win32netcon.SV_TYPE_ALL, None, resume) for s in data: verbose(('Found server %s' % s['name'])) shareresume = 0 while 1: (sharedata, total, shareresume) = win32net.NetShareEnum(server, 2, shareresume) for share in sharedata: verbose((' %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users' % share)) if (not shareresume): break if (not resume): break print 'Enumerated all the servers on the network'
[ "def", "ServerEnum", "(", ")", ":", "resume", "=", "0", "while", "1", ":", "(", "data", ",", "total", ",", "resume", ")", "=", "win32net", ".", "NetServerEnum", "(", "server", ",", "100", ",", "win32netcon", ".", "SV_TYPE_ALL", ",", "None", ",", "resume", ")", "for", "s", "in", "data", ":", "verbose", "(", "(", "'Found server %s'", "%", "s", "[", "'name'", "]", ")", ")", "shareresume", "=", "0", "while", "1", ":", "(", "sharedata", ",", "total", ",", "shareresume", ")", "=", "win32net", ".", "NetShareEnum", "(", "server", ",", "2", ",", "shareresume", ")", "for", "share", "in", "sharedata", ":", "verbose", "(", "(", "' %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users'", "%", "share", ")", ")", "if", "(", "not", "shareresume", ")", ":", "break", "if", "(", "not", "resume", ")", ":", "break", "print", "'Enumerated all the servers on the network'" ]
enumerates all servers on the network .
train
false
49,327
def cross_validate(features, labels): error = 0.0 for fold in range(10): training = np.ones(len(features), bool) training[fold::10] = 0 testing = (~ training) model = fit_model(1, features[training], labels[training]) test_error = accuracy(features[testing], labels[testing], model) error += test_error return (error / 10.0)
[ "def", "cross_validate", "(", "features", ",", "labels", ")", ":", "error", "=", "0.0", "for", "fold", "in", "range", "(", "10", ")", ":", "training", "=", "np", ".", "ones", "(", "len", "(", "features", ")", ",", "bool", ")", "training", "[", "fold", ":", ":", "10", "]", "=", "0", "testing", "=", "(", "~", "training", ")", "model", "=", "fit_model", "(", "1", ",", "features", "[", "training", "]", ",", "labels", "[", "training", "]", ")", "test_error", "=", "accuracy", "(", "features", "[", "testing", "]", ",", "labels", "[", "testing", "]", ",", "model", ")", "error", "+=", "test_error", "return", "(", "error", "/", "10.0", ")" ]
compute cross-validation errors .
train
false
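A usage sketch for cross_validate; the fit_model and accuracy helpers are assumed to come from the same module as the snippet, and the data below is made up.
import numpy as np
features = np.random.rand(150, 4)            # 150 samples, 4 features
labels = np.random.randint(0, 3, size=150)   # 3 class labels
mean_error = cross_validate(features, labels)
print('10-fold cross-validation error: %.3f' % mean_error)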
49,328
def worker_status(worker, profile='default'): config = get_running(profile) try: return {'activation': config['worker.{0}.activation'.format(worker)], 'state': config['worker.{0}.state'.format(worker)]} except KeyError: return False
[ "def", "worker_status", "(", "worker", ",", "profile", "=", "'default'", ")", ":", "config", "=", "get_running", "(", "profile", ")", "try", ":", "return", "{", "'activation'", ":", "config", "[", "'worker.{0}.activation'", ".", "format", "(", "worker", ")", "]", ",", "'state'", ":", "config", "[", "'worker.{0}.state'", ".", "format", "(", "worker", ")", "]", "}", "except", "KeyError", ":", "return", "False" ]
return the state of the worker .
train
true
49,330
def pad_binary_digits(bin_octet): while True: if (len(bin_octet) >= 8): break bin_octet = ('0' + bin_octet) return bin_octet
[ "def", "pad_binary_digits", "(", "bin_octet", ")", ":", "while", "True", ":", "if", "(", "len", "(", "bin_octet", ")", ">=", "8", ")", ":", "break", "bin_octet", "=", "(", "'0'", "+", "bin_octet", ")", "return", "bin_octet" ]
pad the binary number to eight digits .
train
false
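A worked example for pad_binary_digits: bin() drops leading zeros, so short octets are padded back to eight digits.
assert pad_binary_digits('101') == '00000101'
assert pad_binary_digits(bin(172)[2:]) == '10101100'   # bin(172) == '0b10101100'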
49,331
def ftest_anova_power(effect_size, nobs, alpha, k_groups=2, df=None): df_num = (nobs - k_groups) df_denom = (k_groups - 1) crit = stats.f.isf(alpha, df_denom, df_num) pow_ = stats.ncf.sf(crit, df_denom, df_num, ((effect_size ** 2) * nobs)) return pow_
[ "def", "ftest_anova_power", "(", "effect_size", ",", "nobs", ",", "alpha", ",", "k_groups", "=", "2", ",", "df", "=", "None", ")", ":", "df_num", "=", "(", "nobs", "-", "k_groups", ")", "df_denom", "=", "(", "k_groups", "-", "1", ")", "crit", "=", "stats", ".", "f", ".", "isf", "(", "alpha", ",", "df_denom", ",", "df_num", ")", "pow_", "=", "stats", ".", "ncf", ".", "sf", "(", "crit", ",", "df_denom", ",", "df_num", ",", "(", "(", "effect_size", "**", "2", ")", "*", "nobs", ")", ")", "return", "pow_" ]
power of the f-test for one-way anova with k equal-sized groups, where nobs is the total sample size .
train
false
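A usage sketch for ftest_anova_power; scipy must be installed (the snippet assumes scipy.stats is bound to the name stats), and the parameter values below are illustrative only.
from scipy import stats  # the snippet resolves stats.f / stats.ncf from scipy.stats
power = ftest_anova_power(effect_size=0.5, nobs=40, alpha=0.05, k_groups=4)
print('power: %.3f' % power)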
49,333
def mkdir(path, owner=None, grant_perms=None, deny_perms=None, inheritance=True): drive = os.path.splitdrive(path)[0] if (not os.path.isdir(drive)): raise CommandExecutionError('Drive {0} is not mapped'.format(drive)) path = os.path.expanduser(path) path = os.path.expandvars(path) if (not os.path.isdir(path)): os.mkdir(path) if owner: salt.utils.win_dacl.set_owner(path, owner) set_perms(path, grant_perms, deny_perms, inheritance) return True
[ "def", "mkdir", "(", "path", ",", "owner", "=", "None", ",", "grant_perms", "=", "None", ",", "deny_perms", "=", "None", ",", "inheritance", "=", "True", ")", ":", "drive", "=", "os", ".", "path", ".", "splitdrive", "(", "path", ")", "[", "0", "]", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "drive", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Drive {0} is not mapped'", ".", "format", "(", "drive", ")", ")", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "path", "=", "os", ".", "path", ".", "expandvars", "(", "path", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ")", ":", "os", ".", "mkdir", "(", "path", ")", "if", "owner", ":", "salt", ".", "utils", ".", "win_dacl", ".", "set_owner", "(", "path", ",", "owner", ")", "set_perms", "(", "path", ",", "grant_perms", ",", "deny_perms", ",", "inheritance", ")", "return", "True" ]
ensure the directory exists and that its owner and permissions are set correctly .
train
false
49,334
def link_source_files(generator): posts = [getattr(generator, attr, None) for attr in PROCESS if (getattr(generator, attr, None) is not None)] for post in posts[0]: if ((not ('SHOW_SOURCE_ON_SIDEBAR' in generator.settings)) and (not ('SHOW_SOURCE_IN_SECTION' in generator.settings))): return if (('SHOW_SOURCE_ALL_POSTS' in generator.settings) or ('show_source' in post.metadata)): show_source_filename = generator.settings.get('SHOW_SOURCE_FILENAME', '{}.txt'.format(post.slug)) try: source_out = os.path.join(post.settings['OUTPUT_PATH'], post.save_as) source_out_path = os.path.split(source_out)[0] copy_to = os.path.join(source_out_path, show_source_filename) source_url = urljoin(post.save_as, show_source_filename) except Exception: return out = dict() out['copy_raw_from'] = post.source_path out['copy_raw_to'] = copy_to logger.debug('Linked %s to %s', post.source_path, copy_to) source_files.append(out) post.show_source_url = source_url
[ "def", "link_source_files", "(", "generator", ")", ":", "posts", "=", "[", "getattr", "(", "generator", ",", "attr", ",", "None", ")", "for", "attr", "in", "PROCESS", "if", "(", "getattr", "(", "generator", ",", "attr", ",", "None", ")", "is", "not", "None", ")", "]", "for", "post", "in", "posts", "[", "0", "]", ":", "if", "(", "(", "not", "(", "'SHOW_SOURCE_ON_SIDEBAR'", "in", "generator", ".", "settings", ")", ")", "and", "(", "not", "(", "'SHOW_SOURCE_IN_SECTION'", "in", "generator", ".", "settings", ")", ")", ")", ":", "return", "if", "(", "(", "'SHOW_SOURCE_ALL_POSTS'", "in", "generator", ".", "settings", ")", "or", "(", "'show_source'", "in", "post", ".", "metadata", ")", ")", ":", "show_source_filename", "=", "generator", ".", "settings", ".", "get", "(", "'SHOW_SOURCE_FILENAME'", ",", "'{}.txt'", ".", "format", "(", "post", ".", "slug", ")", ")", "try", ":", "source_out", "=", "os", ".", "path", ".", "join", "(", "post", ".", "settings", "[", "'OUTPUT_PATH'", "]", ",", "post", ".", "save_as", ")", "source_out_path", "=", "os", ".", "path", ".", "split", "(", "source_out", ")", "[", "0", "]", "copy_to", "=", "os", ".", "path", ".", "join", "(", "source_out_path", ",", "show_source_filename", ")", "source_url", "=", "urljoin", "(", "post", ".", "save_as", ",", "show_source_filename", ")", "except", "Exception", ":", "return", "out", "=", "dict", "(", ")", "out", "[", "'copy_raw_from'", "]", "=", "post", ".", "source_path", "out", "[", "'copy_raw_to'", "]", "=", "copy_to", "logger", ".", "debug", "(", "'Linked %s to %s'", ",", "post", ".", "source_path", ",", "copy_to", ")", "source_files", ".", "append", "(", "out", ")", "post", ".", "show_source_url", "=", "source_url" ]
processes each article/page object and formulates copy-from and copy-to destinations .
train
true
49,335
def _build_qsub_command(cmd, job_name, outfile, errfile, pe, n_cpu): qsub_template = 'echo {cmd} | qsub -o ":{outfile}" -e ":{errfile}" -V -r y -pe {pe} {n_cpu} -N {job_name}' return qsub_template.format(cmd=cmd, job_name=job_name, outfile=outfile, errfile=errfile, pe=pe, n_cpu=n_cpu)
[ "def", "_build_qsub_command", "(", "cmd", ",", "job_name", ",", "outfile", ",", "errfile", ",", "pe", ",", "n_cpu", ")", ":", "qsub_template", "=", "'echo {cmd} | qsub -o \":{outfile}\" -e \":{errfile}\" -V -r y -pe {pe} {n_cpu} -N {job_name}'", "return", "qsub_template", ".", "format", "(", "cmd", "=", "cmd", ",", "job_name", "=", "job_name", ",", "outfile", "=", "outfile", ",", "errfile", "=", "errfile", ",", "pe", "=", "pe", ",", "n_cpu", "=", "n_cpu", ")" ]
build the qsub command that submits a shell command to an sge queue .
train
true
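A worked example for _build_qsub_command; note the function only formats the command string, submission happens elsewhere.
cmd = _build_qsub_command('python work.py', 'job1', '/tmp/out', '/tmp/err', 'orte', 4)
# cmd == 'echo python work.py | qsub -o ":/tmp/out" -e ":/tmp/err" -V -r y -pe orte 4 -N job1'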
49,336
@mock_ec2 def test_igw_desribe_bad_id(): conn = boto.connect_vpc(u'the_key', u'the_secret') with assert_raises(EC2ResponseError) as cm: conn.get_all_internet_gateways([BAD_IGW]) cm.exception.code.should.equal(u'InvalidInternetGatewayID.NotFound') cm.exception.status.should.equal(400) cm.exception.request_id.should_not.be.none
[ "@", "mock_ec2", "def", "test_igw_desribe_bad_id", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "with", "assert_raises", "(", "EC2ResponseError", ")", "as", "cm", ":", "conn", ".", "get_all_internet_gateways", "(", "[", "BAD_IGW", "]", ")", "cm", ".", "exception", ".", "code", ".", "should", ".", "equal", "(", "u'InvalidInternetGatewayID.NotFound'", ")", "cm", ".", "exception", ".", "status", ".", "should", ".", "equal", "(", "400", ")", "cm", ".", "exception", ".", "request_id", ".", "should_not", ".", "be", ".", "none" ]
internet gateway fetch fails with a bad id .
train
false
49,337
def findmatch(caps, MIMEtype, key='view', filename='/dev/null', plist=[]): entries = lookup(caps, MIMEtype, key) for e in entries: if ('test' in e): test = subst(e['test'], filename, plist) if (test and (os.system(test) != 0)): continue command = subst(e[key], MIMEtype, filename, plist) return (command, e) return (None, None)
[ "def", "findmatch", "(", "caps", ",", "MIMEtype", ",", "key", "=", "'view'", ",", "filename", "=", "'/dev/null'", ",", "plist", "=", "[", "]", ")", ":", "entries", "=", "lookup", "(", "caps", ",", "MIMEtype", ",", "key", ")", "for", "e", "in", "entries", ":", "if", "(", "'test'", "in", "e", ")", ":", "test", "=", "subst", "(", "e", "[", "'test'", "]", ",", "filename", ",", "plist", ")", "if", "(", "test", "and", "(", "os", ".", "system", "(", "test", ")", "!=", "0", ")", ")", ":", "continue", "command", "=", "subst", "(", "e", "[", "key", "]", ",", "MIMEtype", ",", "filename", ",", "plist", ")", "return", "(", "command", ",", "e", ")", "return", "(", "None", ",", "None", ")" ]
find a match for a mailcap entry .
train
false
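A usage sketch for findmatch with a hand-built caps dict; the lookup and subst helpers are assumed to behave like their stdlib mailcap counterparts.
caps = {'video/mpeg': [{'view': 'xmpeg %s'}]}
command, entry = findmatch(caps, 'video/mpeg', filename='movie.mpg')
# command == 'xmpeg movie.mpg' once subst() replaces %s with the filename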
49,338
def _get_user_from_environ(): assert ('OAUTH_EMAIL' in os.environ) assert ('OAUTH_AUTH_DOMAIN' in os.environ) assert ('OAUTH_USER_ID' in os.environ) return users.User(email=os.environ['OAUTH_EMAIL'], _auth_domain=os.environ['OAUTH_AUTH_DOMAIN'], _user_id=os.environ['OAUTH_USER_ID'])
[ "def", "_get_user_from_environ", "(", ")", ":", "assert", "(", "'OAUTH_EMAIL'", "in", "os", ".", "environ", ")", "assert", "(", "'OAUTH_AUTH_DOMAIN'", "in", "os", ".", "environ", ")", "assert", "(", "'OAUTH_USER_ID'", "in", "os", ".", "environ", ")", "return", "users", ".", "User", "(", "email", "=", "os", ".", "environ", "[", "'OAUTH_EMAIL'", "]", ",", "_auth_domain", "=", "os", ".", "environ", "[", "'OAUTH_AUTH_DOMAIN'", "]", ",", "_user_id", "=", "os", ".", "environ", "[", "'OAUTH_USER_ID'", "]", ")" ]
returns a user based on values stored in os.environ .
train
false
49,339
def _mag(x): from math import log10, ceil, log from sympy import Float xpos = abs(x.n()) if (not xpos): return S.Zero try: mag_first_dig = int(ceil(log10(xpos))) except (ValueError, OverflowError): mag_first_dig = int(ceil((Float(mpf_log(xpos._mpf_, 53)) / log(10)))) if ((xpos / (10 ** mag_first_dig)) >= 1): assert (1 <= (xpos / (10 ** mag_first_dig)) < 10) mag_first_dig += 1 return mag_first_dig
[ "def", "_mag", "(", "x", ")", ":", "from", "math", "import", "log10", ",", "ceil", ",", "log", "from", "sympy", "import", "Float", "xpos", "=", "abs", "(", "x", ".", "n", "(", ")", ")", "if", "(", "not", "xpos", ")", ":", "return", "S", ".", "Zero", "try", ":", "mag_first_dig", "=", "int", "(", "ceil", "(", "log10", "(", "xpos", ")", ")", ")", "except", "(", "ValueError", ",", "OverflowError", ")", ":", "mag_first_dig", "=", "int", "(", "ceil", "(", "(", "Float", "(", "mpf_log", "(", "xpos", ".", "_mpf_", ",", "53", ")", ")", "/", "log", "(", "10", ")", ")", ")", ")", "if", "(", "(", "xpos", "/", "(", "10", "**", "mag_first_dig", ")", ")", ">=", "1", ")", ":", "assert", "(", "1", "<=", "(", "xpos", "/", "(", "10", "**", "mag_first_dig", ")", ")", "<", "10", ")", "mag_first_dig", "+=", "1", "return", "mag_first_dig" ]
return integer i such that 0.1 <= x/10**i < 1 .
train
false
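Worked examples for the sympy-internal _mag helper; the sketch assumes it is importable, and the returned i always satisfies 0.1 <= x/10**i < 1.
from sympy import Float
assert _mag(Float(0.123)) == 0   # 0.123 / 10**0 = 0.123
assert _mag(Float(12.3)) == 2    # 12.3  / 10**2 = 0.123
assert _mag(Float(100)) == 3     # 100   / 10**3 = 0.1 (exact powers bump i)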
49,340
def except_on_missing_scheme(url): (scheme, netloc, path, params, query, fragment) = urlparse(url) if (not scheme): raise MissingSchema('Proxy URLs must have explicit schemes.')
[ "def", "except_on_missing_scheme", "(", "url", ")", ":", "(", "scheme", ",", "netloc", ",", "path", ",", "params", ",", "query", ",", "fragment", ")", "=", "urlparse", "(", "url", ")", "if", "(", "not", "scheme", ")", ":", "raise", "MissingSchema", "(", "'Proxy URLs must have explicit schemes.'", ")" ]
given a url, raise a missingschema exception if it lacks an explicit scheme .
train
false
49,341
def test_approve_addons_get_review_type(use_case): (addon, file1, _, review_type) = use_case assert (approve_addons.get_review_type(file1) == review_type)
[ "def", "test_approve_addons_get_review_type", "(", "use_case", ")", ":", "(", "addon", ",", "file1", ",", "_", ",", "review_type", ")", "=", "use_case", "assert", "(", "approve_addons", ".", "get_review_type", "(", "file1", ")", "==", "review_type", ")" ]
review type depends on the file and addon status .
train
false
49,345
def serialize_graftpoints(graftpoints): graft_lines = [] for (commit, parents) in graftpoints.items(): if parents: graft_lines.append(((commit + ' ') + ' '.join(parents))) else: graft_lines.append(commit) return '\n'.join(graft_lines)
[ "def", "serialize_graftpoints", "(", "graftpoints", ")", ":", "graft_lines", "=", "[", "]", "for", "(", "commit", ",", "parents", ")", "in", "graftpoints", ".", "items", "(", ")", ":", "if", "parents", ":", "graft_lines", ".", "append", "(", "(", "(", "commit", "+", "' '", ")", "+", "' '", ".", "join", "(", "parents", ")", ")", ")", "else", ":", "graft_lines", ".", "append", "(", "commit", ")", "return", "'\\n'", ".", "join", "(", "graft_lines", ")" ]
convert a dictionary of graftpoints into a string . the graft dictionary is: <commit sha1>: [<parent sha1>*] . each line is formatted as: <commit sha1> <parent sha1> [<parent sha1>]* .
train
false
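A worked example for serialize_graftpoints; the short hex strings below are placeholder sha1s, and line order follows dict iteration order.
grafts = {'abc123': ['def456', '789abc'], 'fed321': []}
print(serialize_graftpoints(grafts))
# abc123 def456 789abc
# fed321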
49,346
def getRequest(request): return request.session.get('openid_request')
[ "def", "getRequest", "(", "request", ")", ":", "return", "request", ".", "session", ".", "get", "(", "'openid_request'", ")" ]
get an openid request from the session .
train
false
49,347
@core.flake8ext @core.off_by_default def check_unittest_imports(logical_line): if (re.match(unittest_imports_from, logical_line) or re.match(unittest_imports_dot, logical_line)): msg = ("N334: '%s' must be used instead of '%s'." % (logical_line.replace('unittest', 'unittest2'), logical_line)) (yield (0, msg))
[ "@", "core", ".", "flake8ext", "@", "core", ".", "off_by_default", "def", "check_unittest_imports", "(", "logical_line", ")", ":", "if", "(", "re", ".", "match", "(", "unittest_imports_from", ",", "logical_line", ")", "or", "re", ".", "match", "(", "unittest_imports_dot", ",", "logical_line", ")", ")", ":", "msg", "=", "(", "\"N334: '%s' must be used instead of '%s'.\"", "%", "(", "logical_line", ".", "replace", "(", "'unittest'", ",", "'unittest2'", ")", ",", "logical_line", ")", ")", "(", "yield", "(", "0", ",", "msg", ")", ")" ]
n334 - use unittest2 instead of unittest .
train
false
49,348
def requirements(): requirements_list = [] with open('requirements.txt') as requirements: for install in requirements: requirements_list.append(install.strip()) return requirements_list
[ "def", "requirements", "(", ")", ":", "requirements_list", "=", "[", "]", "with", "open", "(", "'requirements.txt'", ")", "as", "requirements", ":", "for", "install", "in", "requirements", ":", "requirements_list", ".", "append", "(", "install", ".", "strip", "(", ")", ")", "return", "requirements_list" ]
build the requirements list for this project .
train
true
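A typical setup.py usage sketch for requirements(); it assumes requirements.txt sits in the working directory, and the project metadata is made up.
from setuptools import setup
setup(name='example-project', version='0.1.0', install_requires=requirements())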
49,349
def get_comment_items(srs, src, count=4): link_fullnames = normalized_hot([sr._id for sr in srs]) hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False) top_comments = [] for link in hot_links: builder = CommentBuilder(link, operators.desc('_confidence'), comment=None, context=None, num=1, load_more=False) listing = NestedListing(builder, parent_name=link._fullname).listing() top_comments.extend(listing.things) srs = Subreddit._byID([com.sr_id for com in top_comments]) links = Link._byID([com.link_id for com in top_comments]) comment_items = [ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com) for com in top_comments] return comment_items
[ "def", "get_comment_items", "(", "srs", ",", "src", ",", "count", "=", "4", ")", ":", "link_fullnames", "=", "normalized_hot", "(", "[", "sr", ".", "_id", "for", "sr", "in", "srs", "]", ")", "hot_links", "=", "Link", ".", "_by_fullname", "(", "link_fullnames", "[", ":", "count", "]", ",", "return_dict", "=", "False", ")", "top_comments", "=", "[", "]", "for", "link", "in", "hot_links", ":", "builder", "=", "CommentBuilder", "(", "link", ",", "operators", ".", "desc", "(", "'_confidence'", ")", ",", "comment", "=", "None", ",", "context", "=", "None", ",", "num", "=", "1", ",", "load_more", "=", "False", ")", "listing", "=", "NestedListing", "(", "builder", ",", "parent_name", "=", "link", ".", "_fullname", ")", ".", "listing", "(", ")", "top_comments", ".", "extend", "(", "listing", ".", "things", ")", "srs", "=", "Subreddit", ".", "_byID", "(", "[", "com", ".", "sr_id", "for", "com", "in", "top_comments", "]", ")", "links", "=", "Link", ".", "_byID", "(", "[", "com", ".", "link_id", "for", "com", "in", "top_comments", "]", ")", "comment_items", "=", "[", "ExploreItem", "(", "TYPE_COMMENT", ",", "src", ",", "srs", "[", "com", ".", "sr_id", "]", ",", "links", "[", "com", ".", "link_id", "]", ",", "com", ")", "for", "com", "in", "top_comments", "]", "return", "comment_items" ]
get hot links from srs and build explore items from each link's top comment .
train
false
49,350
def test_arangedataset(): preprocessor = RemoveMean() dataset = ArangeDataset(1000, preprocessor=preprocessor, fit_preprocessor=True) dataset_no_preprocessing = ArangeDataset(1000) assert (dataset.get_data() != dataset_no_preprocessing.get_data()).any()
[ "def", "test_arangedataset", "(", ")", ":", "preprocessor", "=", "RemoveMean", "(", ")", "dataset", "=", "ArangeDataset", "(", "1000", ",", "preprocessor", "=", "preprocessor", ",", "fit_preprocessor", "=", "True", ")", "dataset_no_preprocessing", "=", "ArangeDataset", "(", "1000", ")", "assert", "(", "dataset", ".", "get_data", "(", ")", "!=", "dataset_no_preprocessing", ".", "get_data", "(", ")", ")", ".", "any", "(", ")" ]
this test verifies that arangedataset can be used with preprocessors .
train
false
49,353
def diff_hists(h1, h2): for k in h1: if (k not in h2): h2[k] = 0 if (h1[k] != h2[k]): print ('%s: %d -> %d (%s%d)' % (k, h1[k], h2[k], (((h2[k] > h1[k]) and '+') or ''), (h2[k] - h1[k])))
[ "def", "diff_hists", "(", "h1", ",", "h2", ")", ":", "for", "k", "in", "h1", ":", "if", "(", "k", "not", "in", "h2", ")", ":", "h2", "[", "k", "]", "=", "0", "if", "(", "h1", "[", "k", "]", "!=", "h2", "[", "k", "]", ")", ":", "print", "(", "'%s: %d -> %d (%s%d)'", "%", "(", "k", ",", "h1", "[", "k", "]", ",", "h2", "[", "k", "]", ",", "(", "(", "(", "h2", "[", "k", "]", ">", "h1", "[", "k", "]", ")", "and", "'+'", ")", "or", "''", ")", ",", "(", "h2", "[", "k", "]", "-", "h1", "[", "k", "]", ")", ")", ")" ]
prints differences between two results of gc_histogram() .
train
false
49,354
def save_event_image(image_url, upload_path, ext='png', remove_after_upload=True): filename = '{filename}.{ext}'.format(filename=time.time(), ext=ext) file_path = get_path_of_temp_url(image_url) logo_file = UploadedFile(file_path, filename) url = upload(logo_file, upload_path) if remove_after_upload: os.remove(file_path) return (url if url else '')
[ "def", "save_event_image", "(", "image_url", ",", "upload_path", ",", "ext", "=", "'png'", ",", "remove_after_upload", "=", "True", ")", ":", "filename", "=", "'{filename}.{ext}'", ".", "format", "(", "filename", "=", "time", ".", "time", "(", ")", ",", "ext", "=", "ext", ")", "file_path", "=", "get_path_of_temp_url", "(", "image_url", ")", "logo_file", "=", "UploadedFile", "(", "file_path", ",", "filename", ")", "url", "=", "upload", "(", "logo_file", ",", "upload_path", ")", "if", "remove_after_upload", ":", "os", ".", "remove", "(", "file_path", ")", "return", "(", "url", "if", "url", "else", "''", ")" ]
save the event image fetched from the given url to the upload path .
train
false
49,355
def _validate_email(email): if (not isinstance(email, basestring)): raise AccountEmailInvalid(u'Email must be a string') if (len(email) < EMAIL_MIN_LENGTH): raise AccountEmailInvalid(u"Email '{email}' must be at least {min} characters long".format(email=email, min=EMAIL_MIN_LENGTH)) if (len(email) > EMAIL_MAX_LENGTH): raise AccountEmailInvalid(u"Email '{email}' must be at most {max} characters long".format(email=email, max=EMAIL_MAX_LENGTH)) try: validate_email(email) except ValidationError: raise AccountEmailInvalid(u"Email '{email}' format is not valid".format(email=email))
[ "def", "_validate_email", "(", "email", ")", ":", "if", "(", "not", "isinstance", "(", "email", ",", "basestring", ")", ")", ":", "raise", "AccountEmailInvalid", "(", "u'Email must be a string'", ")", "if", "(", "len", "(", "email", ")", "<", "EMAIL_MIN_LENGTH", ")", ":", "raise", "AccountEmailInvalid", "(", "u\"Email '{email}' must be at least {min} characters long\"", ".", "format", "(", "email", "=", "email", ",", "min", "=", "EMAIL_MIN_LENGTH", ")", ")", "if", "(", "len", "(", "email", ")", ">", "EMAIL_MAX_LENGTH", ")", ":", "raise", "AccountEmailInvalid", "(", "u\"Email '{email}' must be at most {max} characters long\"", ".", "format", "(", "email", "=", "email", ",", "max", "=", "EMAIL_MAX_LENGTH", ")", ")", "try", ":", "validate_email", "(", "email", ")", "except", "ValidationError", ":", "raise", "AccountEmailInvalid", "(", "u\"Email '{email}' format is not valid\"", ".", "format", "(", "email", "=", "email", ")", ")" ]
validate the format of the email address .
train
false
49,357
def color_in_equal_space(hue, saturation=0.55, value=2.3): golden_ratio = ((1 + (5 ** 0.5)) / 2) hue += golden_ratio hue %= 1 return '#{:02X}{:02X}{:02X}'.format(*tuple((int((a * 100)) for a in hsv_to_rgb(hue, saturation, value))))
[ "def", "color_in_equal_space", "(", "hue", ",", "saturation", "=", "0.55", ",", "value", "=", "2.3", ")", ":", "golden_ratio", "=", "(", "(", "1", "+", "(", "5", "**", "0.5", ")", ")", "/", "2", ")", "hue", "+=", "golden_ratio", "hue", "%=", "1", "return", "'#{:02X}{:02X}{:02X}'", ".", "format", "(", "*", "tuple", "(", "(", "int", "(", "(", "a", "*", "100", ")", ")", "for", "a", "in", "hsv_to_rgb", "(", "hue", ",", "saturation", ",", "value", ")", ")", ")", ")" ]
given a numerical hue value that you want to assign a color to, return a hexadecimal color string for it .
train
false
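A usage sketch for color_in_equal_space; which module hsv_to_rgb comes from is an assumption (colorsys fits the 0-1 hue and the *100 scaling seen above).
palette = [color_in_equal_space(i * 0.1) for i in range(5)]
print(palette)   # five '#RRGGBB' strings, spread out by the golden-ratio step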
49,358
def write_workbook_rels(workbook): root = Element('Relationships', {'xmlns': 'http://schemas.openxmlformats.org/package/2006/relationships'}) for i in range(len(workbook.worksheets)): SubElement(root, 'Relationship', {'Id': ('rId%d' % (i + 1)), 'Type': 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet', 'Target': ('worksheets/sheet%s.xml' % (i + 1))}) rid = (len(workbook.worksheets) + 1) SubElement(root, 'Relationship', {'Id': ('rId%d' % rid), 'Target': 'sharedStrings.xml', 'Type': 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/sharedStrings'}) SubElement(root, 'Relationship', {'Id': ('rId%d' % (rid + 1)), 'Target': 'styles.xml', 'Type': 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles'}) SubElement(root, 'Relationship', {'Id': ('rId%d' % (rid + 2)), 'Target': 'theme/theme1.xml', 'Type': 'http://schemas.openxmlformats.org/officeDocument/2006/relationships/theme'}) return get_document_content(root)
[ "def", "write_workbook_rels", "(", "workbook", ")", ":", "root", "=", "Element", "(", "'Relationships'", ",", "{", "'xmlns'", ":", "'http://schemas.openxmlformats.org/package/2006/relationships'", "}", ")", "for", "i", "in", "range", "(", "len", "(", "workbook", ".", "worksheets", ")", ")", ":", "SubElement", "(", "root", ",", "'Relationship'", ",", "{", "'Id'", ":", "(", "'rId%d'", "%", "(", "i", "+", "1", ")", ")", ",", "'Type'", ":", "'http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet'", ",", "'Target'", ":", "(", "'worksheets/sheet%s.xml'", "%", "(", "i", "+", "1", ")", ")", "}", ")", "rid", "=", "(", "len", "(", "workbook", ".", "worksheets", ")", "+", "1", ")", "SubElement", "(", "root", ",", "'Relationship'", ",", "{", "'Id'", ":", "(", "'rId%d'", "%", "rid", ")", ",", "'Target'", ":", "'sharedStrings.xml'", ",", "'Type'", ":", "'http://schemas.openxmlformats.org/officeDocument/2006/relationships/sharedStrings'", "}", ")", "SubElement", "(", "root", ",", "'Relationship'", ",", "{", "'Id'", ":", "(", "'rId%d'", "%", "(", "rid", "+", "1", ")", ")", ",", "'Target'", ":", "'styles.xml'", ",", "'Type'", ":", "'http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles'", "}", ")", "SubElement", "(", "root", ",", "'Relationship'", ",", "{", "'Id'", ":", "(", "'rId%d'", "%", "(", "rid", "+", "2", ")", ")", ",", "'Target'", ":", "'theme/theme1.xml'", ",", "'Type'", ":", "'http://schemas.openxmlformats.org/officeDocument/2006/relationships/theme'", "}", ")", "return", "get_document_content", "(", "root", ")" ]
write the workbook relationships xml .
train
false
49,359
def _ordered_points(p): return tuple(sorted(p, key=(lambda x: x.args)))
[ "def", "_ordered_points", "(", "p", ")", ":", "return", "tuple", "(", "sorted", "(", "p", ",", "key", "=", "(", "lambda", "x", ":", "x", ".", "args", ")", ")", ")" ]
return the tuple of points sorted numerically according to args .
train
false
49,360
def get_primitive_instance_by_uuid(context, instance_uuid): instance = db.instance_get_by_uuid(context, instance_uuid) return jsonutils.to_primitive(instance)
[ "def", "get_primitive_instance_by_uuid", "(", "context", ",", "instance_uuid", ")", ":", "instance", "=", "db", ".", "instance_get_by_uuid", "(", "context", ",", "instance_uuid", ")", "return", "jsonutils", ".", "to_primitive", "(", "instance", ")" ]
helper method to get an instance and then convert it to a primitive form using jsonutils .
train
false
49,363
def add_driver(key, driver_class, force=False): try: drivers[key] if (not force): raise ValueError, ('A DB driver named "%s" is already installed' % key) except KeyError: pass drivers[key] = driver_class
[ "def", "add_driver", "(", "key", ",", "driver_class", ",", "force", "=", "False", ")", ":", "try", ":", "drivers", "[", "key", "]", "if", "(", "not", "force", ")", ":", "raise", "ValueError", ",", "(", "'A DB driver named \"%s\" is already installed'", "%", "key", ")", "except", "KeyError", ":", "pass", "drivers", "[", "key", "]", "=", "driver_class" ]
add a driver class to the list of drivers .
train
false
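A usage sketch for add_driver with a hypothetical driver class; force=True silences the duplicate-key ValueError.
class SQLiteDriver(object):
    pass

add_driver('sqlite', SQLiteDriver)
add_driver('sqlite', SQLiteDriver, force=True)   # re-registering without force raises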
49,365
def put_replication(Bucket, Role, Rules, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) Role = _get_role_arn(name=Role, region=region, key=key, keyid=keyid, profile=profile) if (Rules is None): Rules = [] elif isinstance(Rules, six.string_types): Rules = json.loads(Rules) conn.put_bucket_replication(Bucket=Bucket, ReplicationConfiguration={'Role': Role, 'Rules': Rules}) return {'updated': True, 'name': Bucket} except ClientError as e: return {'updated': False, 'error': __utils__['boto3.get_error'](e)}
[ "def", "put_replication", "(", "Bucket", ",", "Role", ",", "Rules", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "Role", "=", "_get_role_arn", "(", "name", "=", "Role", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "Rules", "is", "None", ")", ":", "Rules", "=", "[", "]", "elif", "isinstance", "(", "Rules", ",", "six", ".", "string_types", ")", ":", "Rules", "=", "json", ".", "loads", "(", "Rules", ")", "conn", ".", "put_bucket_replication", "(", "Bucket", "=", "Bucket", ",", "ReplicationConfiguration", "=", "{", "'Role'", ":", "Role", ",", "'Rules'", ":", "Rules", "}", ")", "return", "{", "'updated'", ":", "True", ",", "'name'", ":", "Bucket", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'updated'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
given a valid config, set the replication configuration on the given s3 bucket .
train
true
49,367
def notify_new_contributor(unit, user): mails = [] subscriptions = Profile.objects.subscribed_new_contributor(unit.translation.subproject.project, unit.translation.language, user) for subscription in subscriptions: mails.append(subscription.notify_new_contributor(unit.translation, user)) send_mails(mails)
[ "def", "notify_new_contributor", "(", "unit", ",", "user", ")", ":", "mails", "=", "[", "]", "subscriptions", "=", "Profile", ".", "objects", ".", "subscribed_new_contributor", "(", "unit", ".", "translation", ".", "subproject", ".", "project", ",", "unit", ".", "translation", ".", "language", ",", "user", ")", "for", "subscription", "in", "subscriptions", ":", "mails", ".", "append", "(", "subscription", ".", "notify_new_contributor", "(", "unit", ".", "translation", ",", "user", ")", ")", "send_mails", "(", "mails", ")" ]
notify about new contributor .
train
false
49,368
def os_listdir(path): warnings.warn('Use zipio.listdir instead of os_listdir', DeprecationWarning) return zipio.listdir(path)
[ "def", "os_listdir", "(", "path", ")", ":", "warnings", ".", "warn", "(", "'Use zipio.listdir instead of os_listdir'", ",", "DeprecationWarning", ")", "return", "zipio", ".", "listdir", "(", "path", ")" ]
deprecated name; use zipio.listdir instead .
train
false
49,369
def is_safe_filesystem_path_component(path): return (path and (not os.path.splitdrive(path)[0]) and (not os.path.split(path)[0]) and (path not in (os.curdir, os.pardir)) and (not path.startswith('.')) and (not path.endswith('~')))
[ "def", "is_safe_filesystem_path_component", "(", "path", ")", ":", "return", "(", "path", "and", "(", "not", "os", ".", "path", ".", "splitdrive", "(", "path", ")", "[", "0", "]", ")", "and", "(", "not", "os", ".", "path", ".", "split", "(", "path", ")", "[", "0", "]", ")", "and", "(", "path", "not", "in", "(", "os", ".", "curdir", ",", "os", ".", "pardir", ")", ")", "and", "(", "not", "path", ".", "startswith", "(", "'.'", ")", ")", "and", "(", "not", "path", ".", "endswith", "(", "'~'", ")", ")", ")" ]
check if path is a single component of a filesystem path .
train
false
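Worked examples for is_safe_filesystem_path_component: only a single, non-hidden component passes.
assert is_safe_filesystem_path_component('notes.txt')
assert not is_safe_filesystem_path_component('../notes.txt')   # has a directory part
assert not is_safe_filesystem_path_component('.hidden')        # leading dot
assert not is_safe_filesystem_path_component('draft~')         # editor backup suffix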
49,370
def list_logical_volumes(vg): (out, err) = execute('lvs', '--noheadings', '-o', 'lv_name', vg, run_as_root=True) return [line.strip() for line in out.splitlines()]
[ "def", "list_logical_volumes", "(", "vg", ")", ":", "(", "out", ",", "err", ")", "=", "execute", "(", "'lvs'", ",", "'--noheadings'", ",", "'-o'", ",", "'lv_name'", ",", "vg", ",", "run_as_root", "=", "True", ")", "return", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "out", ".", "splitlines", "(", ")", "]" ]
list logical volumes paths for given volume group .
train
false
49,372
def _create_diffs_for(current_path, subobj_a, subobj_b): if (subobj_a == subobj_b): return pvector([]) elif (isinstance(subobj_a, PClass) and isinstance(subobj_b, PClass)): a_dict = subobj_a._to_dict() b_dict = subobj_b._to_dict() return _create_diffs_for_mappings(current_path, a_dict, b_dict) elif (isinstance(subobj_a, PMap) and isinstance(subobj_b, PMap)): return _create_diffs_for_mappings(current_path, subobj_a, subobj_b) elif (isinstance(subobj_a, PSet) and isinstance(subobj_b, PSet)): return _create_diffs_for_sets(current_path, subobj_a, subobj_b) if (len(current_path) > 0): return pvector([_Set(path=current_path[:(-1)], key=current_path[(-1)], value=subobj_b)]) else: return pvector([_Replace(value=subobj_b)])
[ "def", "_create_diffs_for", "(", "current_path", ",", "subobj_a", ",", "subobj_b", ")", ":", "if", "(", "subobj_a", "==", "subobj_b", ")", ":", "return", "pvector", "(", "[", "]", ")", "elif", "(", "isinstance", "(", "subobj_a", ",", "PClass", ")", "and", "isinstance", "(", "subobj_b", ",", "PClass", ")", ")", ":", "a_dict", "=", "subobj_a", ".", "_to_dict", "(", ")", "b_dict", "=", "subobj_b", ".", "_to_dict", "(", ")", "return", "_create_diffs_for_mappings", "(", "current_path", ",", "a_dict", ",", "b_dict", ")", "elif", "(", "isinstance", "(", "subobj_a", ",", "PMap", ")", "and", "isinstance", "(", "subobj_b", ",", "PMap", ")", ")", ":", "return", "_create_diffs_for_mappings", "(", "current_path", ",", "subobj_a", ",", "subobj_b", ")", "elif", "(", "isinstance", "(", "subobj_a", ",", "PSet", ")", "and", "isinstance", "(", "subobj_b", ",", "PSet", ")", ")", ":", "return", "_create_diffs_for_sets", "(", "current_path", ",", "subobj_a", ",", "subobj_b", ")", "if", "(", "len", "(", "current_path", ")", ">", "0", ")", ":", "return", "pvector", "(", "[", "_Set", "(", "path", "=", "current_path", "[", ":", "(", "-", "1", ")", "]", ",", "key", "=", "current_path", "[", "(", "-", "1", ")", "]", ",", "value", "=", "subobj_b", ")", "]", ")", "else", ":", "return", "pvector", "(", "[", "_Replace", "(", "value", "=", "subobj_b", ")", "]", ")" ]
computes a series of _idiffchange objects to turn subobj_a into subobj_b, assuming that these subobjs are at current_path inside a nested pyrsistent object .
train
false
49,373
@never_cache def send_invitation_email(request, invite_pk): invite = get_object_or_404(Invite, pk=invite_pk) is_curator = invite.group.curators.filter(pk=request.user.userprofile.pk).exists() is_manager = request.user.userprofile.is_manager if (not (is_curator or is_manager)): raise Http404 notify_redeemer_invitation.delay(invite.pk, invite.group.invite_email_text) msg = _(u'Invitation to {0} has been sent successfully.'.format(invite.redeemer)) messages.success(request, msg) next_section = request.GET.get('next') next_url = urlparams(reverse('groups:group_edit', args=[invite.group.url]), next_section) return HttpResponseRedirect(next_url)
[ "@", "never_cache", "def", "send_invitation_email", "(", "request", ",", "invite_pk", ")", ":", "invite", "=", "get_object_or_404", "(", "Invite", ",", "pk", "=", "invite_pk", ")", "is_curator", "=", "invite", ".", "group", ".", "curators", ".", "filter", "(", "pk", "=", "request", ".", "user", ".", "userprofile", ".", "pk", ")", ".", "exists", "(", ")", "is_manager", "=", "request", ".", "user", ".", "userprofile", ".", "is_manager", "if", "(", "not", "(", "is_curator", "or", "is_manager", ")", ")", ":", "raise", "Http404", "notify_redeemer_invitation", ".", "delay", "(", "invite", ".", "pk", ",", "invite", ".", "group", ".", "invite_email_text", ")", "msg", "=", "_", "(", "u'Invitation to {0} has been sent successfully.'", ".", "format", "(", "invite", ".", "redeemer", ")", ")", "messages", ".", "success", "(", "request", ",", "msg", ")", "next_section", "=", "request", ".", "GET", ".", "get", "(", "'next'", ")", "next_url", "=", "urlparams", "(", "reverse", "(", "'groups:group_edit'", ",", "args", "=", "[", "invite", ".", "group", ".", "url", "]", ")", ",", "next_section", ")", "return", "HttpResponseRedirect", "(", "next_url", ")" ]
send group invitation email .
train
false
49,374
def sorted_division_locations(seq, npartitions=None, chunksize=None): if ((npartitions is None) == (chunksize is None)): raise ValueError('Exactly one of npartitions and chunksize must be specified.') if npartitions: chunksize = ceil((len(seq) / npartitions)) positions = [0] values = [seq[0]] for pos in list(range(0, len(seq), chunksize)): if (pos <= positions[(-1)]): continue while (((pos + 1) < len(seq)) and (seq[(pos - 1)] == seq[pos])): pos += 1 values.append(seq[pos]) if (pos == (len(seq) - 1)): pos += 1 positions.append(pos) if (positions[(-1)] != len(seq)): positions.append(len(seq)) values.append(seq[(-1)]) return (values, positions)
[ "def", "sorted_division_locations", "(", "seq", ",", "npartitions", "=", "None", ",", "chunksize", "=", "None", ")", ":", "if", "(", "(", "npartitions", "is", "None", ")", "==", "(", "chunksize", "is", "None", ")", ")", ":", "raise", "ValueError", "(", "'Exactly one of npartitions and chunksize must be specified.'", ")", "if", "npartitions", ":", "chunksize", "=", "ceil", "(", "(", "len", "(", "seq", ")", "/", "npartitions", ")", ")", "positions", "=", "[", "0", "]", "values", "=", "[", "seq", "[", "0", "]", "]", "for", "pos", "in", "list", "(", "range", "(", "0", ",", "len", "(", "seq", ")", ",", "chunksize", ")", ")", ":", "if", "(", "pos", "<=", "positions", "[", "(", "-", "1", ")", "]", ")", ":", "continue", "while", "(", "(", "(", "pos", "+", "1", ")", "<", "len", "(", "seq", ")", ")", "and", "(", "seq", "[", "(", "pos", "-", "1", ")", "]", "==", "seq", "[", "pos", "]", ")", ")", ":", "pos", "+=", "1", "values", ".", "append", "(", "seq", "[", "pos", "]", ")", "if", "(", "pos", "==", "(", "len", "(", "seq", ")", "-", "1", ")", ")", ":", "pos", "+=", "1", "positions", ".", "append", "(", "pos", ")", "if", "(", "positions", "[", "(", "-", "1", ")", "]", "!=", "len", "(", "seq", ")", ")", ":", "positions", ".", "append", "(", "len", "(", "seq", ")", ")", "values", ".", "append", "(", "seq", "[", "(", "-", "1", ")", "]", ")", "return", "(", "values", ",", "positions", ")" ]
find division locations and values in a sorted list .
train
false
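Worked examples for sorted_division_locations; duplicate runs stay within one partition, which shifts the boundaries.
seq = ['A', 'B', 'C', 'D', 'E', 'F']
assert sorted_division_locations(seq, chunksize=2) == (['A', 'C', 'E', 'F'], [0, 2, 4, 6])
seq = ['A', 'A', 'A', 'B', 'B']
assert sorted_division_locations(seq, chunksize=2) == (['A', 'B', 'B'], [0, 3, 5])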
49,375
def identity(x): return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
identity function .
train
false
49,376
def print_visit_time_on_site_goal_details(goal_details): print('------ Visit Time On Site Goal -------') print(('Comparison Type = %s' % goal_details.get('comparisonType'))) print(('comparison Value = %s' % goal_details.get('comparisonValue')))
[ "def", "print_visit_time_on_site_goal_details", "(", "goal_details", ")", ":", "print", "(", "'------ Visit Time On Site Goal -------'", ")", "print", "(", "(", "'Comparison Type = %s'", "%", "goal_details", ".", "get", "(", "'comparisonType'", ")", ")", ")", "print", "(", "(", "'comparison Value = %s'", "%", "goal_details", ".", "get", "(", "'comparisonValue'", ")", ")", ")" ]
prints all the visit time on site goal type info .
train
false
49,377
def set_device_cookie_and_return_bool(cls, force=''): if (force != ''): device_cookie = force elif (cls.request.get('device') == ''): device_cookie = str(read_cookie(cls, 'dvc')) if ((not device_cookie) or (device_cookie == 'None') or (device_cookie == '')): if get_device(cls)['is_mobile']: device_cookie = 'mobile' else: device_cookie = 'desktop' else: device_cookie = cls.request.get('device') write_cookie(cls, 'dvc', str(device_cookie), '/', 1209600) return (device_cookie == 'mobile')
[ "def", "set_device_cookie_and_return_bool", "(", "cls", ",", "force", "=", "''", ")", ":", "if", "(", "force", "!=", "''", ")", ":", "device_cookie", "=", "force", "elif", "(", "cls", ".", "request", ".", "get", "(", "'device'", ")", "==", "''", ")", ":", "device_cookie", "=", "str", "(", "read_cookie", "(", "cls", ",", "'dvc'", ")", ")", "if", "(", "(", "not", "device_cookie", ")", "or", "(", "device_cookie", "==", "'None'", ")", "or", "(", "device_cookie", "==", "''", ")", ")", ":", "if", "get_device", "(", "cls", ")", "[", "'is_mobile'", "]", ":", "device_cookie", "=", "'mobile'", "else", ":", "device_cookie", "=", "'desktop'", "else", ":", "device_cookie", "=", "cls", ".", "request", ".", "get", "(", "'device'", ")", "write_cookie", "(", "cls", ",", "'dvc'", ",", "str", "(", "device_cookie", ")", ",", "'/'", ",", "1209600", ")", "return", "(", "device_cookie", "==", "'mobile'", ")" ]
set a device cookie and return a bool; the cookie value has to be the string "mobile" or "desktop" .
train
false