id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
19,695
def log_dep(depmsg): try: depmsg = str(depmsg) except Exception as e: depmsg = str(e) for line in depmsg.splitlines(): log.msg(('[DP] %s' % line))
[ "def", "log_dep", "(", "depmsg", ")", ":", "try", ":", "depmsg", "=", "str", "(", "depmsg", ")", "except", "Exception", "as", "e", ":", "depmsg", "=", "str", "(", "e", ")", "for", "line", "in", "depmsg", ".", "splitlines", "(", ")", ":", "log", ".", "msg", "(", "(", "'[DP] %s'", "%", "line", ")", ")" ]
prints a deprecation message .
train
false
19,696
def RotR_64(x, N): return (np.right_shift(x, (N & 63), dtype=np.uint64) | np.left_shift(x, ((64 - N) & 63), dtype=np.uint64))
[ "def", "RotR_64", "(", "x", ",", "N", ")", ":", "return", "(", "np", ".", "right_shift", "(", "x", ",", "(", "N", "&", "63", ")", ",", "dtype", "=", "np", ".", "uint64", ")", "|", "np", ".", "left_shift", "(", "x", ",", "(", "(", "64", "-", "N", ")", "&", "63", ")", ",", "dtype", "=", "np", ".", "uint64", ")", ")" ]
return x rotated right by n .
train
false
19,697
def submit_calculate_may_enroll_csv(request, course_key, features): task_type = 'may_enroll_info_csv' task_class = calculate_may_enroll_csv task_input = {'features': features} task_key = '' return submit_task(request, task_type, task_class, course_key, task_input, task_key)
[ "def", "submit_calculate_may_enroll_csv", "(", "request", ",", "course_key", ",", "features", ")", ":", "task_type", "=", "'may_enroll_info_csv'", "task_class", "=", "calculate_may_enroll_csv", "task_input", "=", "{", "'features'", ":", "features", "}", "task_key", "=", "''", "return", "submit_task", "(", "request", ",", "task_type", ",", "task_class", ",", "course_key", ",", "task_input", ",", "task_key", ")" ]
submits a task to generate a csv file containing information about invited students who have not enrolled in a given course yet .
train
false
19,698
def register_rmtree(directory, cleaner=_mkdtemp_atexit_cleaner): with _MKDTEMP_LOCK: _mkdtemp_register_cleaner(cleaner) _MKDTEMP_DIRS[os.getpid()].add(directory) return directory
[ "def", "register_rmtree", "(", "directory", ",", "cleaner", "=", "_mkdtemp_atexit_cleaner", ")", ":", "with", "_MKDTEMP_LOCK", ":", "_mkdtemp_register_cleaner", "(", "cleaner", ")", "_MKDTEMP_DIRS", "[", "os", ".", "getpid", "(", ")", "]", ".", "add", "(", "directory", ")", "return", "directory" ]
register an existing directory to be cleaned up at process exit .
train
true
19,700
def GitHub_check_authentication(urls, username, password, token): query_GitHub(urls.api_url, username, password, token)
[ "def", "GitHub_check_authentication", "(", "urls", ",", "username", ",", "password", ",", "token", ")", ":", "query_GitHub", "(", "urls", ".", "api_url", ",", "username", ",", "password", ",", "token", ")" ]
checks that username & password is valid .
train
false
19,701
def plot_filters(net, layer, x, y): filters = net.layers[layer].w.eval() fig = plt.figure() for j in range(len(filters)): ax = fig.add_subplot(y, x, j) ax.matshow(filters[j][0], cmap=matplotlib.cm.binary) plt.xticks(np.array([])) plt.yticks(np.array([])) plt.tight_layout() return plt
[ "def", "plot_filters", "(", "net", ",", "layer", ",", "x", ",", "y", ")", ":", "filters", "=", "net", ".", "layers", "[", "layer", "]", ".", "w", ".", "eval", "(", ")", "fig", "=", "plt", ".", "figure", "(", ")", "for", "j", "in", "range", "(", "len", "(", "filters", ")", ")", ":", "ax", "=", "fig", ".", "add_subplot", "(", "y", ",", "x", ",", "j", ")", "ax", ".", "matshow", "(", "filters", "[", "j", "]", "[", "0", "]", ",", "cmap", "=", "matplotlib", ".", "cm", ".", "binary", ")", "plt", ".", "xticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "yticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "tight_layout", "(", ")", "return", "plt" ]
plot the filters for net after the layer number layer .
train
false
19,702
def getPythonFileNamesExceptInit(fileInDirectory=''): pythonFileNamesExceptInit = getFilesWithFileTypeWithoutWords('py', ['__init__.py'], fileInDirectory) pythonFileNamesExceptInit.sort() return pythonFileNamesExceptInit
[ "def", "getPythonFileNamesExceptInit", "(", "fileInDirectory", "=", "''", ")", ":", "pythonFileNamesExceptInit", "=", "getFilesWithFileTypeWithoutWords", "(", "'py'", ",", "[", "'__init__.py'", "]", ",", "fileInDirectory", ")", "pythonFileNamesExceptInit", ".", "sort", "(", ")", "return", "pythonFileNamesExceptInit" ]
get the python filenames of the directory which the fileindirectory is in .
train
false
19,703
def get_vcenter_version(kwargs=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The get_vcenter_version function must be called with -f or --function.') inv = salt.utils.vmware.get_inventory(_get_si()) return inv.about.fullName
[ "def", "get_vcenter_version", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The get_vcenter_version function must be called with -f or --function.'", ")", "inv", "=", "salt", ".", "utils", ".", "vmware", ".", "get_inventory", "(", "_get_si", "(", ")", ")", "return", "inv", ".", "about", ".", "fullName" ]
show the vcenter server version with build number .
train
true
19,704
def fire_event(name, *args, **kwargs): if (name in _events): for event in get_events(name): result = event(*args, **kwargs) if (result is not None): args = ((result,) + args[1:]) return (args and args[0])
[ "def", "fire_event", "(", "name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "name", "in", "_events", ")", ":", "for", "event", "in", "get_events", "(", "name", ")", ":", "result", "=", "event", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "result", "is", "not", "None", ")", ":", "args", "=", "(", "(", "result", ",", ")", "+", "args", "[", "1", ":", "]", ")", "return", "(", "args", "and", "args", "[", "0", "]", ")" ]
trigger an event with *name* .
train
false
19,706
def clear_script_prefix(): try: del _prefixes.value except AttributeError: pass
[ "def", "clear_script_prefix", "(", ")", ":", "try", ":", "del", "_prefixes", ".", "value", "except", "AttributeError", ":", "pass" ]
unset the script prefix for the current thread .
train
false
19,707
def escalate_prerelease_permissions(app, validation, version): app_permissions = (validation.get('permissions') or []) if any(((perm in PRERELEASE_PERMISSIONS) for perm in app_permissions)): nobody = UserProfile.objects.get(email=settings.NOBODY_EMAIL_ADDRESS) escalate_app(app, version, nobody, 'App uses prerelease permissions', mkt.LOG.ESCALATION_PRERELEASE_APP)
[ "def", "escalate_prerelease_permissions", "(", "app", ",", "validation", ",", "version", ")", ":", "app_permissions", "=", "(", "validation", ".", "get", "(", "'permissions'", ")", "or", "[", "]", ")", "if", "any", "(", "(", "(", "perm", "in", "PRERELEASE_PERMISSIONS", ")", "for", "perm", "in", "app_permissions", ")", ")", ":", "nobody", "=", "UserProfile", ".", "objects", ".", "get", "(", "email", "=", "settings", ".", "NOBODY_EMAIL_ADDRESS", ")", "escalate_app", "(", "app", ",", "version", ",", "nobody", ",", "'App uses prerelease permissions'", ",", "mkt", ".", "LOG", ".", "ESCALATION_PRERELEASE_APP", ")" ]
escalate the app if it uses prerelease permissions .
train
false
19,708
def add_variable(var, restore=True): collections = [MODEL_VARIABLES] if restore: collections.append(VARIABLES_TO_RESTORE) for collection in collections: if (var not in tf.get_collection(collection)): tf.add_to_collection(collection, var)
[ "def", "add_variable", "(", "var", ",", "restore", "=", "True", ")", ":", "collections", "=", "[", "MODEL_VARIABLES", "]", "if", "restore", ":", "collections", ".", "append", "(", "VARIABLES_TO_RESTORE", ")", "for", "collection", "in", "collections", ":", "if", "(", "var", "not", "in", "tf", ".", "get_collection", "(", "collection", ")", ")", ":", "tf", ".", "add_to_collection", "(", "collection", ",", "var", ")" ]
adds a variable to the model_variables collection .
train
true
19,709
def searchFileFor(file, name): addresses = searchFileForAll(FilePath(file), name) if addresses: return addresses[0] return None
[ "def", "searchFileFor", "(", "file", ",", "name", ")", ":", "addresses", "=", "searchFileForAll", "(", "FilePath", "(", "file", ")", ",", "name", ")", "if", "addresses", ":", "return", "addresses", "[", "0", "]", "return", "None" ]
grep given file .
train
false
19,710
def get_date_time(format='%Y-%m-%d %H:%M:%S', UTC_OFFSET=3): local_datetime = datetime.now() now = (local_datetime - timedelta(hours=UTC_OFFSET)) if (format != 'datetimeProperty'): now = now.strftime(format) return now
[ "def", "get_date_time", "(", "format", "=", "'%Y-%m-%d %H:%M:%S'", ",", "UTC_OFFSET", "=", "3", ")", ":", "local_datetime", "=", "datetime", ".", "now", "(", ")", "now", "=", "(", "local_datetime", "-", "timedelta", "(", "hours", "=", "UTC_OFFSET", ")", ")", "if", "(", "format", "!=", "'datetimeProperty'", ")", ":", "now", "=", "now", ".", "strftime", "(", "format", ")", "return", "now" ]
get date and time in utc with a specific format by default it utc = -3 .
train
false
19,711
def default_completers(): return collections.OrderedDict([('python_mode', complete_python_mode), ('base', complete_base), ('completer', complete_completer), ('skip', complete_skipper), ('pip', complete_pip), ('cd', complete_cd), ('rmdir', complete_rmdir), ('xonfig', complete_xonfig), ('xontrib', complete_xontrib), ('bash', complete_from_bash), ('man', complete_from_man), ('import', complete_import), ('python', complete_python), ('path', complete_path)])
[ "def", "default_completers", "(", ")", ":", "return", "collections", ".", "OrderedDict", "(", "[", "(", "'python_mode'", ",", "complete_python_mode", ")", ",", "(", "'base'", ",", "complete_base", ")", ",", "(", "'completer'", ",", "complete_completer", ")", ",", "(", "'skip'", ",", "complete_skipper", ")", ",", "(", "'pip'", ",", "complete_pip", ")", ",", "(", "'cd'", ",", "complete_cd", ")", ",", "(", "'rmdir'", ",", "complete_rmdir", ")", ",", "(", "'xonfig'", ",", "complete_xonfig", ")", ",", "(", "'xontrib'", ",", "complete_xontrib", ")", ",", "(", "'bash'", ",", "complete_from_bash", ")", ",", "(", "'man'", ",", "complete_from_man", ")", ",", "(", "'import'", ",", "complete_import", ")", ",", "(", "'python'", ",", "complete_python", ")", ",", "(", "'path'", ",", "complete_path", ")", "]", ")" ]
creates a copy of the default completers .
train
false
19,712
@step('I send a test email$') def mail_send_simple(step): mail_send({'from_email': 'test-no-reply@infoxchange.net.au', 'to': ['other-test-no-reply@infoxchange.au'], 'subject': 'Lettuce Test', 'body': 'This is a test email sent from lettuce, right to your door!'})
[ "@", "step", "(", "'I send a test email$'", ")", "def", "mail_send_simple", "(", "step", ")", ":", "mail_send", "(", "{", "'from_email'", ":", "'test-no-reply@infoxchange.net.au'", ",", "'to'", ":", "[", "'other-test-no-reply@infoxchange.au'", "]", ",", "'subject'", ":", "'Lettuce Test'", ",", "'body'", ":", "'This is a test email sent from lettuce, right to your door!'", "}", ")" ]
send a test .
train
false
19,713
def frontend_rewriter_middleware(application): return functools.partial(_rewriter_middleware, _REQUEST_REWRITER_CHAIN, _FRONTEND_RESPONSE_REWRITER_CHAIN, application)
[ "def", "frontend_rewriter_middleware", "(", "application", ")", ":", "return", "functools", ".", "partial", "(", "_rewriter_middleware", ",", "_REQUEST_REWRITER_CHAIN", ",", "_FRONTEND_RESPONSE_REWRITER_CHAIN", ",", "application", ")" ]
wsgi middleware application that applies a chain of response rewriters .
train
false
19,714
def reset(): _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
resets the db contents .
train
false
19,715
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialize the macrorecorder .
train
false
19,716
def _get_jinja_error_slug(tb_data): try: return [x for x in tb_data if (x[2] in ('top-level template code', 'template'))][(-1)] except IndexError: pass
[ "def", "_get_jinja_error_slug", "(", "tb_data", ")", ":", "try", ":", "return", "[", "x", "for", "x", "in", "tb_data", "if", "(", "x", "[", "2", "]", "in", "(", "'top-level template code'", ",", "'template'", ")", ")", "]", "[", "(", "-", "1", ")", "]", "except", "IndexError", ":", "pass" ]
return the line number where the template error was found .
train
false
19,718
def MakeServerErrorResponse(): return '{"status": 21005}'
[ "def", "MakeServerErrorResponse", "(", ")", ":", "return", "'{\"status\": 21005}'" ]
returns a status code indicating a server error .
train
false
19,719
def is_valid_boolstr(val): boolstrs = ('true', 'false', 'yes', 'no', 'y', 'n', '1', '0') return (str(val).lower() in boolstrs)
[ "def", "is_valid_boolstr", "(", "val", ")", ":", "boolstrs", "=", "(", "'true'", ",", "'false'", ",", "'yes'", ",", "'no'", ",", "'y'", ",", "'n'", ",", "'1'", ",", "'0'", ")", "return", "(", "str", "(", "val", ")", ".", "lower", "(", ")", "in", "boolstrs", ")" ]
check if the provided string is a valid bool string or not .
train
false
19,721
def all_properties(decl): for item in decl.seq: p = item.value if isinstance(p, Property): (yield p)
[ "def", "all_properties", "(", "decl", ")", ":", "for", "item", "in", "decl", ".", "seq", ":", "p", "=", "item", ".", "value", "if", "isinstance", "(", "p", ",", "Property", ")", ":", "(", "yield", "p", ")" ]
this is needed because cssstyledeclaration .
train
false
19,723
def index_template_create(name, body, hosts=None, profile=None): es = _get_instance(hosts, profile) try: result = es.indices.put_template(name=name, body=body) return True except elasticsearch.exceptions.NotFoundError: return None return None
[ "def", "index_template_create", "(", "name", ",", "body", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "result", "=", "es", ".", "indices", ".", "put_template", "(", "name", "=", "name", ",", "body", "=", "body", ")", "return", "True", "except", "elasticsearch", ".", "exceptions", ".", "NotFoundError", ":", "return", "None", "return", "None" ]
create an index template cli example:: salt myminion elasticsearch .
train
false
19,724
def test_install_as_egg(script, data): to_install = data.packages.join('FSPkg') result = script.pip('install', to_install, '--egg', expect_error=True) fspkg_folder = (script.site_packages / 'fspkg') egg_folder = ((script.site_packages / 'FSPkg-0.1.dev0-py%s.egg') % pyversion) assert (fspkg_folder not in result.files_created), str(result.stdout) assert (egg_folder in result.files_created), str(result) assert (join(egg_folder, 'fspkg') in result.files_created), str(result)
[ "def", "test_install_as_egg", "(", "script", ",", "data", ")", ":", "to_install", "=", "data", ".", "packages", ".", "join", "(", "'FSPkg'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "to_install", ",", "'--egg'", ",", "expect_error", "=", "True", ")", "fspkg_folder", "=", "(", "script", ".", "site_packages", "/", "'fspkg'", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'FSPkg-0.1.dev0-py%s.egg'", ")", "%", "pyversion", ")", "assert", "(", "fspkg_folder", "not", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ".", "stdout", ")", "assert", "(", "egg_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")", "assert", "(", "join", "(", "egg_folder", ",", "'fspkg'", ")", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")" ]
test installing as egg .
train
false
19,726
def apply_func_to_html_text(match, func=icu_upper, handle_entities=handle_entities): f = (lambda text: handle_entities(text, func)) parts = re.split(u'(<[^>]+>)', match.group()) parts = ((x if x.startswith(u'<') else f(x)) for x in parts) return u''.join(parts)
[ "def", "apply_func_to_html_text", "(", "match", ",", "func", "=", "icu_upper", ",", "handle_entities", "=", "handle_entities", ")", ":", "f", "=", "(", "lambda", "text", ":", "handle_entities", "(", "text", ",", "func", ")", ")", "parts", "=", "re", ".", "split", "(", "u'(<[^>]+>)'", ",", "match", ".", "group", "(", ")", ")", "parts", "=", "(", "(", "x", "if", "x", ".", "startswith", "(", "u'<'", ")", "else", "f", "(", "x", ")", ")", "for", "x", "in", "parts", ")", "return", "u''", ".", "join", "(", "parts", ")" ]
apply the specified function only to text between html tag definitions .
train
false
19,727
def dont_record(): recorder_proxy.clear_for_current_request()
[ "def", "dont_record", "(", ")", ":", "recorder_proxy", ".", "clear_for_current_request", "(", ")" ]
api to prevent recording of the current request .
train
false
19,728
def _TranslateError(error, detail=''): if ((error >= taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) and isinstance(error, int)): from google.appengine.api import datastore datastore_exception = datastore._DatastoreExceptionFromErrorCodeAndDetail((error - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR), detail) class JointException(datastore_exception.__class__, DatastoreError, ): 'There was a datastore error while accessing the queue.' __msg = (u'taskqueue.DatastoreError caused by: %s %s' % (datastore_exception.__class__, detail)) def __str__(self): return JointException.__msg return JointException() else: exception_class = _ERROR_MAPPING.get(error, None) if exception_class: return exception_class(detail) else: return Error(('Application error %s: %s' % (error, detail)))
[ "def", "_TranslateError", "(", "error", ",", "detail", "=", "''", ")", ":", "if", "(", "(", "error", ">=", "taskqueue_service_pb", ".", "TaskQueueServiceError", ".", "DATASTORE_ERROR", ")", "and", "isinstance", "(", "error", ",", "int", ")", ")", ":", "from", "google", ".", "appengine", ".", "api", "import", "datastore", "datastore_exception", "=", "datastore", ".", "_DatastoreExceptionFromErrorCodeAndDetail", "(", "(", "error", "-", "taskqueue_service_pb", ".", "TaskQueueServiceError", ".", "DATASTORE_ERROR", ")", ",", "detail", ")", "class", "JointException", "(", "datastore_exception", ".", "__class__", ",", "DatastoreError", ",", ")", ":", "__msg", "=", "(", "u'taskqueue.DatastoreError caused by: %s %s'", "%", "(", "datastore_exception", ".", "__class__", ",", "detail", ")", ")", "def", "__str__", "(", "self", ")", ":", "return", "JointException", ".", "__msg", "return", "JointException", "(", ")", "else", ":", "exception_class", "=", "_ERROR_MAPPING", ".", "get", "(", "error", ",", "None", ")", "if", "exception_class", ":", "return", "exception_class", "(", "detail", ")", "else", ":", "return", "Error", "(", "(", "'Application error %s: %s'", "%", "(", "error", ",", "detail", ")", ")", ")" ]
translates a taskqueueserviceerror into an exception .
train
false
19,729
@core_helper def html_auto_link(data): LINK_FNS = {'tag': tag_link, 'group': group_link, 'dataset': dataset_link, 'package': dataset_link} def makelink(matchobj): obj = matchobj.group(1) name = matchobj.group(2) title = ('%s:%s' % (obj, name)) return LINK_FNS[obj]({'name': name.strip('"'), 'title': title}) def link(matchobj): return ('<a href="%s" target="_blank" rel="nofollow">%s</a>' % (matchobj.group(1), matchobj.group(1))) def process(matchobj): data = matchobj.group(2) data = RE_MD_INTERNAL_LINK.sub(makelink, data) data = RE_MD_EXTERNAL_LINK.sub(link, data) return (matchobj.group(1) + data) data = RE_MD_GET_INNER_HTML.sub(process, data) return data
[ "@", "core_helper", "def", "html_auto_link", "(", "data", ")", ":", "LINK_FNS", "=", "{", "'tag'", ":", "tag_link", ",", "'group'", ":", "group_link", ",", "'dataset'", ":", "dataset_link", ",", "'package'", ":", "dataset_link", "}", "def", "makelink", "(", "matchobj", ")", ":", "obj", "=", "matchobj", ".", "group", "(", "1", ")", "name", "=", "matchobj", ".", "group", "(", "2", ")", "title", "=", "(", "'%s:%s'", "%", "(", "obj", ",", "name", ")", ")", "return", "LINK_FNS", "[", "obj", "]", "(", "{", "'name'", ":", "name", ".", "strip", "(", "'\"'", ")", ",", "'title'", ":", "title", "}", ")", "def", "link", "(", "matchobj", ")", ":", "return", "(", "'<a href=\"%s\" target=\"_blank\" rel=\"nofollow\">%s</a>'", "%", "(", "matchobj", ".", "group", "(", "1", ")", ",", "matchobj", ".", "group", "(", "1", ")", ")", ")", "def", "process", "(", "matchobj", ")", ":", "data", "=", "matchobj", ".", "group", "(", "2", ")", "data", "=", "RE_MD_INTERNAL_LINK", ".", "sub", "(", "makelink", ",", "data", ")", "data", "=", "RE_MD_EXTERNAL_LINK", ".", "sub", "(", "link", ",", "data", ")", "return", "(", "matchobj", ".", "group", "(", "1", ")", "+", "data", ")", "data", "=", "RE_MD_GET_INNER_HTML", ".", "sub", "(", "process", ",", "data", ")", "return", "data" ]
linkifies html tag converted to a tag link dataset converted to a dataset link group converted to a group link http:// converted to a link .
train
false
19,730
def _find_tcl_tk_dir(): tcl_root = exec_statement(('from %s import Tcl; print(Tcl().eval("info library"))' % modname_tkinter)) tk_version = exec_statement('from _tkinter import TK_VERSION; print(TK_VERSION)') tk_root = os.path.join(os.path.dirname(tcl_root), ('tk%s' % tk_version)) return (tcl_root, tk_root)
[ "def", "_find_tcl_tk_dir", "(", ")", ":", "tcl_root", "=", "exec_statement", "(", "(", "'from %s import Tcl; print(Tcl().eval(\"info library\"))'", "%", "modname_tkinter", ")", ")", "tk_version", "=", "exec_statement", "(", "'from _tkinter import TK_VERSION; print(TK_VERSION)'", ")", "tk_root", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "tcl_root", ")", ",", "(", "'tk%s'", "%", "tk_version", ")", ")", "return", "(", "tcl_root", ",", "tk_root", ")" ]
get a platform-agnostic 2-tuple of the absolute paths of the top-level external data directories for both tcl and tk .
train
false
19,732
def getAddIndexedLoop(loop, vertexes, z): indexedLoop = [] for index in xrange(len(loop)): pointComplex = loop[index] vector3index = Vector3Index(len(vertexes), pointComplex.real, pointComplex.imag, z) indexedLoop.append(vector3index) vertexes.append(vector3index) return indexedLoop
[ "def", "getAddIndexedLoop", "(", "loop", ",", "vertexes", ",", "z", ")", ":", "indexedLoop", "=", "[", "]", "for", "index", "in", "xrange", "(", "len", "(", "loop", ")", ")", ":", "pointComplex", "=", "loop", "[", "index", "]", "vector3index", "=", "Vector3Index", "(", "len", "(", "vertexes", ")", ",", "pointComplex", ".", "real", ",", "pointComplex", ".", "imag", ",", "z", ")", "indexedLoop", ".", "append", "(", "vector3index", ")", "vertexes", ".", "append", "(", "vector3index", ")", "return", "indexedLoop" ]
get and add an indexed loop .
train
false
19,733
def _get_s3_key(): key = (__opts__['s3.key'] if ('s3.key' in __opts__) else None) keyid = (__opts__['s3.keyid'] if ('s3.keyid' in __opts__) else None) service_url = (__opts__['s3.service_url'] if ('s3.service_url' in __opts__) else None) verify_ssl = (__opts__['s3.verify_ssl'] if ('s3.verify_ssl' in __opts__) else None) kms_keyid = (__opts__['aws.kmw.keyid'] if ('aws.kms.keyid' in __opts__) else None) location = (__opts__['s3.location'] if ('s3.location' in __opts__) else None) path_style = (__opts__['s3.path_style'] if ('s3.path_style' in __opts__) else None) https_enable = (__opts__['s3.https_enable'] if ('s3.https_enable' in __opts__) else None) return (key, keyid, service_url, verify_ssl, kms_keyid, location, path_style, https_enable)
[ "def", "_get_s3_key", "(", ")", ":", "key", "=", "(", "__opts__", "[", "'s3.key'", "]", "if", "(", "'s3.key'", "in", "__opts__", ")", "else", "None", ")", "keyid", "=", "(", "__opts__", "[", "'s3.keyid'", "]", "if", "(", "'s3.keyid'", "in", "__opts__", ")", "else", "None", ")", "service_url", "=", "(", "__opts__", "[", "'s3.service_url'", "]", "if", "(", "'s3.service_url'", "in", "__opts__", ")", "else", "None", ")", "verify_ssl", "=", "(", "__opts__", "[", "'s3.verify_ssl'", "]", "if", "(", "'s3.verify_ssl'", "in", "__opts__", ")", "else", "None", ")", "kms_keyid", "=", "(", "__opts__", "[", "'aws.kmw.keyid'", "]", "if", "(", "'aws.kms.keyid'", "in", "__opts__", ")", "else", "None", ")", "location", "=", "(", "__opts__", "[", "'s3.location'", "]", "if", "(", "'s3.location'", "in", "__opts__", ")", "else", "None", ")", "path_style", "=", "(", "__opts__", "[", "'s3.path_style'", "]", "if", "(", "'s3.path_style'", "in", "__opts__", ")", "else", "None", ")", "https_enable", "=", "(", "__opts__", "[", "'s3.https_enable'", "]", "if", "(", "'s3.https_enable'", "in", "__opts__", ")", "else", "None", ")", "return", "(", "key", ",", "keyid", ",", "service_url", ",", "verify_ssl", ",", "kms_keyid", ",", "location", ",", "path_style", ",", "https_enable", ")" ]
get aws keys from pillar or config .
train
true
19,734
def message_if_missing(filename): return (_CLIENT_SECRETS_MESSAGE % filename)
[ "def", "message_if_missing", "(", "filename", ")", ":", "return", "(", "_CLIENT_SECRETS_MESSAGE", "%", "filename", ")" ]
helpful message to display if the client_secrets file is missing .
train
false
19,737
def convertProcessXMLElementRenameByPaths(geometryOutput, xmlElement): convertXMLElementRenameByPaths(geometryOutput, xmlElement) processXMLElement(xmlElement)
[ "def", "convertProcessXMLElementRenameByPaths", "(", "geometryOutput", ",", "xmlElement", ")", ":", "convertXMLElementRenameByPaths", "(", "geometryOutput", ",", "xmlElement", ")", "processXMLElement", "(", "xmlElement", ")" ]
convert the xml element to a path xml element .
train
false
19,738
def sftp_connect(conf): sftp = None tries = CONNECT_TRIES retry_interval = RETRY_INTERVAL while (not sftp): try: sftp = pysftp.Connection(host=conf.host, username=conf.username, private_key=conf.private_key, password=conf.password, port=conf.port, private_key_pass=conf.private_key_pass) sftp.timeout = SOCKET_TIMEOUT log.verbose((u'Connected to %s' % conf.host)) except Exception as e: if (not tries): raise e else: log.debug((u'Caught exception: %s' % e)) log.warning((u'Failed to connect to %s; waiting %d seconds before retrying.' % (conf.host, retry_interval))) time.sleep(retry_interval) tries -= 1 retry_interval += RETRY_STEP return sftp
[ "def", "sftp_connect", "(", "conf", ")", ":", "sftp", "=", "None", "tries", "=", "CONNECT_TRIES", "retry_interval", "=", "RETRY_INTERVAL", "while", "(", "not", "sftp", ")", ":", "try", ":", "sftp", "=", "pysftp", ".", "Connection", "(", "host", "=", "conf", ".", "host", ",", "username", "=", "conf", ".", "username", ",", "private_key", "=", "conf", ".", "private_key", ",", "password", "=", "conf", ".", "password", ",", "port", "=", "conf", ".", "port", ",", "private_key_pass", "=", "conf", ".", "private_key_pass", ")", "sftp", ".", "timeout", "=", "SOCKET_TIMEOUT", "log", ".", "verbose", "(", "(", "u'Connected to %s'", "%", "conf", ".", "host", ")", ")", "except", "Exception", "as", "e", ":", "if", "(", "not", "tries", ")", ":", "raise", "e", "else", ":", "log", ".", "debug", "(", "(", "u'Caught exception: %s'", "%", "e", ")", ")", "log", ".", "warning", "(", "(", "u'Failed to connect to %s; waiting %d seconds before retrying.'", "%", "(", "conf", ".", "host", ",", "retry_interval", ")", ")", ")", "time", ".", "sleep", "(", "retry_interval", ")", "tries", "-=", "1", "retry_interval", "+=", "RETRY_STEP", "return", "sftp" ]
helper function to connect to an sftp server .
train
false
19,739
@comm_guard(Variable, ANY_TYPE) def unify_merge(v, o, U): best_v = U[v] if (v is not best_v): return unify_merge(o, best_v, U) else: return FALL_THROUGH
[ "@", "comm_guard", "(", "Variable", ",", "ANY_TYPE", ")", "def", "unify_merge", "(", "v", ",", "o", ",", "U", ")", ":", "best_v", "=", "U", "[", "v", "]", "if", "(", "v", "is", "not", "best_v", ")", ":", "return", "unify_merge", "(", "o", ",", "best_v", ",", "U", ")", "else", ":", "return", "FALL_THROUGH" ]
this simply checks if the var has an unification in u and uses it instead of the var .
train
false
19,741
def get_st_classified(): df = fd.get_stock_basics() df.reset_index(level=0, inplace=True) df = df[ct.FOR_CLASSIFY_B_COLS] df = df.ix[df.name.str.contains('ST')] df = df.sort('code').reset_index(drop=True) return df
[ "def", "get_st_classified", "(", ")", ":", "df", "=", "fd", ".", "get_stock_basics", "(", ")", "df", ".", "reset_index", "(", "level", "=", "0", ",", "inplace", "=", "True", ")", "df", "=", "df", "[", "ct", ".", "FOR_CLASSIFY_B_COLS", "]", "df", "=", "df", ".", "ix", "[", "df", ".", "name", ".", "str", ".", "contains", "(", "'ST'", ")", "]", "df", "=", "df", ".", "sort", "(", "'code'", ")", ".", "reset_index", "(", "drop", "=", "True", ")", "return", "df" ]
return dataframe code :股票代码 name :股票名称 .
train
false
19,742
def _save_private_file(filename, json_contents): temp_filename = tempfile.mktemp() file_desc = os.open(temp_filename, (os.O_WRONLY | os.O_CREAT), 384) with os.fdopen(file_desc, 'w') as file_handle: json.dump(json_contents, file_handle, sort_keys=True, indent=2, separators=(',', ': ')) shutil.move(temp_filename, filename)
[ "def", "_save_private_file", "(", "filename", ",", "json_contents", ")", ":", "temp_filename", "=", "tempfile", ".", "mktemp", "(", ")", "file_desc", "=", "os", ".", "open", "(", "temp_filename", ",", "(", "os", ".", "O_WRONLY", "|", "os", ".", "O_CREAT", ")", ",", "384", ")", "with", "os", ".", "fdopen", "(", "file_desc", ",", "'w'", ")", "as", "file_handle", ":", "json", ".", "dump", "(", "json_contents", ",", "file_handle", ",", "sort_keys", "=", "True", ",", "indent", "=", "2", ",", "separators", "=", "(", "','", ",", "': '", ")", ")", "shutil", ".", "move", "(", "temp_filename", ",", "filename", ")" ]
saves a file with read-write permissions on for the owner .
train
true
19,743
def QueryRange(client, table, callback): def _OnQuery(retry_cb, count, result): for item in result.items: if options.options.col_names: item = dict([(k, v) for (k, v) in item.items() if (k in options.options.col_names)]) pprint.pprint(item) if result.last_key: retry_cb(result.last_key, (count + len(result.items))) else: return callback(('queried %d items' % count)) def _Query(last_key=None, count=0): client.Query(table.name, options.options.hash_key, None, partial(_OnQuery, _Query, count), None, limit=100, excl_start_key=last_key) _Query()
[ "def", "QueryRange", "(", "client", ",", "table", ",", "callback", ")", ":", "def", "_OnQuery", "(", "retry_cb", ",", "count", ",", "result", ")", ":", "for", "item", "in", "result", ".", "items", ":", "if", "options", ".", "options", ".", "col_names", ":", "item", "=", "dict", "(", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "item", ".", "items", "(", ")", "if", "(", "k", "in", "options", ".", "options", ".", "col_names", ")", "]", ")", "pprint", ".", "pprint", "(", "item", ")", "if", "result", ".", "last_key", ":", "retry_cb", "(", "result", ".", "last_key", ",", "(", "count", "+", "len", "(", "result", ".", "items", ")", ")", ")", "else", ":", "return", "callback", "(", "(", "'queried %d items'", "%", "count", ")", ")", "def", "_Query", "(", "last_key", "=", "None", ",", "count", "=", "0", ")", ":", "client", ".", "Query", "(", "table", ".", "name", ",", "options", ".", "options", ".", "hash_key", ",", "None", ",", "partial", "(", "_OnQuery", ",", "_Query", ",", "count", ")", ",", "None", ",", "limit", "=", "100", ",", "excl_start_key", "=", "last_key", ")", "_Query", "(", ")" ]
queries the contents of a range identified by --hash_key .
train
false
19,744
def match_coordinates_3d(matchcoord, catalogcoord, nthneighbor=1, storekdtree=u'kdtree_3d'): if (catalogcoord.isscalar or (len(catalogcoord) < 1)): raise ValueError(u'The catalog for coordinate matching cannot be a scalar or length-0.') kdt = _get_cartesian_kdtree(catalogcoord, storekdtree) matchcoord = matchcoord.transform_to(catalogcoord) catunit = catalogcoord.cartesian.x.unit matchxyz = matchcoord.cartesian.xyz.to(catunit) matchflatxyz = matchxyz.reshape((3, (np.prod(matchxyz.shape) // 3))) (dist, idx) = kdt.query(matchflatxyz.T, nthneighbor) if (nthneighbor > 1): dist = dist[:, (-1)] idx = idx[:, (-1)] sep2d = catalogcoord[idx].separation(matchcoord) return (idx.reshape(matchxyz.shape[1:]), sep2d, (dist.reshape(matchxyz.shape[1:]) * catunit))
[ "def", "match_coordinates_3d", "(", "matchcoord", ",", "catalogcoord", ",", "nthneighbor", "=", "1", ",", "storekdtree", "=", "u'kdtree_3d'", ")", ":", "if", "(", "catalogcoord", ".", "isscalar", "or", "(", "len", "(", "catalogcoord", ")", "<", "1", ")", ")", ":", "raise", "ValueError", "(", "u'The catalog for coordinate matching cannot be a scalar or length-0.'", ")", "kdt", "=", "_get_cartesian_kdtree", "(", "catalogcoord", ",", "storekdtree", ")", "matchcoord", "=", "matchcoord", ".", "transform_to", "(", "catalogcoord", ")", "catunit", "=", "catalogcoord", ".", "cartesian", ".", "x", ".", "unit", "matchxyz", "=", "matchcoord", ".", "cartesian", ".", "xyz", ".", "to", "(", "catunit", ")", "matchflatxyz", "=", "matchxyz", ".", "reshape", "(", "(", "3", ",", "(", "np", ".", "prod", "(", "matchxyz", ".", "shape", ")", "//", "3", ")", ")", ")", "(", "dist", ",", "idx", ")", "=", "kdt", ".", "query", "(", "matchflatxyz", ".", "T", ",", "nthneighbor", ")", "if", "(", "nthneighbor", ">", "1", ")", ":", "dist", "=", "dist", "[", ":", ",", "(", "-", "1", ")", "]", "idx", "=", "idx", "[", ":", ",", "(", "-", "1", ")", "]", "sep2d", "=", "catalogcoord", "[", "idx", "]", ".", "separation", "(", "matchcoord", ")", "return", "(", "idx", ".", "reshape", "(", "matchxyz", ".", "shape", "[", "1", ":", "]", ")", ",", "sep2d", ",", "(", "dist", ".", "reshape", "(", "matchxyz", ".", "shape", "[", "1", ":", "]", ")", "*", "catunit", ")", ")" ]
finds the nearest 3-dimensional matches of a coordinate or coordinates in a set of catalog coordinates .
train
false
19,745
def is_ipv6_filter(ip, options=None): return _is_ipv(ip, 6, options=options)
[ "def", "is_ipv6_filter", "(", "ip", ",", "options", "=", "None", ")", ":", "return", "_is_ipv", "(", "ip", ",", "6", ",", "options", "=", "options", ")" ]
returns a bool telling if the value passed to it was a valid ipv6 address .
train
false
19,746
def _filter_dict(input_dict, search_key, search_value): output_dict = dict() for (key, key_list) in six.iteritems(input_dict): key_list_filtered = _filter_list(key_list, search_key, search_value) if key_list_filtered: output_dict[key] = key_list_filtered return output_dict
[ "def", "_filter_dict", "(", "input_dict", ",", "search_key", ",", "search_value", ")", ":", "output_dict", "=", "dict", "(", ")", "for", "(", "key", ",", "key_list", ")", "in", "six", ".", "iteritems", "(", "input_dict", ")", ":", "key_list_filtered", "=", "_filter_list", "(", "key_list", ",", "search_key", ",", "search_value", ")", "if", "key_list_filtered", ":", "output_dict", "[", "key", "]", "=", "key_list_filtered", "return", "output_dict" ]
filters a dictionary of dictionaries by a key-value pair .
train
true
19,747
def split_explicit_title(text): match = explicit_title_re.match(text) if match: return (True, match.group(1), match.group(2)) return (False, text, text)
[ "def", "split_explicit_title", "(", "text", ")", ":", "match", "=", "explicit_title_re", ".", "match", "(", "text", ")", "if", "match", ":", "return", "(", "True", ",", "match", ".", "group", "(", "1", ")", ",", "match", ".", "group", "(", "2", ")", ")", "return", "(", "False", ",", "text", ",", "text", ")" ]
split role content into title and target .
train
false
19,748
def continued_fraction_reduce(cf): from sympy.core.symbol import Dummy from sympy.solvers import solve period = [] x = Dummy('x') def untillist(cf): for nxt in cf: if isinstance(nxt, list): period.extend(nxt) (yield x) break (yield nxt) a = Integer(0) for a in continued_fraction_convergents(untillist(cf)): pass if period: y = Dummy('y') solns = solve((continued_fraction_reduce((period + [y])) - y), y) solns.sort() pure = solns[(-1)] return a.subs(x, pure).radsimp() else: return a
[ "def", "continued_fraction_reduce", "(", "cf", ")", ":", "from", "sympy", ".", "core", ".", "symbol", "import", "Dummy", "from", "sympy", ".", "solvers", "import", "solve", "period", "=", "[", "]", "x", "=", "Dummy", "(", "'x'", ")", "def", "untillist", "(", "cf", ")", ":", "for", "nxt", "in", "cf", ":", "if", "isinstance", "(", "nxt", ",", "list", ")", ":", "period", ".", "extend", "(", "nxt", ")", "(", "yield", "x", ")", "break", "(", "yield", "nxt", ")", "a", "=", "Integer", "(", "0", ")", "for", "a", "in", "continued_fraction_convergents", "(", "untillist", "(", "cf", ")", ")", ":", "pass", "if", "period", ":", "y", "=", "Dummy", "(", "'y'", ")", "solns", "=", "solve", "(", "(", "continued_fraction_reduce", "(", "(", "period", "+", "[", "y", "]", ")", ")", "-", "y", ")", ",", "y", ")", "solns", ".", "sort", "(", ")", "pure", "=", "solns", "[", "(", "-", "1", ")", "]", "return", "a", ".", "subs", "(", "x", ",", "pure", ")", ".", "radsimp", "(", ")", "else", ":", "return", "a" ]
reduce a continued fraction to a rational or quadratic irrational .
train
false
19,749
def _user_can_manage_leaders(user, group_profile): return user.has_perm('groups.change_groupprofile')
[ "def", "_user_can_manage_leaders", "(", "user", ",", "group_profile", ")", ":", "return", "user", ".", "has_perm", "(", "'groups.change_groupprofile'", ")" ]
can the given user add and remove leaders? .
train
false
19,751
def _track_successful_certificate_generation(user_id, course_id): if settings.LMS_SEGMENT_KEY: event_name = 'edx.bi.user.certificate.generate' tracking_context = tracker.get_tracker().resolve_context() analytics.track(user_id, event_name, {'category': 'certificates', 'label': unicode(course_id)}, context={'ip': tracking_context.get('ip'), 'Google Analytics': {'clientId': tracking_context.get('client_id')}})
[ "def", "_track_successful_certificate_generation", "(", "user_id", ",", "course_id", ")", ":", "if", "settings", ".", "LMS_SEGMENT_KEY", ":", "event_name", "=", "'edx.bi.user.certificate.generate'", "tracking_context", "=", "tracker", ".", "get_tracker", "(", ")", ".", "resolve_context", "(", ")", "analytics", ".", "track", "(", "user_id", ",", "event_name", ",", "{", "'category'", ":", "'certificates'", ",", "'label'", ":", "unicode", "(", "course_id", ")", "}", ",", "context", "=", "{", "'ip'", ":", "tracking_context", ".", "get", "(", "'ip'", ")", ",", "'Google Analytics'", ":", "{", "'clientId'", ":", "tracking_context", ".", "get", "(", "'client_id'", ")", "}", "}", ")" ]
track a successful certificate generation event .
train
false
19,753
def fltcols(vals): return np.atleast_2d(np.array(vals, dtype=float))
[ "def", "fltcols", "(", "vals", ")", ":", "return", "np", ".", "atleast_2d", "(", "np", ".", "array", "(", "vals", ",", "dtype", "=", "float", ")", ")" ]
trivial little function to make 1xn float vector .
train
false
19,754
def to_numeric(arg, errors='raise', downcast=None): if (downcast not in (None, 'integer', 'signed', 'unsigned', 'float')): raise ValueError('invalid downcasting method provided') is_series = False is_index = False is_scalar = False if isinstance(arg, pd.Series): is_series = True values = arg.values elif isinstance(arg, pd.Index): is_index = True values = arg.asi8 if (values is None): values = arg.values elif isinstance(arg, (list, tuple)): values = np.array(arg, dtype='O') elif isscalar(arg): if is_decimal(arg): return float(arg) if is_number(arg): return arg is_scalar = True values = np.array([arg], dtype='O') elif (getattr(arg, 'ndim', 1) > 1): raise TypeError('arg must be a list, tuple, 1-d array, or Series') else: values = arg try: if is_numeric_dtype(values): pass elif is_datetime_or_timedelta_dtype(values): values = values.astype(np.int64) else: values = _ensure_object(values) coerce_numeric = (False if (errors in ('ignore', 'raise')) else True) values = lib.maybe_convert_numeric(values, set(), coerce_numeric=coerce_numeric) except Exception: if (errors == 'raise'): raise if ((downcast is not None) and is_numeric_dtype(values)): typecodes = None if (downcast in ('integer', 'signed')): typecodes = np.typecodes['Integer'] elif ((downcast == 'unsigned') and (np.min(values) >= 0)): typecodes = np.typecodes['UnsignedInteger'] elif (downcast == 'float'): typecodes = np.typecodes['Float'] float_32_char = np.dtype(np.float32).char float_32_ind = typecodes.index(float_32_char) typecodes = typecodes[float_32_ind:] if (typecodes is not None): for dtype in typecodes: if (np.dtype(dtype).itemsize <= values.dtype.itemsize): values = _possibly_downcast_to_dtype(values, dtype) if (values.dtype == dtype): break if is_series: return pd.Series(values, index=arg.index, name=arg.name) elif is_index: return Index(values, name=arg.name) elif is_scalar: return values[0] else: return values
[ "def", "to_numeric", "(", "arg", ",", "errors", "=", "'raise'", ",", "downcast", "=", "None", ")", ":", "if", "(", "downcast", "not", "in", "(", "None", ",", "'integer'", ",", "'signed'", ",", "'unsigned'", ",", "'float'", ")", ")", ":", "raise", "ValueError", "(", "'invalid downcasting method provided'", ")", "is_series", "=", "False", "is_index", "=", "False", "is_scalar", "=", "False", "if", "isinstance", "(", "arg", ",", "pd", ".", "Series", ")", ":", "is_series", "=", "True", "values", "=", "arg", ".", "values", "elif", "isinstance", "(", "arg", ",", "pd", ".", "Index", ")", ":", "is_index", "=", "True", "values", "=", "arg", ".", "asi8", "if", "(", "values", "is", "None", ")", ":", "values", "=", "arg", ".", "values", "elif", "isinstance", "(", "arg", ",", "(", "list", ",", "tuple", ")", ")", ":", "values", "=", "np", ".", "array", "(", "arg", ",", "dtype", "=", "'O'", ")", "elif", "isscalar", "(", "arg", ")", ":", "if", "is_decimal", "(", "arg", ")", ":", "return", "float", "(", "arg", ")", "if", "is_number", "(", "arg", ")", ":", "return", "arg", "is_scalar", "=", "True", "values", "=", "np", ".", "array", "(", "[", "arg", "]", ",", "dtype", "=", "'O'", ")", "elif", "(", "getattr", "(", "arg", ",", "'ndim'", ",", "1", ")", ">", "1", ")", ":", "raise", "TypeError", "(", "'arg must be a list, tuple, 1-d array, or Series'", ")", "else", ":", "values", "=", "arg", "try", ":", "if", "is_numeric_dtype", "(", "values", ")", ":", "pass", "elif", "is_datetime_or_timedelta_dtype", "(", "values", ")", ":", "values", "=", "values", ".", "astype", "(", "np", ".", "int64", ")", "else", ":", "values", "=", "_ensure_object", "(", "values", ")", "coerce_numeric", "=", "(", "False", "if", "(", "errors", "in", "(", "'ignore'", ",", "'raise'", ")", ")", "else", "True", ")", "values", "=", "lib", ".", "maybe_convert_numeric", "(", "values", ",", "set", "(", ")", ",", "coerce_numeric", "=", "coerce_numeric", ")", "except", "Exception", ":", "if", "(", "errors", "==", "'raise'", ")", ":", "raise", "if", "(", "(", "downcast", "is", "not", "None", ")", "and", "is_numeric_dtype", "(", "values", ")", ")", ":", "typecodes", "=", "None", "if", "(", "downcast", "in", "(", "'integer'", ",", "'signed'", ")", ")", ":", "typecodes", "=", "np", ".", "typecodes", "[", "'Integer'", "]", "elif", "(", "(", "downcast", "==", "'unsigned'", ")", "and", "(", "np", ".", "min", "(", "values", ")", ">=", "0", ")", ")", ":", "typecodes", "=", "np", ".", "typecodes", "[", "'UnsignedInteger'", "]", "elif", "(", "downcast", "==", "'float'", ")", ":", "typecodes", "=", "np", ".", "typecodes", "[", "'Float'", "]", "float_32_char", "=", "np", ".", "dtype", "(", "np", ".", "float32", ")", ".", "char", "float_32_ind", "=", "typecodes", ".", "index", "(", "float_32_char", ")", "typecodes", "=", "typecodes", "[", "float_32_ind", ":", "]", "if", "(", "typecodes", "is", "not", "None", ")", ":", "for", "dtype", "in", "typecodes", ":", "if", "(", "np", ".", "dtype", "(", "dtype", ")", ".", "itemsize", "<=", "values", ".", "dtype", ".", "itemsize", ")", ":", "values", "=", "_possibly_downcast_to_dtype", "(", "values", ",", "dtype", ")", "if", "(", "values", ".", "dtype", "==", "dtype", ")", ":", "break", "if", "is_series", ":", "return", "pd", ".", "Series", "(", "values", ",", "index", "=", "arg", ".", "index", ",", "name", "=", "arg", ".", "name", ")", "elif", "is_index", ":", "return", "Index", "(", "values", ",", "name", "=", "arg", ".", "name", ")", "elif", "is_scalar", ":", "return", "values", "[", "0", "]", "else", ":", "return", "values" ]
return value of a numeric literal string .
train
true
19,756
def unzip_file(upload_file, extension='.shp', tempdir=None): absolute_base_file = None if (tempdir is None): tempdir = tempfile.mkdtemp() the_zip = ZipFile(upload_file) the_zip.extractall(tempdir) for item in the_zip.namelist(): if item.endswith(extension): absolute_base_file = os.path.join(tempdir, item) return absolute_base_file
[ "def", "unzip_file", "(", "upload_file", ",", "extension", "=", "'.shp'", ",", "tempdir", "=", "None", ")", ":", "absolute_base_file", "=", "None", "if", "(", "tempdir", "is", "None", ")", ":", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "the_zip", "=", "ZipFile", "(", "upload_file", ")", "the_zip", ".", "extractall", "(", "tempdir", ")", "for", "item", "in", "the_zip", ".", "namelist", "(", ")", ":", "if", "item", ".", "endswith", "(", "extension", ")", ":", "absolute_base_file", "=", "os", ".", "path", ".", "join", "(", "tempdir", ",", "item", ")", "return", "absolute_base_file" ]
unzip the file to the destination location .
train
false
19,757
def _exec_template(callable_, context, args=None, kwargs=None): template = context._with_template if ((template is not None) and (template.format_exceptions or template.error_handler)): error = None try: callable_(context, *args, **kwargs) except Exception as e: _render_error(template, context, e) except: e = sys.exc_info()[0] _render_error(template, context, e) else: callable_(context, *args, **kwargs)
[ "def", "_exec_template", "(", "callable_", ",", "context", ",", "args", "=", "None", ",", "kwargs", "=", "None", ")", ":", "template", "=", "context", ".", "_with_template", "if", "(", "(", "template", "is", "not", "None", ")", "and", "(", "template", ".", "format_exceptions", "or", "template", ".", "error_handler", ")", ")", ":", "error", "=", "None", "try", ":", "callable_", "(", "context", ",", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as", "e", ":", "_render_error", "(", "template", ",", "context", ",", "e", ")", "except", ":", "e", "=", "sys", ".", "exc_info", "(", ")", "[", "0", "]", "_render_error", "(", "template", ",", "context", ",", "e", ")", "else", ":", "callable_", "(", "context", ",", "*", "args", ",", "**", "kwargs", ")" ]
execute a rendering callable given the callable .
train
true
19,758
def on_mismatch(function): def decorated(matcher): def make_matcher(*args, **kwargs): return _OnMismatch(function, matcher(*args, **kwargs)) return make_matcher return decorated
[ "def", "on_mismatch", "(", "function", ")", ":", "def", "decorated", "(", "matcher", ")", ":", "def", "make_matcher", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "_OnMismatch", "(", "function", ",", "matcher", "(", "*", "args", ",", "**", "kwargs", ")", ")", "return", "make_matcher", "return", "decorated" ]
return a function that maps matcher factories to new matcher factories .
train
false
19,760
def _check_for_validation_errors(app=None): try: from cStringIO import StringIO except ImportError: from StringIO import StringIO s = StringIO() num_errors = get_validation_errors(s, app) if num_errors: if app: sys.stderr.write(style.ERROR(("Error: %s couldn't be installed, because there were errors in your model:\n" % app))) else: sys.stderr.write(style.ERROR("Error: Couldn't install apps, because there were errors in one or more models:\n")) s.seek(0) sys.stderr.write(s.read()) sys.exit(1)
[ "def", "_check_for_validation_errors", "(", "app", "=", "None", ")", ":", "try", ":", "from", "cStringIO", "import", "StringIO", "except", "ImportError", ":", "from", "StringIO", "import", "StringIO", "s", "=", "StringIO", "(", ")", "num_errors", "=", "get_validation_errors", "(", "s", ",", "app", ")", "if", "num_errors", ":", "if", "app", ":", "sys", ".", "stderr", ".", "write", "(", "style", ".", "ERROR", "(", "(", "\"Error: %s couldn't be installed, because there were errors in your model:\\n\"", "%", "app", ")", ")", ")", "else", ":", "sys", ".", "stderr", ".", "write", "(", "style", ".", "ERROR", "(", "\"Error: Couldn't install apps, because there were errors in one or more models:\\n\"", ")", ")", "s", ".", "seek", "(", "0", ")", "sys", ".", "stderr", ".", "write", "(", "s", ".", "read", "(", ")", ")", "sys", ".", "exit", "(", "1", ")" ]
check that an app has no validation errors .
train
false
19,763
def makeChecker(description): if (':' in description): (authType, argstring) = description.split(':', 1) else: authType = description argstring = '' return findCheckerFactory(authType).generateChecker(argstring)
[ "def", "makeChecker", "(", "description", ")", ":", "if", "(", "':'", "in", "description", ")", ":", "(", "authType", ",", "argstring", ")", "=", "description", ".", "split", "(", "':'", ",", "1", ")", "else", ":", "authType", "=", "description", "argstring", "=", "''", "return", "findCheckerFactory", "(", "authType", ")", ".", "generateChecker", "(", "argstring", ")" ]
returns an l{twisted .
train
false
19,764
def get_sw_name(): return get_sw_version().split('-')[0]
[ "def", "get_sw_name", "(", ")", ":", "return", "get_sw_version", "(", ")", ".", "split", "(", "'-'", ")", "[", "0", "]" ]
return the software name .
train
false
19,765
def FormatIntegralLastKey(value): assert (value < 1000000000000000), value return ('%015d' % value)
[ "def", "FormatIntegralLastKey", "(", "value", ")", ":", "assert", "(", "value", "<", "1000000000000000", ")", ",", "value", "return", "(", "'%015d'", "%", "value", ")" ]
formats an integral last key as a string .
train
false
19,766
def requires_nibabel(vox2ras_tkr=False): extra = (' with vox2ras_tkr support' if vox2ras_tkr else '') return np.testing.dec.skipif((not has_nibabel(vox2ras_tkr)), ('Requires nibabel%s' % extra))
[ "def", "requires_nibabel", "(", "vox2ras_tkr", "=", "False", ")", ":", "extra", "=", "(", "' with vox2ras_tkr support'", "if", "vox2ras_tkr", "else", "''", ")", "return", "np", ".", "testing", ".", "dec", ".", "skipif", "(", "(", "not", "has_nibabel", "(", "vox2ras_tkr", ")", ")", ",", "(", "'Requires nibabel%s'", "%", "extra", ")", ")" ]
aux function .
train
false
19,767
def _get_assets_for_page(request, course_key, options): current_page = options['current_page'] page_size = options['page_size'] sort = options['sort'] filter_params = (options['filter_params'] if options['filter_params'] else None) start = (current_page * page_size) return contentstore().get_all_content_for_course(course_key, start=start, maxresults=page_size, sort=sort, filter_params=filter_params)
[ "def", "_get_assets_for_page", "(", "request", ",", "course_key", ",", "options", ")", ":", "current_page", "=", "options", "[", "'current_page'", "]", "page_size", "=", "options", "[", "'page_size'", "]", "sort", "=", "options", "[", "'sort'", "]", "filter_params", "=", "(", "options", "[", "'filter_params'", "]", "if", "options", "[", "'filter_params'", "]", "else", "None", ")", "start", "=", "(", "current_page", "*", "page_size", ")", "return", "contentstore", "(", ")", ".", "get_all_content_for_course", "(", "course_key", ",", "start", "=", "start", ",", "maxresults", "=", "page_size", ",", "sort", "=", "sort", ",", "filter_params", "=", "filter_params", ")" ]
returns the list of assets for the specified page and page size .
train
false
19,768
def remove_article(text=u''): return re.sub(u'(?i)^(?:(?:A(?!\\s+to)n?)|The)\\s(\\w)', u'\\1', text)
[ "def", "remove_article", "(", "text", "=", "u''", ")", ":", "return", "re", ".", "sub", "(", "u'(?i)^(?:(?:A(?!\\\\s+to)n?)|The)\\\\s(\\\\w)'", ",", "u'\\\\1'", ",", "text", ")" ]
remove the english articles from a text string .
train
false
19,769
def rosen_hess_prod(x, p): x = atleast_1d(x) Hp = numpy.zeros(len(x), dtype=x.dtype) Hp[0] = (((((1200 * (x[0] ** 2)) - (400 * x[1])) + 2) * p[0]) - ((400 * x[0]) * p[1])) Hp[1:(-1)] = (((((-400) * x[:(-2)]) * p[:(-2)]) + (((202 + (1200 * (x[1:(-1)] ** 2))) - (400 * x[2:])) * p[1:(-1)])) - ((400 * x[1:(-1)]) * p[2:])) Hp[(-1)] = ((((-400) * x[(-2)]) * p[(-2)]) + (200 * p[(-1)])) return Hp
[ "def", "rosen_hess_prod", "(", "x", ",", "p", ")", ":", "x", "=", "atleast_1d", "(", "x", ")", "Hp", "=", "numpy", ".", "zeros", "(", "len", "(", "x", ")", ",", "dtype", "=", "x", ".", "dtype", ")", "Hp", "[", "0", "]", "=", "(", "(", "(", "(", "(", "1200", "*", "(", "x", "[", "0", "]", "**", "2", ")", ")", "-", "(", "400", "*", "x", "[", "1", "]", ")", ")", "+", "2", ")", "*", "p", "[", "0", "]", ")", "-", "(", "(", "400", "*", "x", "[", "0", "]", ")", "*", "p", "[", "1", "]", ")", ")", "Hp", "[", "1", ":", "(", "-", "1", ")", "]", "=", "(", "(", "(", "(", "(", "-", "400", ")", "*", "x", "[", ":", "(", "-", "2", ")", "]", ")", "*", "p", "[", ":", "(", "-", "2", ")", "]", ")", "+", "(", "(", "(", "202", "+", "(", "1200", "*", "(", "x", "[", "1", ":", "(", "-", "1", ")", "]", "**", "2", ")", ")", ")", "-", "(", "400", "*", "x", "[", "2", ":", "]", ")", ")", "*", "p", "[", "1", ":", "(", "-", "1", ")", "]", ")", ")", "-", "(", "(", "400", "*", "x", "[", "1", ":", "(", "-", "1", ")", "]", ")", "*", "p", "[", "2", ":", "]", ")", ")", "Hp", "[", "(", "-", "1", ")", "]", "=", "(", "(", "(", "(", "-", "400", ")", "*", "x", "[", "(", "-", "2", ")", "]", ")", "*", "p", "[", "(", "-", "2", ")", "]", ")", "+", "(", "200", "*", "p", "[", "(", "-", "1", ")", "]", ")", ")", "return", "Hp" ]
product of the hessian matrix of the rosenbrock function with a vector .
train
false
19,770
def setAttributeDictionaryToMultipliedTetragrid(tetragrid, xmlElement): targetMatrix = getFromObjectOrXMLElement(xmlElement).getOtherTimesSelf(tetragrid) setAttributeDictionaryMatrixToMatrix(targetMatrix, xmlElement)
[ "def", "setAttributeDictionaryToMultipliedTetragrid", "(", "tetragrid", ",", "xmlElement", ")", ":", "targetMatrix", "=", "getFromObjectOrXMLElement", "(", "xmlElement", ")", ".", "getOtherTimesSelf", "(", "tetragrid", ")", "setAttributeDictionaryMatrixToMatrix", "(", "targetMatrix", ",", "xmlElement", ")" ]
set the element attribute dictionary and element matrix to the matrix times the tetragrid .
train
false
19,772
def list_topics(): pubsub_client = pubsub.Client() for topic in pubsub_client.list_topics(): print topic.name
[ "def", "list_topics", "(", ")", ":", "pubsub_client", "=", "pubsub", ".", "Client", "(", ")", "for", "topic", "in", "pubsub_client", ".", "list_topics", "(", ")", ":", "print", "topic", ".", "name" ]
list topics .
train
false
19,773
def _get_row(row, key): if (row is not None): value = row[0] if (isinstance(value, (sqlite3.Binary, bytes)) and (key != 'resumed_from')): value = cPickle.loads(bytes(value)) return value raise KeyError(key)
[ "def", "_get_row", "(", "row", ",", "key", ")", ":", "if", "(", "row", "is", "not", "None", ")", ":", "value", "=", "row", "[", "0", "]", "if", "(", "isinstance", "(", "value", ",", "(", "sqlite3", ".", "Binary", ",", "bytes", ")", ")", "and", "(", "key", "!=", "'resumed_from'", ")", ")", ":", "value", "=", "cPickle", ".", "loads", "(", "bytes", "(", "value", ")", ")", "return", "value", "raise", "KeyError", "(", "key", ")" ]
handle the returned row e .
train
false
19,774
def test_epochs(): raw = read_raw_kit(sqd_path, stim=None) events = read_events(events_path) raw_epochs = Epochs(raw, events, None, tmin=0, tmax=0.099, baseline=None) data1 = raw_epochs.get_data() epochs = read_epochs_kit(epochs_path, events_path) data11 = epochs.get_data() assert_array_equal(data1, data11)
[ "def", "test_epochs", "(", ")", ":", "raw", "=", "read_raw_kit", "(", "sqd_path", ",", "stim", "=", "None", ")", "events", "=", "read_events", "(", "events_path", ")", "raw_epochs", "=", "Epochs", "(", "raw", ",", "events", ",", "None", ",", "tmin", "=", "0", ",", "tmax", "=", "0.099", ",", "baseline", "=", "None", ")", "data1", "=", "raw_epochs", ".", "get_data", "(", ")", "epochs", "=", "read_epochs_kit", "(", "epochs_path", ",", "events_path", ")", "data11", "=", "epochs", ".", "get_data", "(", ")", "assert_array_equal", "(", "data1", ",", "data11", ")" ]
test reading epoched sqd file .
train
false
19,778
def is_python_interpreter_valid_name(filename): pattern = '.*python(\\d\\.?\\d*)?(w)?(.exe)?$' if (re.match(pattern, filename, flags=re.I) is None): return False else: return True
[ "def", "is_python_interpreter_valid_name", "(", "filename", ")", ":", "pattern", "=", "'.*python(\\\\d\\\\.?\\\\d*)?(w)?(.exe)?$'", "if", "(", "re", ".", "match", "(", "pattern", ",", "filename", ",", "flags", "=", "re", ".", "I", ")", "is", "None", ")", ":", "return", "False", "else", ":", "return", "True" ]
check that the python interpreter file has a valid name .
train
true
19,780
@require_admin_context @_retry_on_deadlock def cluster_update(context, id, values): query = _cluster_query(context, id=id) result = query.update(values) if (not result): raise exception.ClusterNotFound(id=id)
[ "@", "require_admin_context", "@", "_retry_on_deadlock", "def", "cluster_update", "(", "context", ",", "id", ",", "values", ")", ":", "query", "=", "_cluster_query", "(", "context", ",", "id", "=", "id", ")", "result", "=", "query", ".", "update", "(", "values", ")", "if", "(", "not", "result", ")", ":", "raise", "exception", ".", "ClusterNotFound", "(", "id", "=", "id", ")" ]
set the given properties on an cluster and update it .
train
false
19,781
def normalise_alias_format_string(alias_format): display = None representation = [] if isinstance(alias_format, six.string_types): display = alias_format representation.append(alias_format) elif isinstance(alias_format, dict): display = alias_format.get('display') representation = (alias_format.get('representation') or []) if isinstance(representation, six.string_types): representation = [representation] else: raise TypeError(("alias_format '%s' is neither a dictionary or string type." % repr(alias_format))) return (display, representation)
[ "def", "normalise_alias_format_string", "(", "alias_format", ")", ":", "display", "=", "None", "representation", "=", "[", "]", "if", "isinstance", "(", "alias_format", ",", "six", ".", "string_types", ")", ":", "display", "=", "alias_format", "representation", ".", "append", "(", "alias_format", ")", "elif", "isinstance", "(", "alias_format", ",", "dict", ")", ":", "display", "=", "alias_format", ".", "get", "(", "'display'", ")", "representation", "=", "(", "alias_format", ".", "get", "(", "'representation'", ")", "or", "[", "]", ")", "if", "isinstance", "(", "representation", ",", "six", ".", "string_types", ")", ":", "representation", "=", "[", "representation", "]", "else", ":", "raise", "TypeError", "(", "(", "\"alias_format '%s' is neither a dictionary or string type.\"", "%", "repr", "(", "alias_format", ")", ")", ")", "return", "(", "display", ",", "representation", ")" ]
stackstorm action aliases come in two forms; 1 .
train
false
19,782
def cxSimulatedBinary(ind1, ind2, eta): for (i, (x1, x2)) in enumerate(zip(ind1, ind2)): rand = random.random() if (rand <= 0.5): beta = (2.0 * rand) else: beta = (1.0 / (2.0 * (1.0 - rand))) beta **= (1.0 / (eta + 1.0)) ind1[i] = (0.5 * (((1 + beta) * x1) + ((1 - beta) * x2))) ind2[i] = (0.5 * (((1 - beta) * x1) + ((1 + beta) * x2))) return (ind1, ind2)
[ "def", "cxSimulatedBinary", "(", "ind1", ",", "ind2", ",", "eta", ")", ":", "for", "(", "i", ",", "(", "x1", ",", "x2", ")", ")", "in", "enumerate", "(", "zip", "(", "ind1", ",", "ind2", ")", ")", ":", "rand", "=", "random", ".", "random", "(", ")", "if", "(", "rand", "<=", "0.5", ")", ":", "beta", "=", "(", "2.0", "*", "rand", ")", "else", ":", "beta", "=", "(", "1.0", "/", "(", "2.0", "*", "(", "1.0", "-", "rand", ")", ")", ")", "beta", "**=", "(", "1.0", "/", "(", "eta", "+", "1.0", ")", ")", "ind1", "[", "i", "]", "=", "(", "0.5", "*", "(", "(", "(", "1", "+", "beta", ")", "*", "x1", ")", "+", "(", "(", "1", "-", "beta", ")", "*", "x2", ")", ")", ")", "ind2", "[", "i", "]", "=", "(", "0.5", "*", "(", "(", "(", "1", "-", "beta", ")", "*", "x1", ")", "+", "(", "(", "1", "+", "beta", ")", "*", "x2", ")", ")", ")", "return", "(", "ind1", ",", "ind2", ")" ]
executes a simulated binary crossover that modify in-place the input individuals .
train
false
19,783
@pytest.fixture() def fake_proc(monkeypatch, stubs): p = guiprocess.GUIProcess('testprocess') monkeypatch.setattr(p, '_proc', stubs.fake_qprocess()) return p
[ "@", "pytest", ".", "fixture", "(", ")", "def", "fake_proc", "(", "monkeypatch", ",", "stubs", ")", ":", "p", "=", "guiprocess", ".", "GUIProcess", "(", "'testprocess'", ")", "monkeypatch", ".", "setattr", "(", "p", ",", "'_proc'", ",", "stubs", ".", "fake_qprocess", "(", ")", ")", "return", "p" ]
a fixture providing a guiprocess with a mocked qprocess .
train
false
19,784
def exhaustive_dropout_average(mlp, inputs, masked_input_layers=None, default_input_scale=2.0, input_scales=None): if (masked_input_layers is None): masked_input_layers = mlp.layer_names mlp._validate_layer_names(masked_input_layers) if (input_scales is None): input_scales = {} mlp._validate_layer_names(input_scales.keys()) if any(((key not in masked_input_layers) for key in input_scales)): not_in = [key for key in input_scales if (key not in mlp.layer_names)] raise ValueError((', '.join(not_in) + ' in input_scales but not masked')) num_inputs = mlp.get_total_input_dimension(masked_input_layers) outputs = [mlp.masked_fprop(inputs, mask, masked_input_layers, default_input_scale, input_scales) for mask in xrange((2 ** num_inputs))] return geometric_mean_prediction(outputs)
[ "def", "exhaustive_dropout_average", "(", "mlp", ",", "inputs", ",", "masked_input_layers", "=", "None", ",", "default_input_scale", "=", "2.0", ",", "input_scales", "=", "None", ")", ":", "if", "(", "masked_input_layers", "is", "None", ")", ":", "masked_input_layers", "=", "mlp", ".", "layer_names", "mlp", ".", "_validate_layer_names", "(", "masked_input_layers", ")", "if", "(", "input_scales", "is", "None", ")", ":", "input_scales", "=", "{", "}", "mlp", ".", "_validate_layer_names", "(", "input_scales", ".", "keys", "(", ")", ")", "if", "any", "(", "(", "(", "key", "not", "in", "masked_input_layers", ")", "for", "key", "in", "input_scales", ")", ")", ":", "not_in", "=", "[", "key", "for", "key", "in", "input_scales", "if", "(", "key", "not", "in", "mlp", ".", "layer_names", ")", "]", "raise", "ValueError", "(", "(", "', '", ".", "join", "(", "not_in", ")", "+", "' in input_scales but not masked'", ")", ")", "num_inputs", "=", "mlp", ".", "get_total_input_dimension", "(", "masked_input_layers", ")", "outputs", "=", "[", "mlp", ".", "masked_fprop", "(", "inputs", ",", "mask", ",", "masked_input_layers", ",", "default_input_scale", ",", "input_scales", ")", "for", "mask", "in", "xrange", "(", "(", "2", "**", "num_inputs", ")", ")", "]", "return", "geometric_mean_prediction", "(", "outputs", ")" ]
take the geometric mean over all dropout masks of an mlp with softmax outputs .
train
false
19,785
def showWarning(text, parent=None, help='', title='Anki'): return showInfo(text, parent, help, 'warning', title=title)
[ "def", "showWarning", "(", "text", ",", "parent", "=", "None", ",", "help", "=", "''", ",", "title", "=", "'Anki'", ")", ":", "return", "showInfo", "(", "text", ",", "parent", ",", "help", ",", "'warning'", ",", "title", "=", "title", ")" ]
show a small warning with an ok button .
train
false
19,786
def binding_ops(): names = [u'existential', u'universal', u'lambda'] for pair in zip(names, [Tokens.EXISTS, Tokens.ALL, Tokens.LAMBDA]): print((u'%-15s DCTB %s' % pair))
[ "def", "binding_ops", "(", ")", ":", "names", "=", "[", "u'existential'", ",", "u'universal'", ",", "u'lambda'", "]", "for", "pair", "in", "zip", "(", "names", ",", "[", "Tokens", ".", "EXISTS", ",", "Tokens", ".", "ALL", ",", "Tokens", ".", "LAMBDA", "]", ")", ":", "print", "(", "(", "u'%-15s DCTB %s'", "%", "pair", ")", ")" ]
binding operators .
train
false
19,789
def trigger_500(request): raise Exception("Congratulations, you've triggered an exception! Go tell all your friends what an exceptional person you are.")
[ "def", "trigger_500", "(", "request", ")", ":", "raise", "Exception", "(", "\"Congratulations, you've triggered an exception! Go tell all your friends what an exceptional person you are.\"", ")" ]
hot-wired method of triggering a server error to test reporting .
train
false
19,790
def save_exploration_summary(exp_summary): exp_summary_model = exp_models.ExpSummaryModel(id=exp_summary.id, title=exp_summary.title, category=exp_summary.category, objective=exp_summary.objective, language_code=exp_summary.language_code, tags=exp_summary.tags, ratings=exp_summary.ratings, scaled_average_rating=exp_summary.scaled_average_rating, status=exp_summary.status, community_owned=exp_summary.community_owned, owner_ids=exp_summary.owner_ids, editor_ids=exp_summary.editor_ids, viewer_ids=exp_summary.viewer_ids, contributor_ids=exp_summary.contributor_ids, contributors_summary=exp_summary.contributors_summary, version=exp_summary.version, exploration_model_last_updated=exp_summary.exploration_model_last_updated, exploration_model_created_on=exp_summary.exploration_model_created_on, first_published_msec=exp_summary.first_published_msec) exp_summary_model.put()
[ "def", "save_exploration_summary", "(", "exp_summary", ")", ":", "exp_summary_model", "=", "exp_models", ".", "ExpSummaryModel", "(", "id", "=", "exp_summary", ".", "id", ",", "title", "=", "exp_summary", ".", "title", ",", "category", "=", "exp_summary", ".", "category", ",", "objective", "=", "exp_summary", ".", "objective", ",", "language_code", "=", "exp_summary", ".", "language_code", ",", "tags", "=", "exp_summary", ".", "tags", ",", "ratings", "=", "exp_summary", ".", "ratings", ",", "scaled_average_rating", "=", "exp_summary", ".", "scaled_average_rating", ",", "status", "=", "exp_summary", ".", "status", ",", "community_owned", "=", "exp_summary", ".", "community_owned", ",", "owner_ids", "=", "exp_summary", ".", "owner_ids", ",", "editor_ids", "=", "exp_summary", ".", "editor_ids", ",", "viewer_ids", "=", "exp_summary", ".", "viewer_ids", ",", "contributor_ids", "=", "exp_summary", ".", "contributor_ids", ",", "contributors_summary", "=", "exp_summary", ".", "contributors_summary", ",", "version", "=", "exp_summary", ".", "version", ",", "exploration_model_last_updated", "=", "exp_summary", ".", "exploration_model_last_updated", ",", "exploration_model_created_on", "=", "exp_summary", ".", "exploration_model_created_on", ",", "first_published_msec", "=", "exp_summary", ".", "first_published_msec", ")", "exp_summary_model", ".", "put", "(", ")" ]
save an exploration summary domain object as an expsummarymodel entity in the datastore .
train
false
19,791
def imap(requests, stream=False, size=2, exception_handler=None): pool = Pool(size) def send(r): return r.send(stream=stream) for request in pool.imap_unordered(send, requests): if (request.response is not None): (yield request.response) elif exception_handler: exception_handler(request, request.exception) pool.join()
[ "def", "imap", "(", "requests", ",", "stream", "=", "False", ",", "size", "=", "2", ",", "exception_handler", "=", "None", ")", ":", "pool", "=", "Pool", "(", "size", ")", "def", "send", "(", "r", ")", ":", "return", "r", ".", "send", "(", "stream", "=", "stream", ")", "for", "request", "in", "pool", ".", "imap_unordered", "(", "send", ",", "requests", ")", ":", "if", "(", "request", ".", "response", "is", "not", "None", ")", ":", "(", "yield", "request", ".", "response", ")", "elif", "exception_handler", ":", "exception_handler", "(", "request", ",", "request", ".", "exception", ")", "pool", ".", "join", "(", ")" ]
concurrently converts a generator object of requests to a generator of responses .
train
true
19,793
def _onenorm_matrix_power_nnm(A, p): if ((int(p) != p) or (p < 0)): raise ValueError('expected non-negative integer p') p = int(p) if ((len(A.shape) != 2) or (A.shape[0] != A.shape[1])): raise ValueError('expected A to be like a square matrix') v = np.ones((A.shape[0], 1), dtype=float) M = A.T for i in range(p): v = M.dot(v) return max(v)
[ "def", "_onenorm_matrix_power_nnm", "(", "A", ",", "p", ")", ":", "if", "(", "(", "int", "(", "p", ")", "!=", "p", ")", "or", "(", "p", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'expected non-negative integer p'", ")", "p", "=", "int", "(", "p", ")", "if", "(", "(", "len", "(", "A", ".", "shape", ")", "!=", "2", ")", "or", "(", "A", ".", "shape", "[", "0", "]", "!=", "A", ".", "shape", "[", "1", "]", ")", ")", ":", "raise", "ValueError", "(", "'expected A to be like a square matrix'", ")", "v", "=", "np", ".", "ones", "(", "(", "A", ".", "shape", "[", "0", "]", ",", "1", ")", ",", "dtype", "=", "float", ")", "M", "=", "A", ".", "T", "for", "i", "in", "range", "(", "p", ")", ":", "v", "=", "M", ".", "dot", "(", "v", ")", "return", "max", "(", "v", ")" ]
compute the 1-norm of a non-negative integer power of a non-negative matrix .
train
false
19,794
def get_socket(conf): try: bind_port = int(conf['bind_port']) except (ValueError, KeyError, TypeError): raise ConfigFilePortError() bind_addr = (conf.get('bind_ip', '0.0.0.0'), bind_port) address_family = [addr[0] for addr in socket.getaddrinfo(bind_addr[0], bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM) if (addr[0] in (socket.AF_INET, socket.AF_INET6))][0] sock = None bind_timeout = int(conf.get('bind_timeout', 30)) retry_until = (time.time() + bind_timeout) warn_ssl = False while ((not sock) and (time.time() < retry_until)): try: sock = listen(bind_addr, backlog=int(conf.get('backlog', 4096)), family=address_family) if ('cert_file' in conf): warn_ssl = True sock = ssl.wrap_socket(sock, certfile=conf['cert_file'], keyfile=conf['key_file']) except socket.error as err: if (err.args[0] != errno.EADDRINUSE): raise sleep(0.1) if (not sock): raise Exception((_('Could not bind to %(addr)s:%(port)s after trying for %(timeout)s seconds') % {'addr': bind_addr[0], 'port': bind_addr[1], 'timeout': bind_timeout})) sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) if hasattr(socket, 'TCP_KEEPIDLE'): sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 600) if warn_ssl: ssl_warning_message = _('WARNING: SSL should only be enabled for testing purposes. Use external SSL termination for a production deployment.') get_logger(conf).warning(ssl_warning_message) print(ssl_warning_message) return sock
[ "def", "get_socket", "(", "conf", ")", ":", "try", ":", "bind_port", "=", "int", "(", "conf", "[", "'bind_port'", "]", ")", "except", "(", "ValueError", ",", "KeyError", ",", "TypeError", ")", ":", "raise", "ConfigFilePortError", "(", ")", "bind_addr", "=", "(", "conf", ".", "get", "(", "'bind_ip'", ",", "'0.0.0.0'", ")", ",", "bind_port", ")", "address_family", "=", "[", "addr", "[", "0", "]", "for", "addr", "in", "socket", ".", "getaddrinfo", "(", "bind_addr", "[", "0", "]", ",", "bind_addr", "[", "1", "]", ",", "socket", ".", "AF_UNSPEC", ",", "socket", ".", "SOCK_STREAM", ")", "if", "(", "addr", "[", "0", "]", "in", "(", "socket", ".", "AF_INET", ",", "socket", ".", "AF_INET6", ")", ")", "]", "[", "0", "]", "sock", "=", "None", "bind_timeout", "=", "int", "(", "conf", ".", "get", "(", "'bind_timeout'", ",", "30", ")", ")", "retry_until", "=", "(", "time", ".", "time", "(", ")", "+", "bind_timeout", ")", "warn_ssl", "=", "False", "while", "(", "(", "not", "sock", ")", "and", "(", "time", ".", "time", "(", ")", "<", "retry_until", ")", ")", ":", "try", ":", "sock", "=", "listen", "(", "bind_addr", ",", "backlog", "=", "int", "(", "conf", ".", "get", "(", "'backlog'", ",", "4096", ")", ")", ",", "family", "=", "address_family", ")", "if", "(", "'cert_file'", "in", "conf", ")", ":", "warn_ssl", "=", "True", "sock", "=", "ssl", ".", "wrap_socket", "(", "sock", ",", "certfile", "=", "conf", "[", "'cert_file'", "]", ",", "keyfile", "=", "conf", "[", "'key_file'", "]", ")", "except", "socket", ".", "error", "as", "err", ":", "if", "(", "err", ".", "args", "[", "0", "]", "!=", "errno", ".", "EADDRINUSE", ")", ":", "raise", "sleep", "(", "0.1", ")", "if", "(", "not", "sock", ")", ":", "raise", "Exception", "(", "(", "_", "(", "'Could not bind to %(addr)s:%(port)s after trying for %(timeout)s seconds'", ")", "%", "{", "'addr'", ":", "bind_addr", "[", "0", "]", ",", "'port'", ":", "bind_addr", "[", "1", "]", ",", "'timeout'", ":", "bind_timeout", "}", ")", ")", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_KEEPALIVE", ",", "1", ")", "sock", ".", "setsockopt", "(", "socket", ".", "IPPROTO_TCP", ",", "socket", ".", "TCP_NODELAY", ",", "1", ")", "if", "hasattr", "(", "socket", ",", "'TCP_KEEPIDLE'", ")", ":", "sock", ".", "setsockopt", "(", "socket", ".", "IPPROTO_TCP", ",", "socket", ".", "TCP_KEEPIDLE", ",", "600", ")", "if", "warn_ssl", ":", "ssl_warning_message", "=", "_", "(", "'WARNING: SSL should only be enabled for testing purposes. Use external SSL termination for a production deployment.'", ")", "get_logger", "(", "conf", ")", ".", "warning", "(", "ssl_warning_message", ")", "print", "(", "ssl_warning_message", ")", "return", "sock" ]
return a socket object for the addr ip-version agnostic .
train
false
19,796
def _sys_mgr(): thrift_port = str(__salt__['config.option']('cassandra.THRIFT_PORT')) host = __salt__['config.option']('cassandra.host') return SystemManager('{0}:{1}'.format(host, thrift_port))
[ "def", "_sys_mgr", "(", ")", ":", "thrift_port", "=", "str", "(", "__salt__", "[", "'config.option'", "]", "(", "'cassandra.THRIFT_PORT'", ")", ")", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'cassandra.host'", ")", "return", "SystemManager", "(", "'{0}:{1}'", ".", "format", "(", "host", ",", "thrift_port", ")", ")" ]
return a pycassa system manager connection object .
train
false
19,797
def arg_encoding(): try: return (locale.getdefaultlocale()[1] or 'utf-8') except ValueError: return 'utf-8'
[ "def", "arg_encoding", "(", ")", ":", "try", ":", "return", "(", "locale", ".", "getdefaultlocale", "(", ")", "[", "1", "]", "or", "'utf-8'", ")", "except", "ValueError", ":", "return", "'utf-8'" ]
get the encoding for command-line arguments .
train
false
19,798
def inputhook_wx2(context): try: app = wx.GetApp() if (app is not None): assert wx.Thread_IsMain() elr = EventLoopRunner() elr.Run(time=10, input_is_ready=context.input_is_ready) except KeyboardInterrupt: pass return 0
[ "def", "inputhook_wx2", "(", "context", ")", ":", "try", ":", "app", "=", "wx", ".", "GetApp", "(", ")", "if", "(", "app", "is", "not", "None", ")", ":", "assert", "wx", ".", "Thread_IsMain", "(", ")", "elr", "=", "EventLoopRunner", "(", ")", "elr", ".", "Run", "(", "time", "=", "10", ",", "input_is_ready", "=", "context", ".", "input_is_ready", ")", "except", "KeyboardInterrupt", ":", "pass", "return", "0" ]
run the wx event loop .
train
true
19,799
def get_idx_list(inputs, idx_list, get_count=False): n = (len(inputs) - 1) if (n == 0): return tuple(idx_list) indices = list(reversed(list(inputs[1:]))) def convert(entry): if isinstance(entry, gof.Type): return indices.pop() elif isinstance(entry, slice): return slice(convert(entry.start), convert(entry.stop), convert(entry.step)) else: return entry cdata = tuple(map(convert, idx_list)) if get_count: return (n - len(indices)) else: return cdata
[ "def", "get_idx_list", "(", "inputs", ",", "idx_list", ",", "get_count", "=", "False", ")", ":", "n", "=", "(", "len", "(", "inputs", ")", "-", "1", ")", "if", "(", "n", "==", "0", ")", ":", "return", "tuple", "(", "idx_list", ")", "indices", "=", "list", "(", "reversed", "(", "list", "(", "inputs", "[", "1", ":", "]", ")", ")", ")", "def", "convert", "(", "entry", ")", ":", "if", "isinstance", "(", "entry", ",", "gof", ".", "Type", ")", ":", "return", "indices", ".", "pop", "(", ")", "elif", "isinstance", "(", "entry", ",", "slice", ")", ":", "return", "slice", "(", "convert", "(", "entry", ".", "start", ")", ",", "convert", "(", "entry", ".", "stop", ")", ",", "convert", "(", "entry", ".", "step", ")", ")", "else", ":", "return", "entry", "cdata", "=", "tuple", "(", "map", "(", "convert", ",", "idx_list", ")", ")", "if", "get_count", ":", "return", "(", "n", "-", "len", "(", "indices", ")", ")", "else", ":", "return", "cdata" ]
given a list of inputs to the subtensor and its idx_list reorders the inputs according to the idx list to get the right values .
train
false
19,800
def allocate_lock(): return LockType()
[ "def", "allocate_lock", "(", ")", ":", "return", "LockType", "(", ")" ]
dummy implementation of _thread .
train
false
19,801
def number_of_isolates(G): return sum((1 for v in isolates(G)))
[ "def", "number_of_isolates", "(", "G", ")", ":", "return", "sum", "(", "(", "1", "for", "v", "in", "isolates", "(", "G", ")", ")", ")" ]
returns the number of isolates in the graph .
train
false
19,802
def check_fields_past_bounds(header, mapping_data, warnings): desc_field = 'Description' correction = 1 try: desc_field_ix = header.index(desc_field) except ValueError: return warnings for curr_row in range(len(mapping_data)): for curr_col in range(len(mapping_data[curr_row])): if (curr_col > desc_field_ix): warnings.append(('Data field ' + ('%s found after Description column DCTB %d,%d' % (mapping_data[curr_row][curr_col].replace('\n', ''), (curr_row + correction), curr_col)))) return warnings
[ "def", "check_fields_past_bounds", "(", "header", ",", "mapping_data", ",", "warnings", ")", ":", "desc_field", "=", "'Description'", "correction", "=", "1", "try", ":", "desc_field_ix", "=", "header", ".", "index", "(", "desc_field", ")", "except", "ValueError", ":", "return", "warnings", "for", "curr_row", "in", "range", "(", "len", "(", "mapping_data", ")", ")", ":", "for", "curr_col", "in", "range", "(", "len", "(", "mapping_data", "[", "curr_row", "]", ")", ")", ":", "if", "(", "curr_col", ">", "desc_field_ix", ")", ":", "warnings", ".", "append", "(", "(", "'Data field '", "+", "(", "'%s found after Description column DCTB %d,%d'", "%", "(", "mapping_data", "[", "curr_row", "]", "[", "curr_col", "]", ".", "replace", "(", "'\\n'", ",", "''", ")", ",", "(", "curr_row", "+", "correction", ")", ",", "curr_col", ")", ")", ")", ")", "return", "warnings" ]
checks for fields after description header .
train
false
19,804
def intcomma(value): orig = force_unicode(value) new = re.sub('^(-?\\d+)(\\d{3})', '\\g<1>,\\g<2>', orig) if (orig == new): return new else: return intcomma(new)
[ "def", "intcomma", "(", "value", ")", ":", "orig", "=", "force_unicode", "(", "value", ")", "new", "=", "re", ".", "sub", "(", "'^(-?\\\\d+)(\\\\d{3})'", ",", "'\\\\g<1>,\\\\g<2>'", ",", "orig", ")", "if", "(", "orig", "==", "new", ")", ":", "return", "new", "else", ":", "return", "intcomma", "(", "new", ")" ]
converts an integer to a string containing commas every three digits .
train
false
19,805
def certs(): return __salt__['file.readdir'](LE_LIVE)[2:]
[ "def", "certs", "(", ")", ":", "return", "__salt__", "[", "'file.readdir'", "]", "(", "LE_LIVE", ")", "[", "2", ":", "]" ]
return a list of active certificates cli example: .
train
false
19,806
def test_simple_box(): box = Box() box.add('test1', [(-1), 2, 3, 3.1, 3.2, 4, 5]) box.add('test2', [2, 3, 5, 6, 6, 4]) box.title = 'Box test' q = box.render_pyquery() assert (len(q('.axis.y')) == 1) assert (len(q('.legend')) == 2) assert (len(q('.plot .series rect')) == 2)
[ "def", "test_simple_box", "(", ")", ":", "box", "=", "Box", "(", ")", "box", ".", "add", "(", "'test1'", ",", "[", "(", "-", "1", ")", ",", "2", ",", "3", ",", "3.1", ",", "3.2", ",", "4", ",", "5", "]", ")", "box", ".", "add", "(", "'test2'", ",", "[", "2", ",", "3", ",", "5", ",", "6", ",", "6", ",", "4", "]", ")", "box", ".", "title", "=", "'Box test'", "q", "=", "box", ".", "render_pyquery", "(", ")", "assert", "(", "len", "(", "q", "(", "'.axis.y'", ")", ")", "==", "1", ")", "assert", "(", "len", "(", "q", "(", "'.legend'", ")", ")", "==", "2", ")", "assert", "(", "len", "(", "q", "(", "'.plot .series rect'", ")", ")", "==", "2", ")" ]
simple box test .
train
false
19,808
def _write_config(config, newlines=2): text = config if (isinstance(config, dict) and (len(list(list(config.keys()))) == 1)): key = next(six.iterkeys(config)) text = config[key] try: with salt.utils.fopen(__SYSLOG_NG_CONFIG_FILE, 'a') as fha: fha.write(text) for _ in range(0, newlines): fha.write(os.linesep) return True except Exception as err: log.error(str(err)) return False
[ "def", "_write_config", "(", "config", ",", "newlines", "=", "2", ")", ":", "text", "=", "config", "if", "(", "isinstance", "(", "config", ",", "dict", ")", "and", "(", "len", "(", "list", "(", "list", "(", "config", ".", "keys", "(", ")", ")", ")", ")", "==", "1", ")", ")", ":", "key", "=", "next", "(", "six", ".", "iterkeys", "(", "config", ")", ")", "text", "=", "config", "[", "key", "]", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "__SYSLOG_NG_CONFIG_FILE", ",", "'a'", ")", "as", "fha", ":", "fha", ".", "write", "(", "text", ")", "for", "_", "in", "range", "(", "0", ",", "newlines", ")", ":", "fha", ".", "write", "(", "os", ".", "linesep", ")", "return", "True", "except", "Exception", "as", "err", ":", "log", ".", "error", "(", "str", "(", "err", ")", ")", "return", "False" ]
writes /usbkey/config .
train
true
19,811
def setTZ(name): if (tzset is None): return if (name is None): try: del environ['TZ'] except KeyError: pass else: environ['TZ'] = name tzset()
[ "def", "setTZ", "(", "name", ")", ":", "if", "(", "tzset", "is", "None", ")", ":", "return", "if", "(", "name", "is", "None", ")", ":", "try", ":", "del", "environ", "[", "'TZ'", "]", "except", "KeyError", ":", "pass", "else", ":", "environ", "[", "'TZ'", "]", "=", "name", "tzset", "(", ")" ]
set time zone .
train
false
19,812
def get_api_url(base, parts=[], params=None): if (('api' in parts) and (parts.index('api') != 0)): parts.pop(parts.index('api')) parts.insert(0, 'api') elif ('api' not in parts): parts.insert(0, 'api') url = util.build_url(base, pathspec=parts, params=params) return url
[ "def", "get_api_url", "(", "base", ",", "parts", "=", "[", "]", ",", "params", "=", "None", ")", ":", "if", "(", "(", "'api'", "in", "parts", ")", "and", "(", "parts", ".", "index", "(", "'api'", ")", "!=", "0", ")", ")", ":", "parts", ".", "pop", "(", "parts", ".", "index", "(", "'api'", ")", ")", "parts", ".", "insert", "(", "0", ",", "'api'", ")", "elif", "(", "'api'", "not", "in", "parts", ")", ":", "parts", ".", "insert", "(", "0", ",", "'api'", ")", "url", "=", "util", ".", "build_url", "(", "base", ",", "pathspec", "=", "parts", ",", "params", "=", "params", ")", "return", "url" ]
compose and return a url for the tool shed api .
train
false
19,813
def get_keyname(): return get_db_info()[':keyname']
[ "def", "get_keyname", "(", ")", ":", "return", "get_db_info", "(", ")", "[", "':keyname'", "]" ]
returns the keyname for this deployment .
train
false
19,814
def degree_histogram(G): counts = Counter((d for (n, d) in G.degree())) return [counts.get(i, 0) for i in range((max(counts) + 1))]
[ "def", "degree_histogram", "(", "G", ")", ":", "counts", "=", "Counter", "(", "(", "d", "for", "(", "n", ",", "d", ")", "in", "G", ".", "degree", "(", ")", ")", ")", "return", "[", "counts", ".", "get", "(", "i", ",", "0", ")", "for", "i", "in", "range", "(", "(", "max", "(", "counts", ")", "+", "1", ")", ")", "]" ]
return a list of the frequency of each degree value .
train
true
19,816
def NR_ioprio_set(): architecture = os.uname()[4] arch_bits = platform.architecture()[0] if ((architecture == 'x86_64') and (arch_bits == '64bit')): return 251 raise OSError(("Swift doesn't support ionice priority for %s %s" % (architecture, arch_bits)))
[ "def", "NR_ioprio_set", "(", ")", ":", "architecture", "=", "os", ".", "uname", "(", ")", "[", "4", "]", "arch_bits", "=", "platform", ".", "architecture", "(", ")", "[", "0", "]", "if", "(", "(", "architecture", "==", "'x86_64'", ")", "and", "(", "arch_bits", "==", "'64bit'", ")", ")", ":", "return", "251", "raise", "OSError", "(", "(", "\"Swift doesn't support ionice priority for %s %s\"", "%", "(", "architecture", ",", "arch_bits", ")", ")", ")" ]
give __nr_ioprio_set value for your system .
train
false
19,817
def get_mixed_stores(mixed_setting): return mixed_setting['default']['OPTIONS']['stores']
[ "def", "get_mixed_stores", "(", "mixed_setting", ")", ":", "return", "mixed_setting", "[", "'default'", "]", "[", "'OPTIONS'", "]", "[", "'stores'", "]" ]
helper for accessing stores in a configuration setting for the mixed modulestore .
train
false
19,818
def format_table(table, extra_space=1): if (not table): return [[]] max_widths = [max([len(str(val)) for val in col]) for col in table] ftable = [] for irow in range(len(table[0])): ftable.append([(str(col[irow]).ljust(max_widths[icol]) + (' ' * extra_space)) for (icol, col) in enumerate(table)]) return ftable
[ "def", "format_table", "(", "table", ",", "extra_space", "=", "1", ")", ":", "if", "(", "not", "table", ")", ":", "return", "[", "[", "]", "]", "max_widths", "=", "[", "max", "(", "[", "len", "(", "str", "(", "val", ")", ")", "for", "val", "in", "col", "]", ")", "for", "col", "in", "table", "]", "ftable", "=", "[", "]", "for", "irow", "in", "range", "(", "len", "(", "table", "[", "0", "]", ")", ")", ":", "ftable", ".", "append", "(", "[", "(", "str", "(", "col", "[", "irow", "]", ")", ".", "ljust", "(", "max_widths", "[", "icol", "]", ")", "+", "(", "' '", "*", "extra_space", ")", ")", "for", "(", "icol", ",", "col", ")", "in", "enumerate", "(", "table", ")", "]", ")", "return", "ftable" ]
note: evennia .
train
false
19,820
def get_array_module(*args): for arg in args: if isinstance(arg, ndarray): return _cupy return numpy
[ "def", "get_array_module", "(", "*", "args", ")", ":", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "ndarray", ")", ":", "return", "_cupy", "return", "numpy" ]
gets an appropriate one from :mod:numpy or :mod:cupy .
train
false
19,821
def args_split(text): for bit in args_split_re.finditer(text): bit = bit.group(0) if ((bit[0] == u'"') and (bit[(-1)] == u'"')): (yield ((u'"' + bit[1:(-1)].replace(u'\\"', u'"').replace(u'\\\\', u'\\')) + u'"')) elif ((bit[0] == u"'") and (bit[(-1)] == u"'")): (yield ((u"'" + bit[1:(-1)].replace(u"\\'", u"'").replace(u'\\\\', u'\\')) + u"'")) else: (yield bit)
[ "def", "args_split", "(", "text", ")", ":", "for", "bit", "in", "args_split_re", ".", "finditer", "(", "text", ")", ":", "bit", "=", "bit", ".", "group", "(", "0", ")", "if", "(", "(", "bit", "[", "0", "]", "==", "u'\"'", ")", "and", "(", "bit", "[", "(", "-", "1", ")", "]", "==", "u'\"'", ")", ")", ":", "(", "yield", "(", "(", "u'\"'", "+", "bit", "[", "1", ":", "(", "-", "1", ")", "]", ".", "replace", "(", "u'\\\\\"'", ",", "u'\"'", ")", ".", "replace", "(", "u'\\\\\\\\'", ",", "u'\\\\'", ")", ")", "+", "u'\"'", ")", ")", "elif", "(", "(", "bit", "[", "0", "]", "==", "u\"'\"", ")", "and", "(", "bit", "[", "(", "-", "1", ")", "]", "==", "u\"'\"", ")", ")", ":", "(", "yield", "(", "(", "u\"'\"", "+", "bit", "[", "1", ":", "(", "-", "1", ")", "]", ".", "replace", "(", "u\"\\\\'\"", ",", "u\"'\"", ")", ".", "replace", "(", "u'\\\\\\\\'", ",", "u'\\\\'", ")", ")", "+", "u\"'\"", ")", ")", "else", ":", "(", "yield", "bit", ")" ]
split space-separated key=value arguments .
train
false
19,824
def code_feed_reader(project, mode='div'): url = ('http://code.google.com/feeds/p/%s/hgchanges/basic' % project) from gluon.contrib import feedparser g = feedparser.parse(url) if (mode == 'div'): html = XML(DIV(UL(*[LI(A(entry['title'], _href=entry['link'], _target='_blank')) for entry in g['entries'][0:5]]), _class='boxInfo', _style='padding-bottom:5px;')) else: html = XML(UL(*[LI(A(entry['title'], _href=entry['link'], _target='_blank')) for entry in g['entries'][0:5]])) return html
[ "def", "code_feed_reader", "(", "project", ",", "mode", "=", "'div'", ")", ":", "url", "=", "(", "'http://code.google.com/feeds/p/%s/hgchanges/basic'", "%", "project", ")", "from", "gluon", ".", "contrib", "import", "feedparser", "g", "=", "feedparser", ".", "parse", "(", "url", ")", "if", "(", "mode", "==", "'div'", ")", ":", "html", "=", "XML", "(", "DIV", "(", "UL", "(", "*", "[", "LI", "(", "A", "(", "entry", "[", "'title'", "]", ",", "_href", "=", "entry", "[", "'link'", "]", ",", "_target", "=", "'_blank'", ")", ")", "for", "entry", "in", "g", "[", "'entries'", "]", "[", "0", ":", "5", "]", "]", ")", ",", "_class", "=", "'boxInfo'", ",", "_style", "=", "'padding-bottom:5px;'", ")", ")", "else", ":", "html", "=", "XML", "(", "UL", "(", "*", "[", "LI", "(", "A", "(", "entry", "[", "'title'", "]", ",", "_href", "=", "entry", "[", "'link'", "]", ",", "_target", "=", "'_blank'", ")", ")", "for", "entry", "in", "g", "[", "'entries'", "]", "[", "0", ":", "5", "]", "]", ")", ")", "return", "html" ]
parse code feeds .
train
false
19,825
def get_default_proxy(): return socksocket.default_proxy
[ "def", "get_default_proxy", "(", ")", ":", "return", "socksocket", ".", "default_proxy" ]
returns the default proxy .
train
false
19,826
def _safe_regex_mysql(raw_string): return raw_string.replace('|', '\\|')
[ "def", "_safe_regex_mysql", "(", "raw_string", ")", ":", "return", "raw_string", ".", "replace", "(", "'|'", ",", "'\\\\|'", ")" ]
make regex safe to mysql .
train
false
19,827
def download_and_unzip_files(source_url, target_parent_dir, zip_root_name, target_root_name): if (not os.path.exists(os.path.join(target_parent_dir, target_root_name))): print ('Downloading and unzipping file %s to %s' % (zip_root_name, target_parent_dir)) common.ensure_directory_exists(target_parent_dir) urllib.urlretrieve(source_url, TMP_UNZIP_PATH) try: with zipfile.ZipFile(TMP_UNZIP_PATH, 'r') as zfile: zfile.extractall(target_parent_dir) os.remove(TMP_UNZIP_PATH) except Exception: if os.path.exists(TMP_UNZIP_PATH): os.remove(TMP_UNZIP_PATH) req = urllib2.Request(source_url) req.add_header('User-agent', 'python') file_stream = StringIO.StringIO(urllib2.urlopen(req).read()) with zipfile.ZipFile(file_stream, 'r') as zfile: zfile.extractall(target_parent_dir) os.rename(os.path.join(target_parent_dir, zip_root_name), os.path.join(target_parent_dir, target_root_name))
[ "def", "download_and_unzip_files", "(", "source_url", ",", "target_parent_dir", ",", "zip_root_name", ",", "target_root_name", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "target_parent_dir", ",", "target_root_name", ")", ")", ")", ":", "print", "(", "'Downloading and unzipping file %s to %s'", "%", "(", "zip_root_name", ",", "target_parent_dir", ")", ")", "common", ".", "ensure_directory_exists", "(", "target_parent_dir", ")", "urllib", ".", "urlretrieve", "(", "source_url", ",", "TMP_UNZIP_PATH", ")", "try", ":", "with", "zipfile", ".", "ZipFile", "(", "TMP_UNZIP_PATH", ",", "'r'", ")", "as", "zfile", ":", "zfile", ".", "extractall", "(", "target_parent_dir", ")", "os", ".", "remove", "(", "TMP_UNZIP_PATH", ")", "except", "Exception", ":", "if", "os", ".", "path", ".", "exists", "(", "TMP_UNZIP_PATH", ")", ":", "os", ".", "remove", "(", "TMP_UNZIP_PATH", ")", "req", "=", "urllib2", ".", "Request", "(", "source_url", ")", "req", ".", "add_header", "(", "'User-agent'", ",", "'python'", ")", "file_stream", "=", "StringIO", ".", "StringIO", "(", "urllib2", ".", "urlopen", "(", "req", ")", ".", "read", "(", ")", ")", "with", "zipfile", ".", "ZipFile", "(", "file_stream", ",", "'r'", ")", "as", "zfile", ":", "zfile", ".", "extractall", "(", "target_parent_dir", ")", "os", ".", "rename", "(", "os", ".", "path", ".", "join", "(", "target_parent_dir", ",", "zip_root_name", ")", ",", "os", ".", "path", ".", "join", "(", "target_parent_dir", ",", "target_root_name", ")", ")" ]
downloads a zip file .
train
false