column                type           min        max
id_within_dataset     int64          1          55.5k
snippet               stringlengths  19         14.2k
tokens                listlengths    6          1.63k
nl                    stringlengths  6          352
split_within_dataset  stringclasses  1 value
is_duplicated         bool           2 classes
4,047
@core_helper
def redirect_to(*args, **kw):
    if are_there_flash_messages():
        kw['__no_cache__'] = True
    uargs = map((lambda arg: (str(arg) if isinstance(arg, unicode) else arg)), args)
    _url = url_for(*uargs, **kw)
    if _url.startswith('/'):
        _url = str((config['ckan.site_url'].rstrip('/') + _url))
    if is_flask_request():
        return _flask_redirect(_url)
    else:
        return _routes_redirect_to(_url)
[ "@", "core_helper", "def", "redirect_to", "(", "*", "args", ",", "**", "kw", ")", ":", "if", "are_there_flash_messages", "(", ")", ":", "kw", "[", "'__no_cache__'", "]", "=", "True", "uargs", "=", "map", "(", "(", "lambda", "arg", ":", "(", "str", "(", "arg", ")", "if", "isinstance", "(", "arg", ",", "unicode", ")", "else", "arg", ")", ")", ",", "args", ")", "_url", "=", "url_for", "(", "*", "uargs", ",", "**", "kw", ")", "if", "_url", ".", "startswith", "(", "'/'", ")", ":", "_url", "=", "str", "(", "(", "config", "[", "'ckan.site_url'", "]", ".", "rstrip", "(", "'/'", ")", "+", "_url", ")", ")", "if", "is_flask_request", "(", ")", ":", "return", "_flask_redirect", "(", "_url", ")", "else", ":", "return", "_routes_redirect_to", "(", "_url", ")" ]
convenience function mixing :func:`redirect` and :func:`uri_for` .
train
false
4,048
def get_favorite_info(user, content_object):
    result = {}
    url_content_type = type(content_object).__name__.lower()
    result['add_url'] = reverse('add_favorite_{}'.format(url_content_type), args=[content_object.pk])
    existing_favorite = models.Favorite.objects.favorite_for_user_and_content_object(user, content_object)
    if existing_favorite:
        result['has_favorite'] = 'true'
        result['delete_url'] = reverse('delete_favorite', args=[existing_favorite.pk])
    else:
        result['has_favorite'] = 'false'
    return result
[ "def", "get_favorite_info", "(", "user", ",", "content_object", ")", ":", "result", "=", "{", "}", "url_content_type", "=", "type", "(", "content_object", ")", ".", "__name__", ".", "lower", "(", ")", "result", "[", "'add_url'", "]", "=", "reverse", "(", "'add_favorite_{}'", ".", "format", "(", "url_content_type", ")", ",", "args", "=", "[", "content_object", ".", "pk", "]", ")", "existing_favorite", "=", "models", ".", "Favorite", ".", "objects", ".", "favorite_for_user_and_content_object", "(", "user", ",", "content_object", ")", "if", "existing_favorite", ":", "result", "[", "'has_favorite'", "]", "=", "'true'", "result", "[", "'delete_url'", "]", "=", "reverse", "(", "'delete_favorite'", ",", "args", "=", "[", "existing_favorite", ".", "pk", "]", ")", "else", ":", "result", "[", "'has_favorite'", "]", "=", "'false'", "return", "result" ]
return favorite info dict containing: a .
train
false
4,049
def has_no_date(at):
    if isinstance(at, datetime):
        return False
    return ((at.year is None) and (at.month is None) and (at.day is None))
[ "def", "has_no_date", "(", "at", ")", ":", "if", "isinstance", "(", "at", ",", "datetime", ")", ":", "return", "False", "return", "(", "(", "at", ".", "year", "is", "None", ")", "and", "(", "at", ".", "month", "is", "None", ")", "and", "(", "at", ".", "day", "is", "None", ")", ")" ]
returns true if the given object is an adatetime where year .
train
false
4,050
def pc(key):
    return ''.join([token.capitalize() for token in key.split('_')])
[ "def", "pc", "(", "key", ")", ":", "return", "''", ".", "join", "(", "[", "token", ".", "capitalize", "(", ")", "for", "token", "in", "key", ".", "split", "(", "'_'", ")", "]", ")" ]
changes python key into pascal case equivalent .
train
false
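A quick usage sketch for the pc helper above; the function body is repeated verbatim so the example runs on its own.

def pc(key):
    return ''.join([token.capitalize() for token in key.split('_')])

assert pc('user_name') == 'UserName'   # snake_case -> PascalCase
assert pc('id') == 'Id'                # a single token is just capitalized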
4,051
@treeio_login_required
def ajax_contact_lookup(request, response_format='html'):
    contacts = []
    if (request.GET and ('term' in request.GET)):
        user = request.user.profile
        contacts = Object.filter_permitted(user, Contact.objects, mode='x').filter(Q(name__icontains=request.GET['term']))[:10]
    return render_to_response('identities/ajax_contact_lookup', {'contacts': contacts}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "ajax_contact_lookup", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "contacts", "=", "[", "]", "if", "(", "request", ".", "GET", "and", "(", "'term'", "in", "request", ".", "GET", ")", ")", ":", "user", "=", "request", ".", "user", ".", "profile", "contacts", "=", "Object", ".", "filter_permitted", "(", "user", ",", "Contact", ".", "objects", ",", "mode", "=", "'x'", ")", ".", "filter", "(", "Q", "(", "name__icontains", "=", "request", ".", "GET", "[", "'term'", "]", ")", ")", "[", ":", "10", "]", "return", "render_to_response", "(", "'identities/ajax_contact_lookup'", ",", "{", "'contacts'", ":", "contacts", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
returns a list of matching contacts .
train
false
4,052
def dir(suffix='', prefix='tmp', parent=None):
    return tempfile.mkdtemp(suffix, prefix, parent)
[ "def", "dir", "(", "suffix", "=", "''", ",", "prefix", "=", "'tmp'", ",", "parent", "=", "None", ")", ":", "return", "tempfile", ".", "mkdtemp", "(", "suffix", ",", "prefix", ",", "parent", ")" ]
a version of dir() that supports netproxies .
train
false
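The wrapper above simply forwards to tempfile.mkdtemp, so a minimal demonstration of the same call looks like this (standard library only):

import os
import tempfile

# Same positional call the wrapper makes: mkdtemp(suffix, prefix, dir).
path = tempfile.mkdtemp('', 'tmp', None)
assert os.path.isdir(path) and os.path.basename(path).startswith('tmp')
os.rmdir(path)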
4,053
def subscription_check():
    subscriptions = Subscription.objects.all()
    for subscription in subscriptions:
        subscription.check_status()
[ "def", "subscription_check", "(", ")", ":", "subscriptions", "=", "Subscription", ".", "objects", ".", "all", "(", ")", "for", "subscription", "in", "subscriptions", ":", "subscription", ".", "check_status", "(", ")" ]
automatically depreciate assets as per their depreciation rate .
train
false
4,054
def make_psycopg_green():
    if (not hasattr(extensions, 'set_wait_callback')):
        raise ImportError(('support for coroutines not available in this Psycopg version (%s)' % psycopg2.__version__))
    extensions.set_wait_callback(gevent_wait_callback)
[ "def", "make_psycopg_green", "(", ")", ":", "if", "(", "not", "hasattr", "(", "extensions", ",", "'set_wait_callback'", ")", ")", ":", "raise", "ImportError", "(", "(", "'support for coroutines not available in this Psycopg version (%s)'", "%", "psycopg2", ".", "__version__", ")", ")", "extensions", ".", "set_wait_callback", "(", "gevent_wait_callback", ")" ]
configure psycopg to be used with gevent in non-blocking way .
train
false
4,055
def crop_async(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG, quality=None, correct_orientation=UNCHANGED_ORIENTATION, rpc=None, transparent_substitution_rgb=None):
    image = Image(image_data)
    image.crop(left_x, top_y, right_x, bottom_y)
    image.set_correct_orientation(correct_orientation)
    return image.execute_transforms_async(output_encoding=output_encoding, quality=quality, rpc=rpc, transparent_substitution_rgb=transparent_substitution_rgb)
[ "def", "crop_async", "(", "image_data", ",", "left_x", ",", "top_y", ",", "right_x", ",", "bottom_y", ",", "output_encoding", "=", "PNG", ",", "quality", "=", "None", ",", "correct_orientation", "=", "UNCHANGED_ORIENTATION", ",", "rpc", "=", "None", ",", "transparent_substitution_rgb", "=", "None", ")", ":", "image", "=", "Image", "(", "image_data", ")", "image", ".", "crop", "(", "left_x", ",", "top_y", ",", "right_x", ",", "bottom_y", ")", "image", ".", "set_correct_orientation", "(", "correct_orientation", ")", "return", "image", ".", "execute_transforms_async", "(", "output_encoding", "=", "output_encoding", ",", "quality", "=", "quality", ",", "rpc", "=", "rpc", ",", "transparent_substitution_rgb", "=", "transparent_substitution_rgb", ")" ]
crop the given image - async version .
train
false
4,056
def _setwindowsize(folder_alias, (w, h)):
    finder = _getfinder()
    args = {}
    attrs = {}
    _code = 'core'
    _subcode = 'setd'
    aevar00 = [w, h]
    aeobj_0 = aetypes.ObjectSpecifier(want=aetypes.Type('cfol'), form='alis', seld=folder_alias, fr=None)
    aeobj_1 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('cwnd'), fr=aeobj_0)
    aeobj_2 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('ptsz'), fr=aeobj_1)
    args['----'] = aeobj_2
    args['data'] = aevar00
    (_reply, args, attrs) = finder.send(_code, _subcode, args, attrs)
    if ('errn' in args):
        raise Error, aetools.decodeerror(args)
    return (w, h)
[ "def", "_setwindowsize", "(", "folder_alias", ",", "(", "w", ",", "h", ")", ")", ":", "finder", "=", "_getfinder", "(", ")", "args", "=", "{", "}", "attrs", "=", "{", "}", "_code", "=", "'core'", "_subcode", "=", "'setd'", "aevar00", "=", "[", "w", ",", "h", "]", "aeobj_0", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'cfol'", ")", ",", "form", "=", "'alis'", ",", "seld", "=", "folder_alias", ",", "fr", "=", "None", ")", "aeobj_1", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'prop'", ")", ",", "form", "=", "'prop'", ",", "seld", "=", "aetypes", ".", "Type", "(", "'cwnd'", ")", ",", "fr", "=", "aeobj_0", ")", "aeobj_2", "=", "aetypes", ".", "ObjectSpecifier", "(", "want", "=", "aetypes", ".", "Type", "(", "'prop'", ")", ",", "form", "=", "'prop'", ",", "seld", "=", "aetypes", ".", "Type", "(", "'ptsz'", ")", ",", "fr", "=", "aeobj_1", ")", "args", "[", "'----'", "]", "=", "aeobj_2", "args", "[", "'data'", "]", "=", "aevar00", "(", "_reply", ",", "args", ",", "attrs", ")", "=", "finder", ".", "send", "(", "_code", ",", "_subcode", ",", "args", ",", "attrs", ")", "if", "(", "'errn'", "in", "args", ")", ":", "raise", "Error", ",", "aetools", ".", "decodeerror", "(", "args", ")", "return", "(", "w", ",", "h", ")" ]
set the size of a finder window for folder to .
train
false
4,057
def hash_napiprojekt(video_path):
    readsize = ((1024 * 1024) * 10)
    with open(video_path, 'rb') as f:
        data = f.read(readsize)
    return hashlib.md5(data).hexdigest()
[ "def", "hash_napiprojekt", "(", "video_path", ")", ":", "readsize", "=", "(", "(", "1024", "*", "1024", ")", "*", "10", ")", "with", "open", "(", "video_path", ",", "'rb'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", "readsize", ")", "return", "hashlib", ".", "md5", "(", "data", ")", ".", "hexdigest", "(", ")" ]
compute a hash using napiprojekt's algorithm .
train
true
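A self-contained sketch of the same scheme as hash_napiprojekt above — an MD5 digest over at most the first 10 MiB of the file — run here against a throwaway temp file rather than a real video:

import hashlib
import os
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(b'stand-in for video bytes')
    path = f.name

with open(path, 'rb') as f:
    data = f.read(10 * 1024 * 1024)   # at most the first 10 MiB
print(hashlib.md5(data).hexdigest())
os.unlink(path)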
4,059
def _get_gcp_ansible_credentials(module):
    service_account_email = module.params.get('service_account_email', None)
    credentials_file = (module.params.get('pem_file', None) or module.params.get('credentials_file', None))
    project_id = module.params.get('project_id', None)
    return (service_account_email, credentials_file, project_id)
[ "def", "_get_gcp_ansible_credentials", "(", "module", ")", ":", "service_account_email", "=", "module", ".", "params", ".", "get", "(", "'service_account_email'", ",", "None", ")", "credentials_file", "=", "(", "module", ".", "params", ".", "get", "(", "'pem_file'", ",", "None", ")", "or", "module", ".", "params", ".", "get", "(", "'credentials_file'", ",", "None", ")", ")", "project_id", "=", "module", ".", "params", ".", "get", "(", "'project_id'", ",", "None", ")", "return", "(", "service_account_email", ",", "credentials_file", ",", "project_id", ")" ]
helper to fetch creds from ansiblemodule object .
train
false
4,061
def _get_current_task():
    return current_task
[ "def", "_get_current_task", "(", ")", ":", "return", "current_task" ]
stub to make it easier to test without actually running celery .
train
false
4,062
def replace_static_urls(text, data_directory=None, course_id=None, static_asset_path=''):

    def replace_static_url(original, prefix, quote, rest):
        """
        Replace a single matched url.
        """
        if rest.endswith('?raw'):
            return original
        if (settings.DEBUG and finders.find(rest, True)):
            return original
        elif ((not static_asset_path) and course_id):
            exists_in_staticfiles_storage = False
            try:
                exists_in_staticfiles_storage = staticfiles_storage.exists(rest)
            except Exception as err:
                log.warning("staticfiles_storage couldn't find path {0}: {1}".format(rest, str(err)))
            if exists_in_staticfiles_storage:
                url = staticfiles_storage.url(rest)
            else:
                base_url = AssetBaseUrlConfig.get_base_url()
                excluded_exts = AssetExcludedExtensionsConfig.get_excluded_extensions()
                url = StaticContent.get_canonicalized_asset_path(course_id, rest, base_url, excluded_exts)
                if (AssetLocator.CANONICAL_NAMESPACE in url):
                    url = url.replace('block@', 'block/', 1)
        else:
            course_path = '/'.join(((static_asset_path or data_directory), rest))
            try:
                if staticfiles_storage.exists(rest):
                    url = staticfiles_storage.url(rest)
                else:
                    url = staticfiles_storage.url(course_path)
            except Exception as err:
                log.warning("staticfiles_storage couldn't find path {0}: {1}".format(rest, str(err)))
                url = ''.join([prefix, course_path])
        return ''.join([quote, url, quote])

    return process_static_urls(text, replace_static_url, data_dir=(static_asset_path or data_directory))
[ "def", "replace_static_urls", "(", "text", ",", "data_directory", "=", "None", ",", "course_id", "=", "None", ",", "static_asset_path", "=", "''", ")", ":", "def", "replace_static_url", "(", "original", ",", "prefix", ",", "quote", ",", "rest", ")", ":", "if", "rest", ".", "endswith", "(", "'?raw'", ")", ":", "return", "original", "if", "(", "settings", ".", "DEBUG", "and", "finders", ".", "find", "(", "rest", ",", "True", ")", ")", ":", "return", "original", "elif", "(", "(", "not", "static_asset_path", ")", "and", "course_id", ")", ":", "exists_in_staticfiles_storage", "=", "False", "try", ":", "exists_in_staticfiles_storage", "=", "staticfiles_storage", ".", "exists", "(", "rest", ")", "except", "Exception", "as", "err", ":", "log", ".", "warning", "(", "\"staticfiles_storage couldn't find path {0}: {1}\"", ".", "format", "(", "rest", ",", "str", "(", "err", ")", ")", ")", "if", "exists_in_staticfiles_storage", ":", "url", "=", "staticfiles_storage", ".", "url", "(", "rest", ")", "else", ":", "base_url", "=", "AssetBaseUrlConfig", ".", "get_base_url", "(", ")", "excluded_exts", "=", "AssetExcludedExtensionsConfig", ".", "get_excluded_extensions", "(", ")", "url", "=", "StaticContent", ".", "get_canonicalized_asset_path", "(", "course_id", ",", "rest", ",", "base_url", ",", "excluded_exts", ")", "if", "(", "AssetLocator", ".", "CANONICAL_NAMESPACE", "in", "url", ")", ":", "url", "=", "url", ".", "replace", "(", "'block@'", ",", "'block/'", ",", "1", ")", "else", ":", "course_path", "=", "'/'", ".", "join", "(", "(", "(", "static_asset_path", "or", "data_directory", ")", ",", "rest", ")", ")", "try", ":", "if", "staticfiles_storage", ".", "exists", "(", "rest", ")", ":", "url", "=", "staticfiles_storage", ".", "url", "(", "rest", ")", "else", ":", "url", "=", "staticfiles_storage", ".", "url", "(", "course_path", ")", "except", "Exception", "as", "err", ":", "log", ".", "warning", "(", "\"staticfiles_storage couldn't find path {0}: {1}\"", ".", "format", "(", "rest", ",", "str", "(", "err", ")", ")", ")", "url", "=", "''", ".", "join", "(", "[", "prefix", ",", "course_path", "]", ")", "return", "''", ".", "join", "(", "[", "quote", ",", "url", ",", "quote", "]", ")", "return", "process_static_urls", "(", "text", ",", "replace_static_url", ",", "data_dir", "=", "(", "static_asset_path", "or", "data_directory", ")", ")" ]
updates the supplied module with a new get_html function that wraps the old get_html function and substitutes urls of the form /static/ .
train
false
4,063
def mp_icon(filename):
    try:
        import pkg_resources
        name = __name__
        if (name == '__main__'):
            name = 'MAVProxy.modules.mavproxy_map.mp_tile'
        stream = pkg_resources.resource_stream(name, ('data/%s' % filename)).read()
        raw = np.fromstring(stream, dtype=np.uint8)
    except Exception:
        stream = open(os.path.join(__file__, 'data', filename)).read()
        raw = np.fromstring(stream, dtype=np.uint8)
    img = cv2.imdecode(raw, cv2.IMREAD_COLOR)
    return img
[ "def", "mp_icon", "(", "filename", ")", ":", "try", ":", "import", "pkg_resources", "name", "=", "__name__", "if", "(", "name", "==", "'__main__'", ")", ":", "name", "=", "'MAVProxy.modules.mavproxy_map.mp_tile'", "stream", "=", "pkg_resources", ".", "resource_stream", "(", "name", ",", "(", "'data/%s'", "%", "filename", ")", ")", ".", "read", "(", ")", "raw", "=", "np", ".", "fromstring", "(", "stream", ",", "dtype", "=", "np", ".", "uint8", ")", "except", "Exception", ":", "stream", "=", "open", "(", "os", ".", "path", ".", "join", "(", "__file__", ",", "'data'", ",", "filename", ")", ")", ".", "read", "(", ")", "raw", "=", "np", ".", "fromstring", "(", "stream", ",", "dtype", "=", "np", ".", "uint8", ")", "img", "=", "cv2", ".", "imdecode", "(", "raw", ",", "cv2", ".", "IMREAD_COLOR", ")", "return", "img" ]
load an icon from the data directory .
train
true
4,064
def getSettingString(lines, procedureName, settingNameStart):
    settingNameStart = settingNameStart.replace(' ', '_')
    for line in lines:
        splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
        firstWord = None
        if (len(splitLine) > 0):
            firstWord = splitLine[0]
        if (firstWord == '(<setting>'):
            if (len(splitLine) > 4):
                if ((splitLine[1] == procedureName) and splitLine[2].startswith(settingNameStart)):
                    return splitLine[3]
        elif (firstWord == '(</settings>)'):
            return None
    return None
[ "def", "getSettingString", "(", "lines", ",", "procedureName", ",", "settingNameStart", ")", ":", "settingNameStart", "=", "settingNameStart", ".", "replace", "(", "' '", ",", "'_'", ")", "for", "line", "in", "lines", ":", "splitLine", "=", "gcodec", ".", "getSplitLineBeforeBracketSemicolon", "(", "line", ")", "firstWord", "=", "None", "if", "(", "len", "(", "splitLine", ")", ">", "0", ")", ":", "firstWord", "=", "splitLine", "[", "0", "]", "if", "(", "firstWord", "==", "'(<setting>'", ")", ":", "if", "(", "len", "(", "splitLine", ")", ">", "4", ")", ":", "if", "(", "(", "splitLine", "[", "1", "]", "==", "procedureName", ")", "and", "splitLine", "[", "2", "]", ".", "startswith", "(", "settingNameStart", ")", ")", ":", "return", "splitLine", "[", "3", "]", "elif", "(", "firstWord", "==", "'(</settings>)'", ")", ":", "return", "None", "return", "None" ]
get the setting value from the lines .
train
false
4,065
def find_pylintrc():
    if exists('pylintrc'):
        return abspath('pylintrc')
    if isfile('__init__.py'):
        curdir = abspath(os.getcwd())
        while isfile(join(curdir, '__init__.py')):
            curdir = abspath(join(curdir, '..'))
            if isfile(join(curdir, 'pylintrc')):
                return join(curdir, 'pylintrc')
    if (('PYLINTRC' in os.environ) and exists(os.environ['PYLINTRC'])):
        pylintrc = os.environ['PYLINTRC']
    else:
        user_home = expanduser('~')
        if ((user_home == '~') or (user_home == '/root')):
            pylintrc = '.pylintrc'
        else:
            pylintrc = join(user_home, '.pylintrc')
            if (not isfile(pylintrc)):
                pylintrc = join(user_home, '.config', 'pylintrc')
    if (not isfile(pylintrc)):
        if isfile('/etc/pylintrc'):
            pylintrc = '/etc/pylintrc'
        else:
            pylintrc = None
    return pylintrc
[ "def", "find_pylintrc", "(", ")", ":", "if", "exists", "(", "'pylintrc'", ")", ":", "return", "abspath", "(", "'pylintrc'", ")", "if", "isfile", "(", "'__init__.py'", ")", ":", "curdir", "=", "abspath", "(", "os", ".", "getcwd", "(", ")", ")", "while", "isfile", "(", "join", "(", "curdir", ",", "'__init__.py'", ")", ")", ":", "curdir", "=", "abspath", "(", "join", "(", "curdir", ",", "'..'", ")", ")", "if", "isfile", "(", "join", "(", "curdir", ",", "'pylintrc'", ")", ")", ":", "return", "join", "(", "curdir", ",", "'pylintrc'", ")", "if", "(", "(", "'PYLINTRC'", "in", "os", ".", "environ", ")", "and", "exists", "(", "os", ".", "environ", "[", "'PYLINTRC'", "]", ")", ")", ":", "pylintrc", "=", "os", ".", "environ", "[", "'PYLINTRC'", "]", "else", ":", "user_home", "=", "expanduser", "(", "'~'", ")", "if", "(", "(", "user_home", "==", "'~'", ")", "or", "(", "user_home", "==", "'/root'", ")", ")", ":", "pylintrc", "=", "'.pylintrc'", "else", ":", "pylintrc", "=", "join", "(", "user_home", ",", "'.pylintrc'", ")", "if", "(", "not", "isfile", "(", "pylintrc", ")", ")", ":", "pylintrc", "=", "join", "(", "user_home", ",", "'.config'", ",", "'pylintrc'", ")", "if", "(", "not", "isfile", "(", "pylintrc", ")", ")", ":", "if", "isfile", "(", "'/etc/pylintrc'", ")", ":", "pylintrc", "=", "'/etc/pylintrc'", "else", ":", "pylintrc", "=", "None", "return", "pylintrc" ]
search the pylint rc file and return its path if it finds it .
train
false
4,066
@task
@needs('pavelib.prereqs.install_coverage_prereqs')
@cmdopts([('compare-branch=', 'b', 'Branch to compare against, defaults to origin/master'), ('compare_branch=', None, 'deprecated in favor of compare-branch')], share_with=['coverage'])
@timed
def diff_coverage(options):
    compare_branch = options.get('compare_branch', 'origin/master')
    xml_reports = []
    for filepath in Env.REPORT_DIR.walk():
        if bool(re.match('^coverage.*\\.xml$', filepath.basename())):
            xml_reports.append(filepath)
    if (not xml_reports):
        err_msg = colorize('red', 'No coverage info found. Run `paver test` before running `paver coverage`.\n')
        sys.stderr.write(err_msg)
    else:
        xml_report_str = ' '.join(xml_reports)
        diff_html_path = os.path.join(Env.REPORT_DIR, 'diff_coverage_combined.html')
        sh('diff-cover {xml_report_str} --compare-branch={compare_branch} --html-report {diff_html_path}'.format(xml_report_str=xml_report_str, compare_branch=compare_branch, diff_html_path=diff_html_path))
        print '\n'
[ "@", "task", "@", "needs", "(", "'pavelib.prereqs.install_coverage_prereqs'", ")", "@", "cmdopts", "(", "[", "(", "'compare-branch='", ",", "'b'", ",", "'Branch to compare against, defaults to origin/master'", ")", ",", "(", "'compare_branch='", ",", "None", ",", "'deprecated in favor of compare-branch'", ")", "]", ",", "share_with", "=", "[", "'coverage'", "]", ")", "@", "timed", "def", "diff_coverage", "(", "options", ")", ":", "compare_branch", "=", "options", ".", "get", "(", "'compare_branch'", ",", "'origin/master'", ")", "xml_reports", "=", "[", "]", "for", "filepath", "in", "Env", ".", "REPORT_DIR", ".", "walk", "(", ")", ":", "if", "bool", "(", "re", ".", "match", "(", "'^coverage.*\\\\.xml$'", ",", "filepath", ".", "basename", "(", ")", ")", ")", ":", "xml_reports", ".", "append", "(", "filepath", ")", "if", "(", "not", "xml_reports", ")", ":", "err_msg", "=", "colorize", "(", "'red'", ",", "'No coverage info found. Run `paver test` before running `paver coverage`.\\n'", ")", "sys", ".", "stderr", ".", "write", "(", "err_msg", ")", "else", ":", "xml_report_str", "=", "' '", ".", "join", "(", "xml_reports", ")", "diff_html_path", "=", "os", ".", "path", ".", "join", "(", "Env", ".", "REPORT_DIR", ",", "'diff_coverage_combined.html'", ")", "sh", "(", "'diff-cover {xml_report_str} --compare-branch={compare_branch} --html-report {diff_html_path}'", ".", "format", "(", "xml_report_str", "=", "xml_report_str", ",", "compare_branch", "=", "compare_branch", ",", "diff_html_path", "=", "diff_html_path", ")", ")", "print", "'\\n'" ]
build the diff coverage reports .
train
false
4,067
def save_json(filename, data):
    mode = u'w'
    if (sys.version_info[0] < 3):
        mode = u'wb'
    with open(filename, mode) as fp:
        json.dump(data, fp, sort_keys=True, indent=4)
[ "def", "save_json", "(", "filename", ",", "data", ")", ":", "mode", "=", "u'w'", "if", "(", "sys", ".", "version_info", "[", "0", "]", "<", "3", ")", ":", "mode", "=", "u'wb'", "with", "open", "(", "filename", ",", "mode", ")", "as", "fp", ":", "json", ".", "dump", "(", "data", ",", "fp", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ")" ]
save data to a json file parameters filename : str filename to save data in .
train
false
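A hedged usage sketch for save_json above, keeping only its Python 3 branch (text mode, sorted keys, 4-space indent):

import json

def save_json(filename, data):
    with open(filename, 'w') as fp:
        json.dump(data, fp, sort_keys=True, indent=4)

save_json('/tmp/demo.json', {'b': 2, 'a': 1})
# sort_keys=True means "a" is written before "b"
print(open('/tmp/demo.json').read())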
4,068
def sys_encode(thing):
    if isinstance(thing, unicode_str):
        return thing.encode(ENCODING)
    return thing
[ "def", "sys_encode", "(", "thing", ")", ":", "if", "isinstance", "(", "thing", ",", "unicode_str", ")", ":", "return", "thing", ".", "encode", "(", "ENCODING", ")", "return", "thing" ]
return bytes encoded in the systems encoding .
train
false
4,069
def _api_queue_rename(output, value, kwargs):
    value2 = kwargs.get('value2')
    value3 = kwargs.get('value3')
    if (value and value2):
        ret = NzbQueue.do.change_name(value, special_fixer(value2), special_fixer(value3))
        return report(output, keyword='', data={'status': ret})
    else:
        return report(output, _MSG_NO_VALUE2)
[ "def", "_api_queue_rename", "(", "output", ",", "value", ",", "kwargs", ")", ":", "value2", "=", "kwargs", ".", "get", "(", "'value2'", ")", "value3", "=", "kwargs", ".", "get", "(", "'value3'", ")", "if", "(", "value", "and", "value2", ")", ":", "ret", "=", "NzbQueue", ".", "do", ".", "change_name", "(", "value", ",", "special_fixer", "(", "value2", ")", ",", "special_fixer", "(", "value3", ")", ")", "return", "report", "(", "output", ",", "keyword", "=", "''", ",", "data", "=", "{", "'status'", ":", "ret", "}", ")", "else", ":", "return", "report", "(", "output", ",", "_MSG_NO_VALUE2", ")" ]
api: accepts output .
train
false
4,070
@skipif((not is_installed('networkx')))
def test_ncut_stable_subgraph():
    img = np.zeros((100, 100, 3), dtype='uint8')
    labels = np.zeros((100, 100), dtype='uint8')
    labels[...] = 0
    labels[:50, :50] = 1
    labels[:50, 50:] = 2
    rag = graph.rag_mean_color(img, labels, mode='similarity')
    new_labels = graph.cut_normalized(labels, rag, in_place=False)
    (new_labels, _, _) = segmentation.relabel_sequential(new_labels)
    assert (new_labels.max() == 0)
[ "@", "skipif", "(", "(", "not", "is_installed", "(", "'networkx'", ")", ")", ")", "def", "test_ncut_stable_subgraph", "(", ")", ":", "img", "=", "np", ".", "zeros", "(", "(", "100", ",", "100", ",", "3", ")", ",", "dtype", "=", "'uint8'", ")", "labels", "=", "np", ".", "zeros", "(", "(", "100", ",", "100", ")", ",", "dtype", "=", "'uint8'", ")", "labels", "[", "...", "]", "=", "0", "labels", "[", ":", "50", ",", ":", "50", "]", "=", "1", "labels", "[", ":", "50", ",", "50", ":", "]", "=", "2", "rag", "=", "graph", ".", "rag_mean_color", "(", "img", ",", "labels", ",", "mode", "=", "'similarity'", ")", "new_labels", "=", "graph", ".", "cut_normalized", "(", "labels", ",", "rag", ",", "in_place", "=", "False", ")", "(", "new_labels", ",", "_", ",", "_", ")", "=", "segmentation", ".", "relabel_sequential", "(", "new_labels", ")", "assert", "(", "new_labels", ".", "max", "(", ")", "==", "0", ")" ]
test to catch an error thrown when subgraph has all equal edges .
train
false
4,071
def system_hibernate():
    logging.info('Performing system hybernation')
    if sabnzbd.WIN32:
        powersup.win_hibernate()
    elif DARWIN:
        powersup.osx_hibernate()
    else:
        powersup.linux_hibernate()
[ "def", "system_hibernate", "(", ")", ":", "logging", ".", "info", "(", "'Performing system hybernation'", ")", "if", "sabnzbd", ".", "WIN32", ":", "powersup", ".", "win_hibernate", "(", ")", "elif", "DARWIN", ":", "powersup", ".", "osx_hibernate", "(", ")", "else", ":", "powersup", ".", "linux_hibernate", "(", ")" ]
hibernate system .
train
false
4,072
def munge_catalog_program(catalog_program):
    return {
        'id': catalog_program['uuid'],
        'name': catalog_program['title'],
        'subtitle': catalog_program['subtitle'],
        'category': catalog_program['type'],
        'marketing_slug': catalog_program['marketing_slug'],
        'organizations': [
            {'display_name': organization['name'], 'key': organization['key']}
            for organization in catalog_program['authoring_organizations']
        ],
        'course_codes': [
            {
                'display_name': course['title'],
                'key': course['key'],
                'organization': ({'display_name': course['owners'][0]['name'], 'key': course['owners'][0]['key']} if course['owners'] else {}),
                'run_modes': [
                    {'course_key': run['key'], 'run_key': CourseKey.from_string(run['key']).run, 'mode_slug': 'verified'}
                    for run in course['course_runs']
                ],
            }
            for course in catalog_program['courses']
        ],
        'banner_image_urls': {
            'w1440h480': catalog_program['banner_image']['large']['url'],
            'w726h242': catalog_program['banner_image']['medium']['url'],
            'w435h145': catalog_program['banner_image']['small']['url'],
            'w348h116': catalog_program['banner_image']['x-small']['url'],
        },
    }
[ "def", "munge_catalog_program", "(", "catalog_program", ")", ":", "return", "{", "'id'", ":", "catalog_program", "[", "'uuid'", "]", ",", "'name'", ":", "catalog_program", "[", "'title'", "]", ",", "'subtitle'", ":", "catalog_program", "[", "'subtitle'", "]", ",", "'category'", ":", "catalog_program", "[", "'type'", "]", ",", "'marketing_slug'", ":", "catalog_program", "[", "'marketing_slug'", "]", ",", "'organizations'", ":", "[", "{", "'display_name'", ":", "organization", "[", "'name'", "]", ",", "'key'", ":", "organization", "[", "'key'", "]", "}", "for", "organization", "in", "catalog_program", "[", "'authoring_organizations'", "]", "]", ",", "'course_codes'", ":", "[", "{", "'display_name'", ":", "course", "[", "'title'", "]", ",", "'key'", ":", "course", "[", "'key'", "]", ",", "'organization'", ":", "(", "{", "'display_name'", ":", "course", "[", "'owners'", "]", "[", "0", "]", "[", "'name'", "]", ",", "'key'", ":", "course", "[", "'owners'", "]", "[", "0", "]", "[", "'key'", "]", "}", "if", "course", "[", "'owners'", "]", "else", "{", "}", ")", ",", "'run_modes'", ":", "[", "{", "'course_key'", ":", "run", "[", "'key'", "]", ",", "'run_key'", ":", "CourseKey", ".", "from_string", "(", "run", "[", "'key'", "]", ")", ".", "run", ",", "'mode_slug'", ":", "'verified'", "}", "for", "run", "in", "course", "[", "'course_runs'", "]", "]", "}", "for", "course", "in", "catalog_program", "[", "'courses'", "]", "]", ",", "'banner_image_urls'", ":", "{", "'w1440h480'", ":", "catalog_program", "[", "'banner_image'", "]", "[", "'large'", "]", "[", "'url'", "]", ",", "'w726h242'", ":", "catalog_program", "[", "'banner_image'", "]", "[", "'medium'", "]", "[", "'url'", "]", ",", "'w435h145'", ":", "catalog_program", "[", "'banner_image'", "]", "[", "'small'", "]", "[", "'url'", "]", ",", "'w348h116'", ":", "catalog_program", "[", "'banner_image'", "]", "[", "'x-small'", "]", "[", "'url'", "]", "}", "}" ]
make a program from the catalog service look like it came from the programs service .
train
false
4,074
def log_output(stream):
    for chunk in stream:
        logger.debug(chunk)
        if (u'"error"' in chunk.lower()):
            raise docker.errors.DockerException(chunk)
[ "def", "log_output", "(", "stream", ")", ":", "for", "chunk", "in", "stream", ":", "logger", ".", "debug", "(", "chunk", ")", "if", "(", "u'\"error\"'", "in", "chunk", ".", "lower", "(", ")", ")", ":", "raise", "docker", ".", "errors", ".", "DockerException", "(", "chunk", ")" ]
log a stream at debug level .
train
false
4,075
def test_labeller():
    assert (labeller(2) == ['q_1', 'q_0'])
    assert (labeller(3, 'j') == ['j_2', 'j_1', 'j_0'])
[ "def", "test_labeller", "(", ")", ":", "assert", "(", "labeller", "(", "2", ")", "==", "[", "'q_1'", ",", "'q_0'", "]", ")", "assert", "(", "labeller", "(", "3", ",", "'j'", ")", "==", "[", "'j_2'", ",", "'j_1'", ",", "'j_0'", "]", ")" ]
test the labeller utility .
train
false
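The test above pins down the labeller contract: n labels, highest index first, default prefix 'q'. A hypothetical implementation consistent with it (the real one lives elsewhere in the project):

def labeller(n, prefix='q'):
    # Highest index first, e.g. labeller(2) -> ['q_1', 'q_0'].
    return ['{0}_{1}'.format(prefix, i) for i in reversed(range(n))]

assert labeller(2) == ['q_1', 'q_0']
assert labeller(3, 'j') == ['j_2', 'j_1', 'j_0']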
4,076
def started():
    if (not is_started()):
        start('shorewall')
[ "def", "started", "(", ")", ":", "if", "(", "not", "is_started", "(", ")", ")", ":", "start", "(", "'shorewall'", ")" ]
require a service to be started .
train
false
4,077
def res_json(res, jsontype='JSON', exception=PluginError):
    try:
        jsondata = res.json()
    except ValueError as err:
        if (len(res.text) > 35):
            snippet = (res.text[:35] + '...')
        else:
            snippet = res.text
        raise exception('Unable to parse {0}: {1} ({2})'.format(jsontype, err, snippet))
    return jsondata
[ "def", "res_json", "(", "res", ",", "jsontype", "=", "'JSON'", ",", "exception", "=", "PluginError", ")", ":", "try", ":", "jsondata", "=", "res", ".", "json", "(", ")", "except", "ValueError", "as", "err", ":", "if", "(", "len", "(", "res", ".", "text", ")", ">", "35", ")", ":", "snippet", "=", "(", "res", ".", "text", "[", ":", "35", "]", "+", "'...'", ")", "else", ":", "snippet", "=", "res", ".", "text", "raise", "exception", "(", "'Unable to parse {0}: {1} ({2})'", ".", "format", "(", "jsontype", ",", "err", ",", "snippet", ")", ")", "return", "jsondata" ]
this function is deprecated .
train
false
4,078
def setAttributeDictionaryMatrixToMatrix(matrix4X4, xmlElement):
    setAttributeDictionaryToMatrix(xmlElement.attributeDictionary, matrix4X4)
    if (xmlElement.object != None):
        xmlElement.object.matrix4X4 = matrix4X4
[ "def", "setAttributeDictionaryMatrixToMatrix", "(", "matrix4X4", ",", "xmlElement", ")", ":", "setAttributeDictionaryToMatrix", "(", "xmlElement", ".", "attributeDictionary", ",", "matrix4X4", ")", "if", "(", "xmlElement", ".", "object", "!=", "None", ")", ":", "xmlElement", ".", "object", ".", "matrix4X4", "=", "matrix4X4" ]
set the element attribute dictionary and element matrix to the matrix .
train
false
4,079
def _load_result(response, ret):
    if (response['code'] is None):
        ret['comment'] = response['content']
    elif (response['code'] == 401):
        ret['comment'] = '401 Forbidden: Authentication required!'
    elif (response['code'] == 404):
        ret['comment'] = response['content']['message']
    elif (response['code'] == 200):
        ret['result'] = True
        ret['comment'] = 'Listing Current Configuration Only. Not action or changes occurred during the execution of this state.'
        ret['changes'] = response['content']
    else:
        ret['comment'] = response['content']['message']
    return ret
[ "def", "_load_result", "(", "response", ",", "ret", ")", ":", "if", "(", "response", "[", "'code'", "]", "is", "None", ")", ":", "ret", "[", "'comment'", "]", "=", "response", "[", "'content'", "]", "elif", "(", "response", "[", "'code'", "]", "==", "401", ")", ":", "ret", "[", "'comment'", "]", "=", "'401 Forbidden: Authentication required!'", "elif", "(", "response", "[", "'code'", "]", "==", "404", ")", ":", "ret", "[", "'comment'", "]", "=", "response", "[", "'content'", "]", "[", "'message'", "]", "elif", "(", "response", "[", "'code'", "]", "==", "200", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Listing Current Configuration Only. Not action or changes occurred during the execution of this state.'", "ret", "[", "'changes'", "]", "=", "response", "[", "'content'", "]", "else", ":", "ret", "[", "'comment'", "]", "=", "response", "[", "'content'", "]", "[", "'message'", "]", "return", "ret" ]
format the results of listing functions .
train
true
4,081
def channel_shift(x, intensity, is_random=False, channel_index=2):
    if is_random:
        factor = np.random.uniform((- intensity), intensity)
    else:
        factor = intensity
    x = np.rollaxis(x, channel_index, 0)
    (min_x, max_x) = (np.min(x), np.max(x))
    channel_images = [np.clip((x_channel + factor), min_x, max_x) for x_channel in x]
    x = np.stack(channel_images, axis=0)
    x = np.rollaxis(x, 0, (channel_index + 1))
    return x
[ "def", "channel_shift", "(", "x", ",", "intensity", ",", "is_random", "=", "False", ",", "channel_index", "=", "2", ")", ":", "if", "is_random", ":", "factor", "=", "np", ".", "random", ".", "uniform", "(", "(", "-", "intensity", ")", ",", "intensity", ")", "else", ":", "factor", "=", "intensity", "x", "=", "np", ".", "rollaxis", "(", "x", ",", "channel_index", ",", "0", ")", "(", "min_x", ",", "max_x", ")", "=", "(", "np", ".", "min", "(", "x", ")", ",", "np", ".", "max", "(", "x", ")", ")", "channel_images", "=", "[", "np", ".", "clip", "(", "(", "x_channel", "+", "factor", ")", ",", "min_x", ",", "max_x", ")", "for", "x_channel", "in", "x", "]", "x", "=", "np", ".", "stack", "(", "channel_images", ",", "axis", "=", "0", ")", "x", "=", "np", ".", "rollaxis", "(", "x", ",", "0", ",", "(", "channel_index", "+", "1", ")", ")", "return", "x" ]
shift the channels of an image .
train
true
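With channel_shift defined as in the snippet above, a short numpy check of its contract — same shape out, and every value clipped back into the input's global [min, max] range:

import numpy as np

x = np.random.rand(4, 4, 3)              # channels-last image, channel_index=2
shifted = channel_shift(x, intensity=0.1)
assert shifted.shape == x.shape
assert shifted.min() >= x.min() and shifted.max() <= x.max()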
4,082
def get_request_state(request_id):
    with _request_states_lock:
        return _request_states[request_id]
[ "def", "get_request_state", "(", "request_id", ")", ":", "with", "_request_states_lock", ":", "return", "_request_states", "[", "request_id", "]" ]
returns the requeststate for the provided request id .
train
false
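A self-contained sketch of the pattern the accessor above relies on: a module-level state dict guarded by a lock so reads stay safe across threads (names mirror the snippet; the sample contents are assumed):

import threading

_request_states_lock = threading.Lock()
_request_states = {'req-1': 'PENDING'}   # assumed sample state

def get_request_state(request_id):
    with _request_states_lock:
        return _request_states[request_id]

assert get_request_state('req-1') == 'PENDING'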
4,083
def make_wiki():
    from simplewiki import SimpleWiki
    database_uri = os.environ.get('SIMPLEWIKI_DATABASE_URI')
    return SimpleWiki((database_uri or 'sqlite:////tmp/simplewiki.db'))
[ "def", "make_wiki", "(", ")", ":", "from", "simplewiki", "import", "SimpleWiki", "database_uri", "=", "os", ".", "environ", ".", "get", "(", "'SIMPLEWIKI_DATABASE_URI'", ")", "return", "SimpleWiki", "(", "(", "database_uri", "or", "'sqlite:////tmp/simplewiki.db'", ")", ")" ]
helper function that creates a new wiki instance .
train
false
4,084
def check_arguments_for_rescoring(usage_key):
    descriptor = modulestore().get_item(usage_key)
    if ((not hasattr(descriptor, 'module_class')) or (not hasattr(descriptor.module_class, 'rescore_problem'))):
        msg = 'Specified module does not support rescoring.'
        raise NotImplementedError(msg)
[ "def", "check_arguments_for_rescoring", "(", "usage_key", ")", ":", "descriptor", "=", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", "if", "(", "(", "not", "hasattr", "(", "descriptor", ",", "'module_class'", ")", ")", "or", "(", "not", "hasattr", "(", "descriptor", ".", "module_class", ",", "'rescore_problem'", ")", ")", ")", ":", "msg", "=", "'Specified module does not support rescoring.'", "raise", "NotImplementedError", "(", "msg", ")" ]
do simple checks on the descriptor to confirm that it supports rescoring .
train
false
4,085
def get_logging_level():
    levels_dict = {10: 'DEBUG', 20: 'INFO', 30: 'WARNING', 40: 'ERROR', 50: 'CRITICAL'}
    print 'The current logging level is:', levels_dict[logger.getEffectiveLevel()]
[ "def", "get_logging_level", "(", ")", ":", "levels_dict", "=", "{", "10", ":", "'DEBUG'", ",", "20", ":", "'INFO'", ",", "30", ":", "'WARNING'", ",", "40", ":", "'ERROR'", ",", "50", ":", "'CRITICAL'", "}", "print", "'The current logging level is:'", ",", "levels_dict", "[", "logger", ".", "getEffectiveLevel", "(", ")", "]" ]
this function prints the current logging level of the main logger .
train
false
4,086
def closeSerial():
    snap.closeSerial()
[ "def", "closeSerial", "(", ")", ":", "snap", ".", "closeSerial", "(", ")" ]
close serial port for snap reprap communications .
train
false
4,087
def include_library(libname):
    if exclude_list:
        if (exclude_list.search(libname) and (not include_list.search(libname))):
            return False
        else:
            return True
    else:
        return True
[ "def", "include_library", "(", "libname", ")", ":", "if", "exclude_list", ":", "if", "(", "exclude_list", ".", "search", "(", "libname", ")", "and", "(", "not", "include_list", ".", "search", "(", "libname", ")", ")", ")", ":", "return", "False", "else", ":", "return", "True", "else", ":", "return", "True" ]
check if a dynamic library should be included with application or not .
train
true
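In the snippet above, exclude_list and include_list are module-level values (compiled regexes, or something falsy when no filtering is configured). A self-contained sketch with assumed sample patterns:

import re

exclude_list = re.compile(r'libfoo')             # assumed sample filter
include_list = re.compile(r'libfoo-essential')   # overrides the exclusion

def include_library(libname):
    if exclude_list:
        if exclude_list.search(libname) and not include_list.search(libname):
            return False
        return True
    return True

assert include_library('libbar.so')            # never excluded
assert not include_library('libfoo.so')        # matches exclude only
assert include_library('libfoo-essential.so')  # excluded but re-included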
4,088
def login_as_admin(context, admin_name='admin', admin_pass='abc123'):
    if (not User.objects.filter(username=admin_name)):

        class ContextWithMixin(CreateAdminMixin, ):

            def __init__(self):
                self.browser = context.browser

        context_wm = ContextWithMixin()
        context_wm.create_admin(username=admin_name, password=admin_pass)
    _login_user(context, admin_name, admin_pass)
[ "def", "login_as_admin", "(", "context", ",", "admin_name", "=", "'admin'", ",", "admin_pass", "=", "'abc123'", ")", ":", "if", "(", "not", "User", ".", "objects", ".", "filter", "(", "username", "=", "admin_name", ")", ")", ":", "class", "ContextWithMixin", "(", "CreateAdminMixin", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "browser", "=", "context", ".", "browser", "context_wm", "=", "ContextWithMixin", "(", ")", "context_wm", ".", "create_admin", "(", "username", "=", "admin_name", ",", "password", "=", "admin_pass", ")", "_login_user", "(", "context", ",", "admin_name", ",", "admin_pass", ")" ]
log in as an admin specified by the optional arguments .
train
false
4,089
@register.simple_tag
def no_params():
    return 'no_params - Expected result'
[ "@", "register", ".", "simple_tag", "def", "no_params", "(", ")", ":", "return", "'no_params - Expected result'" ]
expected no_params __doc__ .
train
false
4,091
def test_parse_login():
    args = ('username', 'password', '')
    expected_output = {'runhandler': '_login_handler', 'args': {'username': 'username', 'password': 'password', 'submit': False}}
    actual_output = screenshot._parse_login(*args)
    assert_equal(expected_output, actual_output)
    args = ('username', 'password', 'submit')
    expected_output = {'runhandler': '_login_handler', 'args': {'username': 'username', 'password': 'password', 'submit': True}}
    actual_output = screenshot._parse_login(*args)
    assert_equal(expected_output, actual_output)
[ "def", "test_parse_login", "(", ")", ":", "args", "=", "(", "'username'", ",", "'password'", ",", "''", ")", "expected_output", "=", "{", "'runhandler'", ":", "'_login_handler'", ",", "'args'", ":", "{", "'username'", ":", "'username'", ",", "'password'", ":", "'password'", ",", "'submit'", ":", "False", "}", "}", "actual_output", "=", "screenshot", ".", "_parse_login", "(", "*", "args", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")", "args", "=", "(", "'username'", ",", "'password'", ",", "'submit'", ")", "expected_output", "=", "{", "'runhandler'", ":", "'_login_handler'", ",", "'args'", ":", "{", "'username'", ":", "'username'", ",", "'password'", ":", "'password'", ",", "'submit'", ":", "True", "}", "}", "actual_output", "=", "screenshot", ".", "_parse_login", "(", "*", "args", ")", "assert_equal", "(", "expected_output", ",", "actual_output", ")" ]
test screenshot .
train
false
4,093
def is_valid_dot_atom(value):
    return (isinstance(value, six.string_types)
            and (not (value[0] == '.'))
            and (not (value[(-1)] == '.'))
            and set(value).issubset(valid_dot_atom_characters))
[ "def", "is_valid_dot_atom", "(", "value", ")", ":", "return", "(", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", "and", "(", "not", "(", "value", "[", "0", "]", "==", "'.'", ")", ")", "and", "(", "not", "(", "value", "[", "(", "-", "1", ")", "]", "==", "'.'", ")", ")", "and", "set", "(", "value", ")", ".", "issubset", "(", "valid_dot_atom_characters", ")", ")" ]
validate an input string as an rfc 2822 dot-atom-text value .
train
false
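valid_dot_atom_characters is defined elsewhere in the original module; here is a Python-3-only sketch that assumes it means RFC 2822 atext plus the dot:

import string

# Assumed definition: RFC 2822 atext characters plus '.'.
valid_dot_atom_characters = frozenset(
    string.ascii_letters + string.digits + "!#$%&'*+-/=?^_`{|}~.")

def is_valid_dot_atom(value):
    return (isinstance(value, str)
            and value[0] != '.'
            and value[-1] != '.'
            and set(value).issubset(valid_dot_atom_characters))

assert is_valid_dot_atom('billing.invoices')
assert not is_valid_dot_atom('.starts-with-dot')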
4,094
def does_not_modify_errors_dict(validator):

    def call_and_assert(key, data, errors, context=None):
        if (context is None):
            context = {}
        original_data = copy.deepcopy(data)
        original_errors = copy.deepcopy(errors)
        original_context = copy.deepcopy(context)
        result = validator(key, data, errors, context=context)
        assert (errors == original_errors), 'Should not modify errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'.format(key=key, data=original_data, errors=original_errors, context=original_context)
        return result

    return call_and_assert
[ "def", "does_not_modify_errors_dict", "(", "validator", ")", ":", "def", "call_and_assert", "(", "key", ",", "data", ",", "errors", ",", "context", "=", "None", ")", ":", "if", "(", "context", "is", "None", ")", ":", "context", "=", "{", "}", "original_data", "=", "copy", ".", "deepcopy", "(", "data", ")", "original_errors", "=", "copy", ".", "deepcopy", "(", "errors", ")", "original_context", "=", "copy", ".", "deepcopy", "(", "context", ")", "result", "=", "validator", "(", "key", ",", "data", ",", "errors", ",", "context", "=", "context", ")", "assert", "(", "errors", "==", "original_errors", ")", ",", "'Should not modify errors dict when called with key: {key}, data: {data}, errors: {errors}, context: {context}'", ".", "format", "(", "key", "=", "key", ",", "data", "=", "original_data", ",", "errors", "=", "original_errors", ",", "context", "=", "original_context", ")", "return", "result", "return", "call_and_assert" ]
a decorator that asserts that the decorated validator doesn't modify its errors dict param .
train
false
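With the decorator defined as above (it needs the copy module in scope), a minimal usage sketch: wrap a trivial validator and confirm a call leaves the errors dict untouched. The validator here is made up for illustration:

import copy   # required by the decorator above

def noop_validator(key, data, errors, context=None):
    return data.get(key)

checked = does_not_modify_errors_dict(noop_validator)
errors = {'name': []}
assert checked('name', {'name': 'x'}, errors) == 'x'
assert errors == {'name': []}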
4,095
def test_show_with_files_from_wheel(script, data):
    wheel_file = data.packages.join('simple.dist-0.1-py2.py3-none-any.whl')
    script.pip('install', '--no-index', wheel_file)
    result = script.pip('show', '-f', 'simple.dist')
    lines = result.stdout.splitlines()
    assert ('Name: simple.dist' in lines)
    assert ('Cannot locate installed-files.txt' not in lines[6]), lines[6]
    assert re.search('Files:\\n( .+\\n)+', result.stdout)
[ "def", "test_show_with_files_from_wheel", "(", "script", ",", "data", ")", ":", "wheel_file", "=", "data", ".", "packages", ".", "join", "(", "'simple.dist-0.1-py2.py3-none-any.whl'", ")", "script", ".", "pip", "(", "'install'", ",", "'--no-index'", ",", "wheel_file", ")", "result", "=", "script", ".", "pip", "(", "'show'", ",", "'-f'", ",", "'simple.dist'", ")", "lines", "=", "result", ".", "stdout", ".", "splitlines", "(", ")", "assert", "(", "'Name: simple.dist'", "in", "lines", ")", "assert", "(", "'Cannot locate installed-files.txt'", "not", "in", "lines", "[", "6", "]", ")", ",", "lines", "[", "6", "]", "assert", "re", ".", "search", "(", "'Files:\\\\n( .+\\\\n)+'", ",", "result", ".", "stdout", ")" ]
test that a wheels files can be listed .
train
false
4,096
def get_vmdk_path_and_adapter_type(hardware_devices):
    if (hardware_devices.__class__.__name__ == 'ArrayOfVirtualDevice'):
        hardware_devices = hardware_devices.VirtualDevice
    vmdk_file_path = None
    vmdk_controler_key = None
    disk_type = None
    unit_number = 0
    adapter_type_dict = {}
    for device in hardware_devices:
        if (device.__class__.__name__ == 'VirtualDisk'):
            if (device.backing.__class__.__name__ == 'VirtualDiskFlatVer2BackingInfo'):
                vmdk_file_path = device.backing.fileName
                vmdk_controler_key = device.controllerKey
                if getattr(device.backing, 'thinProvisioned', False):
                    disk_type = 'thin'
                elif getattr(device.backing, 'eagerlyScrub', False):
                    disk_type = 'eagerZeroedThick'
                else:
                    disk_type = 'preallocated'
            if (device.unitNumber > unit_number):
                unit_number = device.unitNumber
        elif (device.__class__.__name__ == 'VirtualLsiLogicController'):
            adapter_type_dict[device.key] = 'lsiLogic'
        elif (device.__class__.__name__ == 'VirtualBusLogicController'):
            adapter_type_dict[device.key] = 'busLogic'
        elif (device.__class__.__name__ == 'VirtualIDEController'):
            adapter_type_dict[device.key] = 'ide'
        elif (device.__class__.__name__ == 'VirtualLsiLogicSASController'):
            adapter_type_dict[device.key] = 'lsiLogic'
    adapter_type = adapter_type_dict.get(vmdk_controler_key, '')
    return (vmdk_file_path, vmdk_controler_key, adapter_type, disk_type, unit_number)
[ "def", "get_vmdk_path_and_adapter_type", "(", "hardware_devices", ")", ":", "if", "(", "hardware_devices", ".", "__class__", ".", "__name__", "==", "'ArrayOfVirtualDevice'", ")", ":", "hardware_devices", "=", "hardware_devices", ".", "VirtualDevice", "vmdk_file_path", "=", "None", "vmdk_controler_key", "=", "None", "disk_type", "=", "None", "unit_number", "=", "0", "adapter_type_dict", "=", "{", "}", "for", "device", "in", "hardware_devices", ":", "if", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualDisk'", ")", ":", "if", "(", "device", ".", "backing", ".", "__class__", ".", "__name__", "==", "'VirtualDiskFlatVer2BackingInfo'", ")", ":", "vmdk_file_path", "=", "device", ".", "backing", ".", "fileName", "vmdk_controler_key", "=", "device", ".", "controllerKey", "if", "getattr", "(", "device", ".", "backing", ",", "'thinProvisioned'", ",", "False", ")", ":", "disk_type", "=", "'thin'", "elif", "getattr", "(", "device", ".", "backing", ",", "'eagerlyScrub'", ",", "False", ")", ":", "disk_type", "=", "'eagerZeroedThick'", "else", ":", "disk_type", "=", "'preallocated'", "if", "(", "device", ".", "unitNumber", ">", "unit_number", ")", ":", "unit_number", "=", "device", ".", "unitNumber", "elif", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualLsiLogicController'", ")", ":", "adapter_type_dict", "[", "device", ".", "key", "]", "=", "'lsiLogic'", "elif", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualBusLogicController'", ")", ":", "adapter_type_dict", "[", "device", ".", "key", "]", "=", "'busLogic'", "elif", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualIDEController'", ")", ":", "adapter_type_dict", "[", "device", ".", "key", "]", "=", "'ide'", "elif", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualLsiLogicSASController'", ")", ":", "adapter_type_dict", "[", "device", ".", "key", "]", "=", "'lsiLogic'", "adapter_type", "=", "adapter_type_dict", ".", "get", "(", "vmdk_controler_key", ",", "''", ")", "return", "(", "vmdk_file_path", ",", "vmdk_controler_key", ",", "adapter_type", ",", "disk_type", ",", "unit_number", ")" ]
gets the vmdk file path and the storage adapter type .
train
false
4,097
def _num_cpus_windows():
    return os.environ.get('NUMBER_OF_PROCESSORS')
[ "def", "_num_cpus_windows", "(", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "'NUMBER_OF_PROCESSORS'", ")" ]
return the number of active cpus on a windows system .
train
false
4,098
def make_tensor(dim):
    raise NotImplementedError('TODO: implement this function.')
[ "def", "make_tensor", "(", "dim", ")", ":", "raise", "NotImplementedError", "(", "'TODO: implement this function.'", ")" ]
returns a new theano tensor with no broadcastable dimensions .
train
false
4,099
def filesystem_absent(name, force=False, recursive=False):
    return _absent(name, 'filesystem', force, recursive)
[ "def", "filesystem_absent", "(", "name", ",", "force", "=", "False", ",", "recursive", "=", "False", ")", ":", "return", "_absent", "(", "name", ",", "'filesystem'", ",", "force", ",", "recursive", ")" ]
ensure filesystem is absent on the system name : string name of filesystem force : boolean try harder to destroy the dataset recursive : boolean also destroy all the child datasets .
train
false
4,100
def cib_create(cibfile, scope='configuration', extra_args=None):
    cmd = ['pcs', 'cluster', 'cib', cibfile]
    if isinstance(scope, six.string_types):
        cmd += ['scope={0}'.format(scope)]
    if isinstance(extra_args, (list, tuple)):
        cmd += extra_args
    return __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
[ "def", "cib_create", "(", "cibfile", ",", "scope", "=", "'configuration'", ",", "extra_args", "=", "None", ")", ":", "cmd", "=", "[", "'pcs'", ",", "'cluster'", ",", "'cib'", ",", "cibfile", "]", "if", "isinstance", "(", "scope", ",", "six", ".", "string_types", ")", ":", "cmd", "+=", "[", "'scope={0}'", ".", "format", "(", "scope", ")", "]", "if", "isinstance", "(", "extra_args", ",", "(", "list", ",", "tuple", ")", ")", ":", "cmd", "+=", "extra_args", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "output_loglevel", "=", "'trace'", ",", "python_shell", "=", "False", ")" ]
create a cib-file from the current cib of the cluster cibfile name/path of the file containing the cib scope specific section of the cib extra_args additional options for creating the cib-file cli example: .
train
true
4,101
def _get_home():
    try:
        if ((not PY3min) and (sys.platform == 'win32')):
            path = os.path.expanduser('~').decode(sys.getfilesystemencoding())
        else:
            path = os.path.expanduser('~')
    except ImportError:
        pass
    else:
        if os.path.isdir(path):
            return path
    for evar in ('HOME', 'USERPROFILE', 'TMP'):
        path = os.environ.get(evar)
        if ((path is not None) and os.path.isdir(path)):
            return path
    return None
[ "def", "_get_home", "(", ")", ":", "try", ":", "if", "(", "(", "not", "PY3min", ")", "and", "(", "sys", ".", "platform", "==", "'win32'", ")", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ".", "decode", "(", "sys", ".", "getfilesystemencoding", "(", ")", ")", "else", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "except", "ImportError", ":", "pass", "else", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "path", "for", "evar", "in", "(", "'HOME'", ",", "'USERPROFILE'", ",", "'TMP'", ")", ":", "path", "=", "os", ".", "environ", ".", "get", "(", "evar", ")", "if", "(", "(", "path", "is", "not", "None", ")", "and", "os", ".", "path", ".", "isdir", "(", "path", ")", ")", ":", "return", "path", "return", "None" ]
find the user's home directory if possible .
train
false
4,102
def get_public_task_data(request, task_id):
    try:
        (task, state, info) = get_task_data(task_id)
    except TaskNotFound:
        raise TaskNoPermission(task_id)
    if (not hasattr(task, 'check_permission')):
        raise TaskNoPermission(task_id)
    context = info.get('context', {})
    if (not task.check_permission(request, state, context)):
        raise TaskNoPermission(task_id)
    public_name = task.public_name
    return (public_name, state, info.get('public_data', {}), info.get('error', None))
[ "def", "get_public_task_data", "(", "request", ",", "task_id", ")", ":", "try", ":", "(", "task", ",", "state", ",", "info", ")", "=", "get_task_data", "(", "task_id", ")", "except", "TaskNotFound", ":", "raise", "TaskNoPermission", "(", "task_id", ")", "if", "(", "not", "hasattr", "(", "task", ",", "'check_permission'", ")", ")", ":", "raise", "TaskNoPermission", "(", "task_id", ")", "context", "=", "info", ".", "get", "(", "'context'", ",", "{", "}", ")", "if", "(", "not", "task", ".", "check_permission", "(", "request", ",", "state", ",", "context", ")", ")", ":", "raise", "TaskNoPermission", "(", "task_id", ")", "public_name", "=", "task", ".", "public_name", "return", "(", "public_name", ",", "state", ",", "info", ".", "get", "(", "'public_data'", ",", "{", "}", ")", ",", "info", ".", "get", "(", "'error'", ",", "None", ")", ")" ]
return task details as tuple . will raise tasknopermission if request has no permission to access info of the task with id task_id .
train
false
4,103
def generate_gjrgarch(nobs, ar, ma, mu=1.0, scale=0.1, varinnovation=None):
    if (varinnovation is None):
        eta = (scale * np.random.randn(nobs))
    else:
        eta = varinnovation
    etax = np.empty((nobs, 3))
    etax[:, 0] = mu
    etax[:, 1:] = (eta ** 2)[:, None]
    etax[((eta > 0), 2)] = 0
    h = miso_lfilter(ar, ma, etax)[0]
    err = (np.sqrt(h[:len(eta)]) * eta)
    return (err, h, etax)
[ "def", "generate_gjrgarch", "(", "nobs", ",", "ar", ",", "ma", ",", "mu", "=", "1.0", ",", "scale", "=", "0.1", ",", "varinnovation", "=", "None", ")", ":", "if", "(", "varinnovation", "is", "None", ")", ":", "eta", "=", "(", "scale", "*", "np", ".", "random", ".", "randn", "(", "nobs", ")", ")", "else", ":", "eta", "=", "varinnovation", "etax", "=", "np", ".", "empty", "(", "(", "nobs", ",", "3", ")", ")", "etax", "[", ":", ",", "0", "]", "=", "mu", "etax", "[", ":", ",", "1", ":", "]", "=", "(", "eta", "**", "2", ")", "[", ":", ",", "None", "]", "etax", "[", "(", "(", "eta", ">", "0", ")", ",", "2", ")", "]", "=", "0", "h", "=", "miso_lfilter", "(", "ar", ",", "ma", ",", "etax", ")", "[", "0", "]", "err", "=", "(", "np", ".", "sqrt", "(", "h", "[", ":", "len", "(", "eta", ")", "]", ")", "*", "eta", ")", "return", "(", "err", ",", "h", ",", "etax", ")" ]
simulate gjr garch process parameters ar : array_like .
train
false
4,104
def _check_X(X, n_components=None, n_features=None):
    X = check_array(X, dtype=[np.float64, np.float32])
    if ((n_components is not None) and (X.shape[0] < n_components)):
        raise ValueError(('Expected n_samples >= n_components but got n_components = %d, n_samples = %d' % (n_components, X.shape[0])))
    if ((n_features is not None) and (X.shape[1] != n_features)):
        raise ValueError(('Expected the input data X have %d features, but got %d features' % (n_features, X.shape[1])))
    return X
[ "def", "_check_X", "(", "X", ",", "n_components", "=", "None", ",", "n_features", "=", "None", ")", ":", "X", "=", "check_array", "(", "X", ",", "dtype", "=", "[", "np", ".", "float64", ",", "np", ".", "float32", "]", ")", "if", "(", "(", "n_components", "is", "not", "None", ")", "and", "(", "X", ".", "shape", "[", "0", "]", "<", "n_components", ")", ")", ":", "raise", "ValueError", "(", "(", "'Expected n_samples >= n_components but got n_components = %d, n_samples = %d'", "%", "(", "n_components", ",", "X", ".", "shape", "[", "0", "]", ")", ")", ")", "if", "(", "(", "n_features", "is", "not", "None", ")", "and", "(", "X", ".", "shape", "[", "1", "]", "!=", "n_features", ")", ")", ":", "raise", "ValueError", "(", "(", "'Expected the input data X have %d features, but got %d features'", "%", "(", "n_features", ",", "X", ".", "shape", "[", "1", "]", ")", ")", ")", "return", "X" ]
check the input data x .
train
false
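A quick usage sketch for _check_X above (it builds on scikit-learn's check_array, so sklearn must be installed): a 10x2 matrix passes when n_components <= 10 and n_features == 2, while a too-large n_components raises:

import numpy as np

X = np.random.rand(10, 2)
assert _check_X(X, n_components=5, n_features=2).shape == (10, 2)

try:
    _check_X(X, n_components=20)   # only 10 samples available
except ValueError as err:
    print(err)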
4,105
def test_nested_iteration():
    t = table.Table([[0, 1]], names=['a'])
    out = []
    for r1 in t:
        for r2 in t:
            out.append((r1['a'], r2['a']))
    assert (out == [(0, 0), (0, 1), (1, 0), (1, 1)])
[ "def", "test_nested_iteration", "(", ")", ":", "t", "=", "table", ".", "Table", "(", "[", "[", "0", ",", "1", "]", "]", ",", "names", "=", "[", "'a'", "]", ")", "out", "=", "[", "]", "for", "r1", "in", "t", ":", "for", "r2", "in", "t", ":", "out", ".", "append", "(", "(", "r1", "[", "'a'", "]", ",", "r2", "[", "'a'", "]", ")", ")", "assert", "(", "out", "==", "[", "(", "0", ",", "0", ")", ",", "(", "0", ",", "1", ")", ",", "(", "1", ",", "0", ")", ",", "(", "1", ",", "1", ")", "]", ")" ]
regression test for issue 3358 where nested iteration over a single table fails .
train
false
4,108
def get_class_alias(klass_or_alias):
    if isinstance(klass_or_alias, python.str_types):
        try:
            return CLASS_CACHE[klass_or_alias]
        except KeyError:
            return load_class(klass_or_alias)
    try:
        return CLASS_CACHE[klass_or_alias]
    except KeyError:
        raise UnknownClassAlias(('Unknown alias for %r' % (klass_or_alias,)))
[ "def", "get_class_alias", "(", "klass_or_alias", ")", ":", "if", "isinstance", "(", "klass_or_alias", ",", "python", ".", "str_types", ")", ":", "try", ":", "return", "CLASS_CACHE", "[", "klass_or_alias", "]", "except", "KeyError", ":", "return", "load_class", "(", "klass_or_alias", ")", "try", ":", "return", "CLASS_CACHE", "[", "klass_or_alias", "]", "except", "KeyError", ":", "raise", "UnknownClassAlias", "(", "(", "'Unknown alias for %r'", "%", "(", "klass_or_alias", ",", ")", ")", ")" ]
tries to find a suitable pyamf class alias for the given class or alias name .
train
true
4,109
@contextfilter def group_by_letter(context, object_list): res = {} for x in object_list: r = re.search('^[a-zA-Z]', x.name) if r: key = r.group().lower() if (key not in res): res[key] = [x] else: res[key].append(x) n = re.search('^[0-9_]', x.name) if n: if ('#' not in res): res['#'] = [x] else: res['#'].append(x) if ((not n) and (not r)): if ('#' not in res): res['#'] = [x] else: res['#'].append(x) l = [] for (k, v) in res.items(): l.append((k, v)) l.sort(cmp=(lambda x, y: cmp(x, y))) return l
[ "@", "contextfilter", "def", "group_by_letter", "(", "context", ",", "object_list", ")", ":", "res", "=", "{", "}", "for", "x", "in", "object_list", ":", "r", "=", "re", ".", "search", "(", "'^[a-zA-Z]'", ",", "x", ".", "name", ")", "if", "r", ":", "key", "=", "r", ".", "group", "(", ")", ".", "lower", "(", ")", "if", "(", "key", "not", "in", "res", ")", ":", "res", "[", "key", "]", "=", "[", "x", "]", "else", ":", "res", "[", "key", "]", ".", "append", "(", "x", ")", "n", "=", "re", ".", "search", "(", "'^[0-9_]'", ",", "x", ".", "name", ")", "if", "n", ":", "if", "(", "'#'", "not", "in", "res", ")", ":", "res", "[", "'#'", "]", "=", "[", "x", "]", "else", ":", "res", "[", "'#'", "]", ".", "append", "(", "x", ")", "if", "(", "(", "not", "n", ")", "and", "(", "not", "r", ")", ")", ":", "if", "(", "'#'", "not", "in", "res", ")", ":", "res", "[", "'#'", "]", "=", "[", "x", "]", "else", ":", "res", "[", "'#'", "]", ".", "append", "(", "x", ")", "l", "=", "[", "]", "for", "(", "k", ",", "v", ")", "in", "res", ".", "items", "(", ")", ":", "l", ".", "append", "(", "(", "k", ",", "v", ")", ")", "l", ".", "sort", "(", "cmp", "=", "(", "lambda", "x", ",", "y", ":", "cmp", "(", "x", ",", "y", ")", ")", ")", "return", "l" ]
group contacts by letter .
train
false
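group_by_letter above relies on Python 2's cmp-based sort and repeats the bucket-append logic three times. A minimal Python 3 sketch of the same grouping — operating on plain strings rather than objects with a .name attribute — could look like:

import re

def group_by_letter_sketch(names):
    """Group names by leading letter; digits and underscores land under '#'."""
    res = {}
    for name in names:
        m = re.match(r'[a-zA-Z]', name)
        key = m.group().lower() if m else '#'
        res.setdefault(key, []).append(name)
    return sorted(res.items())

print(group_by_letter_sketch(['Alice', 'bob', '9lives', '_tmp', 'anna']))
# [('#', ['9lives', '_tmp']), ('a', ['Alice', 'anna']), ('b', ['bob'])]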
4,110
def get_non_generated_file_lines(): lines_to_copy = [] flag_found = False with open('./plotly/graph_objs/graph_objs.py', 'r') as f: for line_to_copy in f: if line_to_copy.startswith(FLAG): flag_found = True break lines_to_copy.append(line_to_copy) if (not flag_found): raise ValueError('Failed to find flag:\n"{}"\nin graph_objs_tools.py.'.format(FLAG)) return lines_to_copy
[ "def", "get_non_generated_file_lines", "(", ")", ":", "lines_to_copy", "=", "[", "]", "flag_found", "=", "False", "with", "open", "(", "'./plotly/graph_objs/graph_objs.py'", ",", "'r'", ")", "as", "f", ":", "for", "line_to_copy", "in", "f", ":", "if", "line_to_copy", ".", "startswith", "(", "FLAG", ")", ":", "flag_found", "=", "True", "break", "lines_to_copy", ".", "append", "(", "line_to_copy", ")", "if", "(", "not", "flag_found", ")", ":", "raise", "ValueError", "(", "'Failed to find flag:\\n\"{}\"\\nin graph_objs_tools.py.'", ".", "format", "(", "FLAG", ")", ")", "return", "lines_to_copy" ]
copy each line up to our special flag line and return those lines .
train
false
4,112
def _linux_os_release(): pretty_name = '' ashtray = {} keys = ['NAME', 'VERSION_ID'] try: with open(os.path.join('/etc', 'os-release')) as f: for line in f: for key in keys: if line.startswith(key): ashtray[key] = re.sub('^"|"$', '', line.strip().split('=')[1]) except (OSError, IOError): return pretty_name if ashtray: if ('NAME' in ashtray): pretty_name = ashtray['NAME'] if ('VERSION_ID' in ashtray): pretty_name += ' {}'.format(ashtray['VERSION_ID']) return pretty_name
[ "def", "_linux_os_release", "(", ")", ":", "pretty_name", "=", "''", "ashtray", "=", "{", "}", "keys", "=", "[", "'NAME'", ",", "'VERSION_ID'", "]", "try", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "'/etc'", ",", "'os-release'", ")", ")", "as", "f", ":", "for", "line", "in", "f", ":", "for", "key", "in", "keys", ":", "if", "line", ".", "startswith", "(", "key", ")", ":", "ashtray", "[", "key", "]", "=", "re", ".", "sub", "(", "'^\"|\"$'", ",", "''", ",", "line", ".", "strip", "(", ")", ".", "split", "(", "'='", ")", "[", "1", "]", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "pretty_name", "if", "ashtray", ":", "if", "(", "'NAME'", "in", "ashtray", ")", ":", "pretty_name", "=", "ashtray", "[", "'NAME'", "]", "if", "(", "'VERSION_ID'", "in", "ashtray", ")", ":", "pretty_name", "+=", "' {}'", ".", "format", "(", "ashtray", "[", "'VERSION_ID'", "]", ")", "return", "pretty_name" ]
try to determine the name of a linux distribution .
train
true
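A quick way to exercise the _linux_os_release parsing logic without touching /etc/os-release is to feed it an in-memory sample. This sketch pulls the same NAME/VERSION_ID keys from a hypothetical os-release string (note it matches on 'KEY=' rather than the looser startswith check above):

import io
import re

SAMPLE = 'NAME="Ubuntu"\nVERSION_ID="22.04"\nID=ubuntu\n'

def parse_os_release(f, keys=('NAME', 'VERSION_ID')):
    """Pull selected keys out of an os-release style stream, stripping quotes."""
    found = {}
    for line in f:
        for key in keys:
            if line.startswith(key + '='):
                found[key] = re.sub(r'^"|"$', '', line.strip().split('=', 1)[1])
    return found

info = parse_os_release(io.StringIO(SAMPLE))
print('{} {}'.format(info.get('NAME', ''), info.get('VERSION_ID', '')))  # Ubuntu 22.04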
4,114
@block_user_agents @require_GET def with_localization_tag(request, tag=None): tag_obj = ((tag and get_object_or_404(LocalizationTag, name=tag)) or None) docs = Document.objects.filter_with_localization_tag(locale=request.LANGUAGE_CODE, tag=tag_obj) paginated_docs = paginate(request, docs, per_page=DOCUMENTS_PER_PAGE) context = {'documents': paginated_docs, 'count': docs.count(), 'tag': tag_obj, 'tag_name': tag} return render(request, 'wiki/list/with_localization_tags.html', context)
[ "@", "block_user_agents", "@", "require_GET", "def", "with_localization_tag", "(", "request", ",", "tag", "=", "None", ")", ":", "tag_obj", "=", "(", "(", "tag", "and", "get_object_or_404", "(", "LocalizationTag", ",", "name", "=", "tag", ")", ")", "or", "None", ")", "docs", "=", "Document", ".", "objects", ".", "filter_with_localization_tag", "(", "locale", "=", "request", ".", "LANGUAGE_CODE", ",", "tag", "=", "tag_obj", ")", "paginated_docs", "=", "paginate", "(", "request", ",", "docs", ",", "per_page", "=", "DOCUMENTS_PER_PAGE", ")", "context", "=", "{", "'documents'", ":", "paginated_docs", ",", "'count'", ":", "docs", ".", "count", "(", ")", ",", "'tag'", ":", "tag_obj", ",", "'tag_name'", ":", "tag", "}", "return", "render", "(", "request", ",", "'wiki/list/with_localization_tags.html'", ",", "context", ")" ]
lists wiki documents with a localization tag .
train
false
4,115
def get_plugin_updates_available(raise_error=False): if (not has_external_plugins()): return None display_plugins = read_available_plugins(raise_error=raise_error) if display_plugins: update_plugins = filter(filter_upgradeable_plugins, display_plugins) if (len(update_plugins) > 0): return update_plugins return None
[ "def", "get_plugin_updates_available", "(", "raise_error", "=", "False", ")", ":", "if", "(", "not", "has_external_plugins", "(", ")", ")", ":", "return", "None", "display_plugins", "=", "read_available_plugins", "(", "raise_error", "=", "raise_error", ")", "if", "display_plugins", ":", "update_plugins", "=", "filter", "(", "filter_upgradeable_plugins", ",", "display_plugins", ")", "if", "(", "len", "(", "update_plugins", ")", ">", "0", ")", ":", "return", "update_plugins", "return", "None" ]
api exposed to read whether there are updates available for any of the installed user plugins .
train
false
4,116
def test_octarine_srv(dcos_api_session, timeout=30): test_uuid = uuid.uuid4().hex[:16] octarine_id = uuid.uuid4().hex proxy = ('"http://127.0.0.1:$(/opt/mesosphere/bin/octarine ' + '--client --port {})"'.format(octarine_id)) port_name = 'pinger' cmd = ('/opt/mesosphere/bin/octarine {} & '.format(octarine_id) + '/opt/mesosphere/bin/python -m http.server ${PORT0}') raw_app_id = 'integration-test-app-octarine-srv-{}'.format(test_uuid) check_command = 'curl --fail --proxy {} _{}._{}._tcp.marathon.mesos.mydcos.directory'.format(proxy, port_name, raw_app_id) app_definition = {'id': '/{}'.format(raw_app_id), 'cpus': 0.1, 'mem': 128, 'cmd': cmd, 'disk': 0, 'instances': 1, 'portDefinitions': [{'port': 0, 'protocol': 'tcp', 'name': port_name, 'labels': {}}], 'healthChecks': [{'protocol': 'COMMAND', 'command': {'value': check_command}, 'gracePeriodSeconds': 5, 'intervalSeconds': 10, 'timeoutSeconds': 10, 'maxConsecutiveFailures': 3}]} dcos_api_session.marathon.deploy_and_cleanup(app_definition)
[ "def", "test_octarine_srv", "(", "dcos_api_session", ",", "timeout", "=", "30", ")", ":", "test_uuid", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "[", ":", "16", "]", "octarine_id", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "proxy", "=", "(", "'\"http://127.0.0.1:$(/opt/mesosphere/bin/octarine '", "+", "'--client --port {})\"'", ".", "format", "(", "octarine_id", ")", ")", "port_name", "=", "'pinger'", "cmd", "=", "(", "'/opt/mesosphere/bin/octarine {} & '", ".", "format", "(", "octarine_id", ")", "+", "'/opt/mesosphere/bin/python -m http.server ${PORT0}'", ")", "raw_app_id", "=", "'integration-test-app-octarine-srv-{}'", ".", "format", "(", "test_uuid", ")", "check_command", "=", "'curl --fail --proxy {} _{}._{}._tcp.marathon.mesos.mydcos.directory'", ".", "format", "(", "proxy", ",", "port_name", ",", "raw_app_id", ")", "app_definition", "=", "{", "'id'", ":", "'/{}'", ".", "format", "(", "raw_app_id", ")", ",", "'cpus'", ":", "0.1", ",", "'mem'", ":", "128", ",", "'cmd'", ":", "cmd", ",", "'disk'", ":", "0", ",", "'instances'", ":", "1", ",", "'portDefinitions'", ":", "[", "{", "'port'", ":", "0", ",", "'protocol'", ":", "'tcp'", ",", "'name'", ":", "port_name", ",", "'labels'", ":", "{", "}", "}", "]", ",", "'healthChecks'", ":", "[", "{", "'protocol'", ":", "'COMMAND'", ",", "'command'", ":", "{", "'value'", ":", "check_command", "}", ",", "'gracePeriodSeconds'", ":", "5", ",", "'intervalSeconds'", ":", "10", ",", "'timeoutSeconds'", ":", "10", ",", "'maxConsecutiveFailures'", ":", "3", "}", "]", "}", "dcos_api_session", ".", "marathon", ".", "deploy_and_cleanup", "(", "app_definition", ")" ]
test resolving srv records through octarine .
train
false
4,117
def list_remove_repeat(l=None): l2 = [] [l2.append(i) for i in l if (not (i in l2))] return l2
[ "def", "list_remove_repeat", "(", "l", "=", "None", ")", ":", "l2", "=", "[", "]", "[", "l2", ".", "append", "(", "i", ")", "for", "i", "in", "l", "if", "(", "not", "(", "i", "in", "l2", ")", ")", "]", "return", "l2" ]
remove the repeated items in a list .
train
false
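list_remove_repeat works, but the list comprehension used purely for its append side effect is unidiomatic and the membership test makes it O(n^2). A common order-preserving alternative (dicts keep insertion order from Python 3.7 on):

def dedup_preserve_order(items):
    """Order-preserving de-duplication via dict key uniqueness."""
    return list(dict.fromkeys(items))

print(dedup_preserve_order([3, 1, 3, 2, 1]))  # [3, 1, 2]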
4,118
def primarykeys(conn, table): rows = query(conn, "\n SELECT c.column_name\n FROM information_schema.table_constraints t\n INNER JOIN information_schema.constraint_column_usage c\n ON (t.CONSTRAINT_CATALOG = c.CONSTRAINT_CATALOG\n AND t.CONSTRAINT_NAME = c.CONSTRAINT_NAME\n AND t.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA\n AND t.TABLE_CATALOG = c.TABLE_CATALOG\n AND t.TABLE_NAME = c.TABLE_NAME\n AND t.TABLE_SCHEMA = c.TABLE_SCHEMA)\n WHERE t.table_name='%s'\n AND t.constraint_type='PRIMARY KEY'\n ;", table) return [row['column_name'] for row in rows]
[ "def", "primarykeys", "(", "conn", ",", "table", ")", ":", "rows", "=", "query", "(", "conn", ",", "\"\\n SELECT c.column_name\\n FROM information_schema.table_constraints t\\n INNER JOIN information_schema.constraint_column_usage c\\n ON (t.CONSTRAINT_CATALOG = c.CONSTRAINT_CATALOG\\n AND t.CONSTRAINT_NAME = c.CONSTRAINT_NAME\\n AND t.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA\\n AND t.TABLE_CATALOG = c.TABLE_CATALOG\\n AND t.TABLE_NAME = c.TABLE_NAME\\n AND t.TABLE_SCHEMA = c.TABLE_SCHEMA)\\n WHERE t.table_name='%s'\\n AND t.constraint_type='PRIMARY KEY'\\n ;\"", ",", "table", ")", "return", "[", "row", "[", "'column_name'", "]", "for", "row", "in", "rows", "]" ]
find primary keys .
train
false
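The query helper used by primarykeys is not shown, and the SQL appears to be built via %s string interpolation. A hedged variant — assuming a psycopg2-style DB-API cursor — that binds the table name as a proper query parameter instead:

def primarykeys_param(cursor, table):
    # Hypothetical parameterized rewrite; avoids interpolating the table name
    # into the SQL text as the helper above appears to do.
    cursor.execute(
        """
        SELECT c.column_name
        FROM information_schema.table_constraints t
        JOIN information_schema.constraint_column_usage c
          ON t.constraint_name = c.constraint_name
         AND t.table_schema = c.table_schema
        WHERE t.table_name = %s
          AND t.constraint_type = 'PRIMARY KEY'
        """,
        (table,),
    )
    return [row[0] for row in cursor.fetchall()]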
4,119
def metric_cleanup(): logging.debug('metric_cleanup') pass
[ "def", "metric_cleanup", "(", ")", ":", "logging", ".", "debug", "(", "'metric_cleanup'", ")", "pass" ]
clean up the metric module .
train
false
4,120
def device_writer(queue): while True: (device, value) = queue.get() scaled = rescale_value(value[0]) log.debug(('Write(%s) = %s' % (device, value))) if (not device): continue
[ "def", "device_writer", "(", "queue", ")", ":", "while", "True", ":", "(", "device", ",", "value", ")", "=", "queue", ".", "get", "(", ")", "scaled", "=", "rescale_value", "(", "value", "[", "0", "]", ")", "log", ".", "debug", "(", "(", "'Write(%s) = %s'", "%", "(", "device", ",", "value", ")", ")", ")", "if", "(", "not", "device", ")", ":", "continue" ]
a worker process that processes new messages from a queue to write to device outputs .
train
false
4,121
def create_evaluated_sequence_set(evaluator, *types_order, **kwargs): sequence_type = kwargs.pop('sequence_type') assert (not kwargs) sets = tuple((AlreadyEvaluated(types) for types in types_order)) return set([FakeSequence(evaluator, sets, sequence_type)])
[ "def", "create_evaluated_sequence_set", "(", "evaluator", ",", "*", "types_order", ",", "**", "kwargs", ")", ":", "sequence_type", "=", "kwargs", ".", "pop", "(", "'sequence_type'", ")", "assert", "(", "not", "kwargs", ")", "sets", "=", "tuple", "(", "(", "AlreadyEvaluated", "(", "types", ")", "for", "types", "in", "types_order", ")", ")", "return", "set", "(", "[", "FakeSequence", "(", "evaluator", ",", "sets", ",", "sequence_type", ")", "]", ")" ]
sequence_type is a named argument .
train
false
4,123
def apply_units(string, units, inter=None, final=float, blank_reg=_BLANK_RE, value_reg=_VALUE_RE): if (inter is None): inter = final fstring = _BLANK_RE.sub('', string) if (not (fstring and _VALIDATION_RE.match(fstring))): raise ValueError(('Invalid unit string: %r.' % string)) values = [] for match in value_reg.finditer(fstring): dic = match.groupdict() (lit, unit) = (dic['value'], dic.get('unit')) value = inter(lit) if (unit is not None): try: value *= units[unit.lower()] except KeyError: raise KeyError(('invalid unit %s. valid units are %s' % (unit, units.keys()))) values.append(value) return final(sum(values))
[ "def", "apply_units", "(", "string", ",", "units", ",", "inter", "=", "None", ",", "final", "=", "float", ",", "blank_reg", "=", "_BLANK_RE", ",", "value_reg", "=", "_VALUE_RE", ")", ":", "if", "(", "inter", "is", "None", ")", ":", "inter", "=", "final", "fstring", "=", "_BLANK_RE", ".", "sub", "(", "''", ",", "string", ")", "if", "(", "not", "(", "fstring", "and", "_VALIDATION_RE", ".", "match", "(", "fstring", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "'Invalid unit string: %r.'", "%", "string", ")", ")", "values", "=", "[", "]", "for", "match", "in", "value_reg", ".", "finditer", "(", "fstring", ")", ":", "dic", "=", "match", ".", "groupdict", "(", ")", "(", "lit", ",", "unit", ")", "=", "(", "dic", "[", "'value'", "]", ",", "dic", ".", "get", "(", "'unit'", ")", ")", "value", "=", "inter", "(", "lit", ")", "if", "(", "unit", "is", "not", "None", ")", ":", "try", ":", "value", "*=", "units", "[", "unit", ".", "lower", "(", ")", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "'invalid unit %s. valid units are %s'", "%", "(", "unit", ",", "units", ".", "keys", "(", ")", ")", ")", ")", "values", ".", "append", "(", "value", ")", "return", "final", "(", "sum", "(", "values", ")", ")" ]
parse the string applying the units defined in units .
train
false
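The module-level regexes (_BLANK_RE, _VALUE_RE, _VALIDATION_RE) behind apply_units are not shown, so here is a simplified, self-contained re-implementation with hypothetical patterns that illustrates the intended behavior — e.g. '1.5kb 12b' with {'kb': 1024, 'b': 1} sums to 1548.0:

import re

_BLANK = re.compile(r'\s+')
_VALUE = re.compile(r'(?P<value>\d+(?:\.\d+)?)(?P<unit>[a-zA-Z]+)?')

def apply_units_sketch(string, units):
    """Sum each numeric chunk scaled by its (optional) trailing unit."""
    total = 0.0
    for m in _VALUE.finditer(_BLANK.sub('', string)):
        value = float(m.group('value'))
        unit = m.group('unit')
        if unit is not None:
            value *= units[unit.lower()]
        total += value
    return total

print(apply_units_sketch('1.5kb 12b', {'kb': 1024, 'b': 1}))  # 1548.0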
4,124
def generate_verification_key(verification_type=None): token = security.random_string(30) if (not verification_type): return token expires = (timezone.now() + dt.timedelta(minutes=settings.EXPIRATION_TIME_DICT[verification_type])) return {'token': token, 'expires': expires}
[ "def", "generate_verification_key", "(", "verification_type", "=", "None", ")", ":", "token", "=", "security", ".", "random_string", "(", "30", ")", "if", "(", "not", "verification_type", ")", ":", "return", "token", "expires", "=", "(", "timezone", ".", "now", "(", ")", "+", "dt", ".", "timedelta", "(", "minutes", "=", "settings", ".", "EXPIRATION_TIME_DICT", "[", "verification_type", "]", ")", ")", "return", "{", "'token'", ":", "token", ",", "'expires'", ":", "expires", "}" ]
generate a one-time verification key with an optional expiration time .
train
false
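generate_verification_key depends on a project-specific security.random_string and settings.EXPIRATION_TIME_DICT. A stdlib-only sketch with a hypothetical expiration table shows the same shape of output:

import datetime as dt
import secrets

# Hypothetical stand-in for settings.EXPIRATION_TIME_DICT.
EXPIRATION_MINUTES = {'email': 24 * 60, 'password': 30}

def verification_key_sketch(verification_type=None):
    token = secrets.token_urlsafe(22)  # stdlib stand-in for random_string(30)
    if not verification_type:
        return token
    expires = dt.datetime.utcnow() + dt.timedelta(
        minutes=EXPIRATION_MINUTES[verification_type])
    return {'token': token, 'expires': expires}

print(verification_key_sketch())
print(verification_key_sketch('password'))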
4,125
def check_token(token, attr_name, config=None, pos=None): if ((token is not None) and (not xml_check.check_token(token))): return False return True
[ "def", "check_token", "(", "token", ",", "attr_name", ",", "config", "=", "None", ",", "pos", "=", "None", ")", ":", "if", "(", "(", "token", "is", "not", "None", ")", "and", "(", "not", "xml_check", ".", "check_token", "(", "token", ")", ")", ")", ":", "return", "False", "return", "True" ]
make sure that all tokens appear in the code in order .
train
false
4,127
def collecting_callback(): calls = [] def cb(**kwargs): calls.append(kwargs) return (cb, calls)
[ "def", "collecting_callback", "(", ")", ":", "calls", "=", "[", "]", "def", "cb", "(", "**", "kwargs", ")", ":", "calls", ".", "append", "(", "kwargs", ")", "return", "(", "cb", ",", "calls", ")" ]
create and return a callback and a list populated with call args .
train
false
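collecting_callback is a tiny test helper; the factory is repeated verbatim below so the typical usage — invoke the callback, then assert on the collected kwargs — runs stand-alone:

def collecting_callback():
    calls = []
    def cb(**kwargs):
        calls.append(kwargs)  # record every invocation's keyword args
    return cb, calls

cb, calls = collecting_callback()
cb(event='save', ok=True)
cb(event='load')
print(calls)  # [{'event': 'save', 'ok': True}, {'event': 'load'}]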
4,128
def polynomial(order): powers = np.asarray(order) if (powers.shape == ()): powers = np.arange(1, (powers + 1)) powers.shape = (len(powers), 1) len_beta = (len(powers) + 1) def _poly_est(data, len_beta=len_beta): return np.ones((len_beta,), float) return Model(_poly_fcn, fjacd=_poly_fjacd, fjacb=_poly_fjacb, estimate=_poly_est, extra_args=(powers,), meta={'name': 'Sorta-general Polynomial', 'equ': ('y = B_0 + Sum[i=1..%s, B_i * (x**i)]' % (len_beta - 1)), 'TeXequ': ('$y=\\beta_0 + \\sum_{i=1}^{%s} \\beta_i x^i$' % (len_beta - 1))})
[ "def", "polynomial", "(", "order", ")", ":", "powers", "=", "np", ".", "asarray", "(", "order", ")", "if", "(", "powers", ".", "shape", "==", "(", ")", ")", ":", "powers", "=", "np", ".", "arange", "(", "1", ",", "(", "powers", "+", "1", ")", ")", "powers", ".", "shape", "=", "(", "len", "(", "powers", ")", ",", "1", ")", "len_beta", "=", "(", "len", "(", "powers", ")", "+", "1", ")", "def", "_poly_est", "(", "data", ",", "len_beta", "=", "len_beta", ")", ":", "return", "np", ".", "ones", "(", "(", "len_beta", ",", ")", ",", "float", ")", "return", "Model", "(", "_poly_fcn", ",", "fjacd", "=", "_poly_fjacd", ",", "fjacb", "=", "_poly_fjacb", ",", "estimate", "=", "_poly_est", ",", "extra_args", "=", "(", "powers", ",", ")", ",", "meta", "=", "{", "'name'", ":", "'Sorta-general Polynomial'", ",", "'equ'", ":", "(", "'y = B_0 + Sum[i=1..%s, B_i * (x**i)]'", "%", "(", "len_beta", "-", "1", ")", ")", ",", "'TeXequ'", ":", "(", "'$y=\\\\beta_0 + \\\\sum_{i=1}^{%s} \\\\beta_i x^i$'", "%", "(", "len_beta", "-", "1", ")", ")", "}", ")" ]
factory function for a general polynomial model .
train
false
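Assuming this is the scipy.odr polynomial factory (the Model signature matches), a short fit illustrates how the returned model is consumed; no beta0 is needed because the factory supplies an estimate function:

import numpy as np
from scipy import odr

rng = np.random.default_rng(0)
x = np.linspace(0.0, 5.0, 30)
y = 1.0 + 2.0 * x - 0.5 * x ** 2 + rng.normal(0.0, 0.1, x.size)

model = odr.polynomial(2)                   # fits B_0 + B_1*x + B_2*x**2
fit = odr.ODR(odr.Data(x, y), model).run()  # orthogonal distance regression
print(fit.beta)                             # approximately [1.0, 2.0, -0.5]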
4,129
def get_vm_size(vm_): vm_size = config.get_cloud_config_value('size', vm_, __opts__) ram = avail_sizes()[vm_size]['RAM'] if vm_size.startswith('Linode'): vm_size = vm_size.replace('Linode ', '') if (ram == int(vm_size)): return ram else: raise SaltCloudNotFound('The specified size, {0}, could not be found.'.format(vm_size))
[ "def", "get_vm_size", "(", "vm_", ")", ":", "vm_size", "=", "config", ".", "get_cloud_config_value", "(", "'size'", ",", "vm_", ",", "__opts__", ")", "ram", "=", "avail_sizes", "(", ")", "[", "vm_size", "]", "[", "'RAM'", "]", "if", "vm_size", ".", "startswith", "(", "'Linode'", ")", ":", "vm_size", "=", "vm_size", ".", "replace", "(", "'Linode '", ",", "''", ")", "if", "(", "ram", "==", "int", "(", "vm_size", ")", ")", ":", "return", "ram", "else", ":", "raise", "SaltCloudNotFound", "(", "'The specified size, {0}, could not be found.'", ".", "format", "(", "vm_size", ")", ")" ]
returns the vm's size .
train
true
4,130
def reset_password_token_status(token): (expired, invalid, user, data) = get_token_status(token, 'reset', 'RESET_PASSWORD', return_data=True) if (not invalid): if user.password: password_hash = md5(user.password) if (not safe_str_cmp(password_hash, data[1])): invalid = True return (expired, invalid, user)
[ "def", "reset_password_token_status", "(", "token", ")", ":", "(", "expired", ",", "invalid", ",", "user", ",", "data", ")", "=", "get_token_status", "(", "token", ",", "'reset'", ",", "'RESET_PASSWORD'", ",", "return_data", "=", "True", ")", "if", "(", "not", "invalid", ")", ":", "if", "user", ".", "password", ":", "password_hash", "=", "md5", "(", "user", ".", "password", ")", "if", "(", "not", "safe_str_cmp", "(", "password_hash", ",", "data", "[", "1", "]", ")", ")", ":", "invalid", "=", "True", "return", "(", "expired", ",", "invalid", ",", "user", ")" ]
returns the expired status , invalid status , and user of a password reset token .
train
false
4,131
def test_decompose_regression(): q = ((np.array([1, 2, 3]) * u.m) / (2.0 * u.km)) assert np.all((q.decompose().value == np.array([0.0005, 0.001, 0.0015]))) assert np.all((q == ((np.array([1, 2, 3]) * u.m) / (2.0 * u.km)))) assert np.all((q.decompose().value == np.array([0.0005, 0.001, 0.0015])))
[ "def", "test_decompose_regression", "(", ")", ":", "q", "=", "(", "(", "np", ".", "array", "(", "[", "1", ",", "2", ",", "3", "]", ")", "*", "u", ".", "m", ")", "/", "(", "2.0", "*", "u", ".", "km", ")", ")", "assert", "np", ".", "all", "(", "(", "q", ".", "decompose", "(", ")", ".", "value", "==", "np", ".", "array", "(", "[", "0.0005", ",", "0.001", ",", "0.0015", "]", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "q", "==", "(", "(", "np", ".", "array", "(", "[", "1", ",", "2", ",", "3", "]", ")", "*", "u", ".", "m", ")", "/", "(", "2.0", "*", "u", ".", "km", ")", ")", ")", ")", "assert", "np", ".", "all", "(", "(", "q", ".", "decompose", "(", ")", ".", "value", "==", "np", ".", "array", "(", "[", "0.0005", ",", "0.001", ",", "0.0015", "]", ")", ")", ")" ]
regression test for bug #1163 , where calling decompose multiple times on a quantity with an array and a scale != 1 changed the result .
train
false
4,132
def buildHL0aTrainingSet(numOnes=5): numPatterns = 23 p = getSimplePatterns(numOnes, numPatterns) s = [] s.append(p[rgen.randint(3, 23)]) for _ in xrange(20): s.append(p[rgen.randint(3, 23)]) s.append(p[0]) s.append(p[1]) s.append(p[2]) s.append(p[rgen.randint(3, 23)]) return ([s], [[p[0], p[1], p[2]]])
[ "def", "buildHL0aTrainingSet", "(", "numOnes", "=", "5", ")", ":", "numPatterns", "=", "23", "p", "=", "getSimplePatterns", "(", "numOnes", ",", "numPatterns", ")", "s", "=", "[", "]", "s", ".", "append", "(", "p", "[", "rgen", ".", "randint", "(", "3", ",", "23", ")", "]", ")", "for", "_", "in", "xrange", "(", "20", ")", ":", "s", ".", "append", "(", "p", "[", "rgen", ".", "randint", "(", "3", ",", "23", ")", "]", ")", "s", ".", "append", "(", "p", "[", "0", "]", ")", "s", ".", "append", "(", "p", "[", "1", "]", ")", "s", ".", "append", "(", "p", "[", "2", "]", ")", "s", ".", "append", "(", "p", "[", "rgen", ".", "randint", "(", "3", ",", "23", ")", "]", ")", "return", "(", "[", "s", "]", ",", "[", "[", "p", "[", "0", "]", ",", "p", "[", "1", "]", ",", "p", "[", "2", "]", "]", "]", ")" ]
simple sequences for hl0 .
train
false
4,133
def _partition_estimators(n_estimators, n_jobs): n_jobs = min(_get_n_jobs(n_jobs), n_estimators) n_estimators_per_job = ((n_estimators // n_jobs) * np.ones(n_jobs, dtype=np.int)) n_estimators_per_job[:(n_estimators % n_jobs)] += 1 starts = np.cumsum(n_estimators_per_job) return (n_jobs, n_estimators_per_job.tolist(), ([0] + starts.tolist()))
[ "def", "_partition_estimators", "(", "n_estimators", ",", "n_jobs", ")", ":", "n_jobs", "=", "min", "(", "_get_n_jobs", "(", "n_jobs", ")", ",", "n_estimators", ")", "n_estimators_per_job", "=", "(", "(", "n_estimators", "//", "n_jobs", ")", "*", "np", ".", "ones", "(", "n_jobs", ",", "dtype", "=", "np", ".", "int", ")", ")", "n_estimators_per_job", "[", ":", "(", "n_estimators", "%", "n_jobs", ")", "]", "+=", "1", "starts", "=", "np", ".", "cumsum", "(", "n_estimators_per_job", ")", "return", "(", "n_jobs", ",", "n_estimators_per_job", ".", "tolist", "(", ")", ",", "(", "[", "0", "]", "+", "starts", ".", "tolist", "(", ")", ")", ")" ]
private function used to partition estimators between jobs .
train
true
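Dropping the private _get_n_jobs helper, the partitioning arithmetic can be checked in isolation; for 10 estimators over 3 jobs it yields job sizes [4, 3, 3] with cumulative starts [0, 4, 7, 10]:

import numpy as np

def partition_sketch(n_estimators, n_jobs):
    """Re-derivation of the split without the private helper."""
    n_jobs = min(n_jobs, n_estimators)
    per_job = (n_estimators // n_jobs) * np.ones(n_jobs, dtype=int)
    per_job[: n_estimators % n_jobs] += 1  # spread the remainder over leading jobs
    starts = np.cumsum(per_job)
    return n_jobs, per_job.tolist(), [0] + starts.tolist()

print(partition_sketch(10, 3))  # (3, [4, 3, 3], [0, 4, 7, 10])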
4,135
def rischDE(fa, fd, ga, gd, DE): (_, (fa, fd)) = weak_normalizer(fa, fd, DE) (a, (ba, bd), (ca, cd), hn) = normal_denom(fa, fd, ga, gd, DE) (A, B, C, hs) = special_denom(a, ba, bd, ca, cd, DE) try: n = bound_degree(A, B, C, DE) except NotImplementedError: n = oo (B, C, m, alpha, beta) = spde(A, B, C, n, DE) if C.is_zero: y = C else: y = solve_poly_rde(B, C, m, DE) return (((alpha * y) + beta), (hn * hs))
[ "def", "rischDE", "(", "fa", ",", "fd", ",", "ga", ",", "gd", ",", "DE", ")", ":", "(", "_", ",", "(", "fa", ",", "fd", ")", ")", "=", "weak_normalizer", "(", "fa", ",", "fd", ",", "DE", ")", "(", "a", ",", "(", "ba", ",", "bd", ")", ",", "(", "ca", ",", "cd", ")", ",", "hn", ")", "=", "normal_denom", "(", "fa", ",", "fd", ",", "ga", ",", "gd", ",", "DE", ")", "(", "A", ",", "B", ",", "C", ",", "hs", ")", "=", "special_denom", "(", "a", ",", "ba", ",", "bd", ",", "ca", ",", "cd", ",", "DE", ")", "try", ":", "n", "=", "bound_degree", "(", "A", ",", "B", ",", "C", ",", "DE", ")", "except", "NotImplementedError", ":", "n", "=", "oo", "(", "B", ",", "C", ",", "m", ",", "alpha", ",", "beta", ")", "=", "spde", "(", "A", ",", "B", ",", "C", ",", "n", ",", "DE", ")", "if", "C", ".", "is_zero", ":", "y", "=", "C", "else", ":", "y", "=", "solve_poly_rde", "(", "B", ",", "C", ",", "m", ",", "DE", ")", "return", "(", "(", "(", "alpha", "*", "y", ")", "+", "beta", ")", ",", "(", "hn", "*", "hs", ")", ")" ]
solve a risch differential equation : Dy + f*y == g .
train
false
4,136
def get_user_password(sockfile): return ('root', '')
[ "def", "get_user_password", "(", "sockfile", ")", ":", "return", "(", "'root'", ",", "''", ")" ]
returns a ( user , password ) tuple .
train
false
4,139
@LiquidTags.register('pygal') def pygal_parser(preprocessor, tag, markup): data = loads(markup) if ((tag == 'pygal') and (data is not None)): output = run_pygal(data) return ('<div class="pygal" style="text-align: center;"><embed type="image/svg+xml" src=%s style="max-width:1000px"/></div>' % output) else: raise ValueError('Error processing input. \nExpected syntax: {0}'.format(SYNTAX))
[ "@", "LiquidTags", ".", "register", "(", "'pygal'", ")", "def", "pygal_parser", "(", "preprocessor", ",", "tag", ",", "markup", ")", ":", "data", "=", "loads", "(", "markup", ")", "if", "(", "(", "tag", "==", "'pygal'", ")", "and", "(", "data", "is", "not", "None", ")", ")", ":", "output", "=", "run_pygal", "(", "data", ")", "return", "(", "'<div class=\"pygal\" style=\"text-align: center;\"><embed type=\"image/svg+xml\" src=%s style=\"max-width:1000px\"/></div>'", "%", "output", ")", "else", ":", "raise", "ValueError", "(", "'Error processing input. \\nExpected syntax: {0}'", ".", "format", "(", "SYNTAX", ")", ")" ]
simple pygal parser .
train
true
4,140
def get_request_and_user_id(): from framework.sessions import get_session req = get_cache_key() user_id = None if isinstance(req, FlaskRequest): session = get_session() user_id = session.data.get('auth_user_id') elif hasattr(req, 'user'): user_id = getattr(req.user, '_id', None) return (req, user_id)
[ "def", "get_request_and_user_id", "(", ")", ":", "from", "framework", ".", "sessions", "import", "get_session", "req", "=", "get_cache_key", "(", ")", "user_id", "=", "None", "if", "isinstance", "(", "req", ",", "FlaskRequest", ")", ":", "session", "=", "get_session", "(", ")", "user_id", "=", "session", ".", "data", ".", "get", "(", "'auth_user_id'", ")", "elif", "hasattr", "(", "req", ",", "'user'", ")", ":", "user_id", "=", "getattr", "(", "req", ".", "user", ",", "'_id'", ",", "None", ")", "return", "(", "req", ",", "user_id", ")" ]
fetch a request and user id from either a django or flask request .
train
false
4,141
@theano.gof.local_optimizer([DiagonalSubtensor, IncDiagonalSubtensor]) def local_inplace_DiagonalSubtensor(node): if (isinstance(node.op, (DiagonalSubtensor, IncDiagonalSubtensor)) and (not node.op.inplace)): new_op = node.op.__class__(inplace=True) new_node = new_op(*node.inputs) copy_stack_trace(node.outputs[0], new_node) return [new_node] return False
[ "@", "theano", ".", "gof", ".", "local_optimizer", "(", "[", "DiagonalSubtensor", ",", "IncDiagonalSubtensor", "]", ")", "def", "local_inplace_DiagonalSubtensor", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "(", "DiagonalSubtensor", ",", "IncDiagonalSubtensor", ")", ")", "and", "(", "not", "node", ".", "op", ".", "inplace", ")", ")", ":", "new_op", "=", "node", ".", "op", ".", "__class__", "(", "inplace", "=", "True", ")", "new_node", "=", "new_op", "(", "*", "node", ".", "inputs", ")", "copy_stack_trace", "(", "node", ".", "outputs", "[", "0", "]", ",", "new_node", ")", "return", "[", "new_node", "]", "return", "False" ]
also works for incdiagonalsubtensor .
train
false
4,142
def hotpatch_oswrite(conn): conn.execute(textwrap.dedent('\n import sys\n import os\n def patched_write(fd, s):\n if fd==1:\n return sys.stdout.write(s)\n elif fd==2:\n return sys.stdout.write(s)\n else:\n return os.write(fd, s)\n os.write=patched_write\n '))
[ "def", "hotpatch_oswrite", "(", "conn", ")", ":", "conn", ".", "execute", "(", "textwrap", ".", "dedent", "(", "'\\n import sys\\n import os\\n def patched_write(fd, s):\\n if fd==1:\\n return sys.stdout.write(s)\\n elif fd==2:\\n return sys.stdout.write(s)\\n else:\\n return os.write(fd, s)\\n os.write=patched_write\\n '", ")", ")" ]
some scripts/libraries use os.write directly , so it is patched to route fds 1 and 2 through sys.stdout .
train
false
4,143
def test_install_package_conflict_prefix_and_user(script, data): prefix_path = (script.scratch_path / 'prefix') result = script.pip('install', '-f', data.find_links, '--no-index', '--user', '--prefix', prefix_path, 'simple==1.0', expect_error=True, quiet=True) assert ("Can not combine '--user' and '--prefix'" in result.stderr)
[ "def", "test_install_package_conflict_prefix_and_user", "(", "script", ",", "data", ")", ":", "prefix_path", "=", "(", "script", ".", "scratch_path", "/", "'prefix'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'--no-index'", ",", "'--user'", ",", "'--prefix'", ",", "prefix_path", ",", "'simple==1.0'", ",", "expect_error", "=", "True", ",", "quiet", "=", "True", ")", "assert", "(", "\"Can not combine '--user' and '--prefix'\"", "in", "result", ".", "stderr", ")" ]
test that installing a package using pip install --prefix --user errors out .
train
false
4,145
def runTestSlow(test_name, mode): (output, errors, ec) = run_one_command(mode, test_name) return (ec, errors)
[ "def", "runTestSlow", "(", "test_name", ",", "mode", ")", ":", "(", "output", ",", "errors", ",", "ec", ")", "=", "run_one_command", "(", "mode", ",", "test_name", ")", "return", "(", "ec", ",", "errors", ")" ]
helper function that runs a test as a separate process .
train
false
4,146
def _create_module(name): module = new.module(name) sys.modules[name] = module return module
[ "def", "_create_module", "(", "name", ")", ":", "module", "=", "new", ".", "module", "(", "name", ")", "sys", ".", "modules", "[", "name", "]", "=", "module", "return", "module" ]
create a single top-level module and add it to sys.modules .
train
false
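_create_module uses the long-gone Python 2 `new` module. The Python 3 equivalent, sketched with types.ModuleType:

import sys
import types

def create_module_sketch(name):
    """Create an empty top-level module and register it in sys.modules."""
    module = types.ModuleType(name)
    sys.modules[name] = module
    return module

m = create_module_sketch('my_virtual_pkg')
assert sys.modules['my_virtual_pkg'] is m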
4,148
def validate_and_find_master_dns(session, parsed_globals, cluster_id): cluster_state = emrutils.get_cluster_state(session, parsed_globals, cluster_id) if (cluster_state in constants.TERMINATED_STATES): raise exceptions.ClusterTerminatedError emr = emrutils.get_client(session, parsed_globals) try: cluster_running_waiter = emr.get_waiter('cluster_running') if (cluster_state in constants.STARTING_STATES): print 'Waiting for the cluster to start.' cluster_running_waiter.wait(ClusterId=cluster_id) except WaiterError: raise exceptions.MasterDNSNotAvailableError return emrutils.find_master_dns(session=session, cluster_id=cluster_id, parsed_globals=parsed_globals)
[ "def", "validate_and_find_master_dns", "(", "session", ",", "parsed_globals", ",", "cluster_id", ")", ":", "cluster_state", "=", "emrutils", ".", "get_cluster_state", "(", "session", ",", "parsed_globals", ",", "cluster_id", ")", "if", "(", "cluster_state", "in", "constants", ".", "TERMINATED_STATES", ")", ":", "raise", "exceptions", ".", "ClusterTerminatedError", "emr", "=", "emrutils", ".", "get_client", "(", "session", ",", "parsed_globals", ")", "try", ":", "cluster_running_waiter", "=", "emr", ".", "get_waiter", "(", "'cluster_running'", ")", "if", "(", "cluster_state", "in", "constants", ".", "STARTING_STATES", ")", ":", "print", "'Waiting for the cluster to start.'", "cluster_running_waiter", ".", "wait", "(", "ClusterId", "=", "cluster_id", ")", "except", "WaiterError", ":", "raise", "exceptions", ".", "MasterDNSNotAvailableError", "return", "emrutils", ".", "find_master_dns", "(", "session", "=", "session", ",", "cluster_id", "=", "cluster_id", ",", "parsed_globals", "=", "parsed_globals", ")" ]
utility method for ssh : checks that the cluster is not terminated , waits for it to start , and returns the master node dns .
train
false
4,149
def _AppendOrReturn(append, element): if ((append is not None) and (element is not None)): if (isinstance(element, list) or isinstance(element, tuple)): append.extend(element) else: append.append(element) else: return element
[ "def", "_AppendOrReturn", "(", "append", ",", "element", ")", ":", "if", "(", "(", "append", "is", "not", "None", ")", "and", "(", "element", "is", "not", "None", ")", ")", ":", "if", "(", "isinstance", "(", "element", ",", "list", ")", "or", "isinstance", "(", "element", ",", "tuple", ")", ")", ":", "append", ".", "extend", "(", "element", ")", "else", ":", "append", ".", "append", "(", "element", ")", "else", ":", "return", "element" ]
if |append| is none , return |element| ; otherwise append or extend |append| with |element| .
train
false
4,150
def commit_manually(using=None): warnings.warn('commit_manually is deprecated in favor of set_autocommit.', PendingDeprecationWarning, stacklevel=2) def entering(using): enter_transaction_management(using=using) def exiting(exc_type, using): leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
[ "def", "commit_manually", "(", "using", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'commit_manually is deprecated in favor of set_autocommit.'", ",", "PendingDeprecationWarning", ",", "stacklevel", "=", "2", ")", "def", "entering", "(", "using", ")", ":", "enter_transaction_management", "(", "using", "=", "using", ")", "def", "exiting", "(", "exc_type", ",", "using", ")", ":", "leave_transaction_management", "(", "using", "=", "using", ")", "return", "_transaction_func", "(", "entering", ",", "exiting", ",", "using", ")" ]
decorator that activates manual transaction control .
train
false
4,151
def morsel_to_cookie(morsel): expires = None if morsel['max-age']: expires = (time.time() + morsel['max-age']) elif morsel['expires']: expires = morsel['expires'] if (type(expires) == type('')): time_template = '%a, %d-%b-%Y %H:%M:%S GMT' expires = time.mktime(time.strptime(expires, time_template)) c = create_cookie(name=morsel.key, value=morsel.value, version=(morsel['version'] or 0), port=None, domain=morsel['domain'], path=morsel['path'], secure=bool(morsel['secure']), expires=expires, discard=False, comment=morsel['comment'], comment_url=bool(morsel['comment']), rest={'HttpOnly': morsel['httponly']}, rfc2109=False) return c
[ "def", "morsel_to_cookie", "(", "morsel", ")", ":", "expires", "=", "None", "if", "morsel", "[", "'max-age'", "]", ":", "expires", "=", "(", "time", ".", "time", "(", ")", "+", "morsel", "[", "'max-age'", "]", ")", "elif", "morsel", "[", "'expires'", "]", ":", "expires", "=", "morsel", "[", "'expires'", "]", "if", "(", "type", "(", "expires", ")", "==", "type", "(", "''", ")", ")", ":", "time_template", "=", "'%a, %d-%b-%Y %H:%M:%S GMT'", "expires", "=", "time", ".", "mktime", "(", "time", ".", "strptime", "(", "expires", ",", "time_template", ")", ")", "c", "=", "create_cookie", "(", "name", "=", "morsel", ".", "key", ",", "value", "=", "morsel", ".", "value", ",", "version", "=", "(", "morsel", "[", "'version'", "]", "or", "0", ")", ",", "port", "=", "None", ",", "domain", "=", "morsel", "[", "'domain'", "]", ",", "path", "=", "morsel", "[", "'path'", "]", ",", "secure", "=", "bool", "(", "morsel", "[", "'secure'", "]", ")", ",", "expires", "=", "expires", ",", "discard", "=", "False", ",", "comment", "=", "morsel", "[", "'comment'", "]", ",", "comment_url", "=", "bool", "(", "morsel", "[", "'comment'", "]", ")", ",", "rest", "=", "{", "'HttpOnly'", ":", "morsel", "[", "'httponly'", "]", "}", ",", "rfc2109", "=", "False", ")", "return", "c" ]
convert a morsel object into a cookie containing the one k/v pair .
train
true
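A morsel can be produced with the stdlib's http.cookies for testing; the final conversion is left commented out because create_cookie belongs to the surrounding module (presumably requests.cookies) and is not shown here:

from http.cookies import SimpleCookie

jar = SimpleCookie()
jar.load('sessionid=abc123; Max-Age=3600; Path=/; HttpOnly')
morsel = jar['sessionid']
print(morsel.key, morsel['max-age'], morsel['path'])  # sessionid 3600 /
# cookie = morsel_to_cookie(morsel)  # would build a Cookie expiring in ~1 hour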
4,153
def get_recursive_filelist(args): files = [] for arg in args: if os.path.isfile(arg): files.append(arg) continue if os.path.isdir(arg): newfiles = listFiles(arg, recurse=1, return_folders=1) files.extend(newfiles) return [f for f in files if (not os.path.islink(f))]
[ "def", "get_recursive_filelist", "(", "args", ")", ":", "files", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "os", ".", "path", ".", "isfile", "(", "arg", ")", ":", "files", ".", "append", "(", "arg", ")", "continue", "if", "os", ".", "path", ".", "isdir", "(", "arg", ")", ":", "newfiles", "=", "listFiles", "(", "arg", ",", "recurse", "=", "1", ",", "return_folders", "=", "1", ")", "files", ".", "extend", "(", "newfiles", ")", "return", "[", "f", "for", "f", "in", "files", "if", "(", "not", "os", ".", "path", ".", "islink", "(", "f", ")", ")", "]" ]
recurse through all the files and dirs in *args* , ignoring symbolic links , and return the files as a list of strings .
train
false
4,154
def fix_abundance_labels(output_consensus_fp, filtered_consensus_fp): consensus_f = open(output_consensus_fp, 'U') filtered_f = open(filtered_consensus_fp, 'w') for (label, seq) in parse_fasta(consensus_f): fasta_label = label.split()[0] size = ('size=' + label.split('size=')[1].replace(';', '')) final_label = ('%s;%s' % (fasta_label, size)) filtered_f.write(('>%s\n%s\n' % (final_label, seq))) consensus_f.close() filtered_f.close()
[ "def", "fix_abundance_labels", "(", "output_consensus_fp", ",", "filtered_consensus_fp", ")", ":", "consensus_f", "=", "open", "(", "output_consensus_fp", ",", "'U'", ")", "filtered_f", "=", "open", "(", "filtered_consensus_fp", ",", "'w'", ")", "for", "(", "label", ",", "seq", ")", "in", "parse_fasta", "(", "consensus_f", ")", ":", "fasta_label", "=", "label", ".", "split", "(", ")", "[", "0", "]", "size", "=", "(", "'size='", "+", "label", ".", "split", "(", "'size='", ")", "[", "1", "]", ".", "replace", "(", "';'", ",", "''", ")", ")", "final_label", "=", "(", "'%s;%s'", "%", "(", "fasta_label", ",", "size", ")", ")", "filtered_f", ".", "write", "(", "(", "'>%s\\n%s\\n'", "%", "(", "final_label", ",", "seq", ")", ")", ")", "consensus_f", ".", "close", "(", ")", "filtered_f", ".", "close", "(", ")" ]
puts the size= part of the label as the second component after whitespace . output_consensus_fp : consensus filepath with abundance data ; filtered_consensus_fp : output filepath name .
train
false
4,155
def dmp_exclude(f, u, K): if ((not u) or dmp_ground_p(f, None, u)): return ([], f, u) (J, F) = ([], dmp_to_dict(f, u)) for j in range(0, (u + 1)): for monom in F.keys(): if monom[j]: break else: J.append(j) if (not J): return ([], f, u) f = {} for (monom, coeff) in F.items(): monom = list(monom) for j in reversed(J): del monom[j] f[tuple(monom)] = coeff u -= len(J) return (J, dmp_from_dict(f, u, K), u)
[ "def", "dmp_exclude", "(", "f", ",", "u", ",", "K", ")", ":", "if", "(", "(", "not", "u", ")", "or", "dmp_ground_p", "(", "f", ",", "None", ",", "u", ")", ")", ":", "return", "(", "[", "]", ",", "f", ",", "u", ")", "(", "J", ",", "F", ")", "=", "(", "[", "]", ",", "dmp_to_dict", "(", "f", ",", "u", ")", ")", "for", "j", "in", "range", "(", "0", ",", "(", "u", "+", "1", ")", ")", ":", "for", "monom", "in", "F", ".", "keys", "(", ")", ":", "if", "monom", "[", "j", "]", ":", "break", "else", ":", "J", ".", "append", "(", "j", ")", "if", "(", "not", "J", ")", ":", "return", "(", "[", "]", ",", "f", ",", "u", ")", "f", "=", "{", "}", "for", "(", "monom", ",", "coeff", ")", "in", "F", ".", "items", "(", ")", ":", "monom", "=", "list", "(", "monom", ")", "for", "j", "in", "reversed", "(", "J", ")", ":", "del", "monom", "[", "j", "]", "f", "[", "tuple", "(", "monom", ")", "]", "=", "coeff", "u", "-=", "len", "(", "J", ")", "return", "(", "J", ",", "dmp_from_dict", "(", "f", ",", "u", ",", "K", ")", ",", "u", ")" ]
exclude useless levels from f .
train
false
4,156
def is_unresponsive(url): host = urlparse(url).hostname return (host in unresponsive_hosts)
[ "def", "is_unresponsive", "(", "url", ")", ":", "host", "=", "urlparse", "(", "url", ")", ".", "hostname", "return", "(", "host", "in", "unresponsive_hosts", ")" ]
checks if the host of the given url has timed out within wait_time .
train
false
4,157
@pytest.fixture def guest(): return Guest()
[ "@", "pytest", ".", "fixture", "def", "guest", "(", ")", ":", "return", "Guest", "(", ")" ]
return a guest user .
train
false
4,158
def energy(W, V, H): return (- (T.dot(V, W) * H).sum(axis=1))
[ "def", "energy", "(", "W", ",", "V", ",", "H", ")", ":", "return", "(", "-", "(", "T", ".", "dot", "(", "V", ",", "W", ")", "*", "H", ")", ".", "sum", "(", "axis", "=", "1", ")", ")" ]
compute the total energy for the hamiltonian at a given position/momentum .
train
false
4,159
def list_bindings(site): ret = dict() sites = list_sites() if (site not in sites): _LOG.warning('Site not found: %s', site) return ret ret = sites[site]['bindings'] if (not ret): _LOG.warning('No bindings found for site: %s', site) return ret
[ "def", "list_bindings", "(", "site", ")", ":", "ret", "=", "dict", "(", ")", "sites", "=", "list_sites", "(", ")", "if", "(", "site", "not", "in", "sites", ")", ":", "_LOG", ".", "warning", "(", "'Site not found: %s'", ",", "site", ")", "return", "ret", "ret", "=", "sites", "[", "site", "]", "[", "'bindings'", "]", "if", "(", "not", "ret", ")", ":", "_LOG", ".", "warning", "(", "'No bindings found for site: %s'", ",", "site", ")", "return", "ret" ]
get all configured iis bindings for the specified site .
train
false
4,161
def search_external_subtitles(path, directory=None): (dirpath, filename) = os.path.split(path) dirpath = (dirpath or '.') (fileroot, fileext) = os.path.splitext(filename) subtitles = {} for p in os.listdir((directory or dirpath)): if ((not p.startswith(fileroot)) or (not p.endswith(SUBTITLE_EXTENSIONS))): continue language = Language('und') language_code = p[len(fileroot):(- len(os.path.splitext(p)[1]))].replace(fileext, '').replace('_', '-')[1:] if language_code: try: language = Language.fromietf(language_code) except (ValueError, LanguageReverseError): logger.error('Cannot parse language code %r', language_code) subtitles[p] = language logger.debug('Found subtitles %r', subtitles) return subtitles
[ "def", "search_external_subtitles", "(", "path", ",", "directory", "=", "None", ")", ":", "(", "dirpath", ",", "filename", ")", "=", "os", ".", "path", ".", "split", "(", "path", ")", "dirpath", "=", "(", "dirpath", "or", "'.'", ")", "(", "fileroot", ",", "fileext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "subtitles", "=", "{", "}", "for", "p", "in", "os", ".", "listdir", "(", "(", "directory", "or", "dirpath", ")", ")", ":", "if", "(", "(", "not", "p", ".", "startswith", "(", "fileroot", ")", ")", "or", "(", "not", "p", ".", "endswith", "(", "SUBTITLE_EXTENSIONS", ")", ")", ")", ":", "continue", "language", "=", "Language", "(", "'und'", ")", "language_code", "=", "p", "[", "len", "(", "fileroot", ")", ":", "(", "-", "len", "(", "os", ".", "path", ".", "splitext", "(", "p", ")", "[", "1", "]", ")", ")", "]", ".", "replace", "(", "fileext", ",", "''", ")", ".", "replace", "(", "'_'", ",", "'-'", ")", "[", "1", ":", "]", "if", "language_code", ":", "try", ":", "language", "=", "Language", ".", "fromietf", "(", "language_code", ")", "except", "(", "ValueError", ",", "LanguageReverseError", ")", ":", "logger", ".", "error", "(", "'Cannot parse language code %r'", ",", "language_code", ")", "subtitles", "[", "p", "]", "=", "language", "logger", ".", "debug", "(", "'Found subtitles %r'", ",", "subtitles", ")", "return", "subtitles" ]
search for external subtitles from a video path and their associated language .
train
true
4,162
def _encode_text(name, value, dummy0, dummy1): value = _utf_8_encode(value)[0] return (((('\x02' + name) + _PACK_INT((len(value) + 1))) + value) + '\x00')
[ "def", "_encode_text", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "value", "=", "_utf_8_encode", "(", "value", ")", "[", "0", "]", "return", "(", "(", "(", "(", "'\\x02'", "+", "name", ")", "+", "_PACK_INT", "(", "(", "len", "(", "value", ")", "+", "1", ")", ")", ")", "+", "value", ")", "+", "'\\x00'", ")" ]
encode a python unicode / str as a bson string element .
train
true
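The BSON layout produced by _encode_text can be reproduced byte for byte with struct; note that in the snippet the name argument presumably already carries its trailing NUL, which this sketch adds explicitly:

import struct

def encode_text_sketch(name, value):
    """Byte-for-byte sketch of the BSON string element (type 0x02) layout."""
    data = value.encode('utf-8')
    return (b'\x02' + name.encode('utf-8') + b'\x00'  # type tag + cstring key
            + struct.pack('<i', len(data) + 1)        # int32 length incl. NUL
            + data + b'\x00')                         # payload + terminator

print(encode_text_sketch('k', 'hi').hex())  # 026b0003000000686900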
4,163
def numeric_collator(): global _numeric_collator if (_numeric_collator is None): _numeric_collator = collator().clone() _numeric_collator.strength = _icu.UCOL_SECONDARY _numeric_collator.numeric = True return _numeric_collator
[ "def", "numeric_collator", "(", ")", ":", "global", "_numeric_collator", "if", "(", "_numeric_collator", "is", "None", ")", ":", "_numeric_collator", "=", "collator", "(", ")", ".", "clone", "(", ")", "_numeric_collator", ".", "strength", "=", "_icu", ".", "UCOL_SECONDARY", "_numeric_collator", ".", "numeric", "=", "True", "return", "_numeric_collator" ]
uses natural sorting for numbers inside strings so something2 will sort before something10 .
train
false
4,165
def _get_returner(returner_types): for returner in returner_types: if returner: return returner
[ "def", "_get_returner", "(", "returner_types", ")", ":", "for", "returner", "in", "returner_types", ":", "if", "returner", ":", "return", "returner" ]
helper to iterate over returner_types and pick the first one .
train
false
4,167
def lookup_family(hostname): fallback = socket.AF_INET try: hostnames = socket.getaddrinfo((hostname or None), None, socket.AF_UNSPEC, socket.SOCK_STREAM) if (not hostnames): return fallback h = hostnames[0] return h[0] except socket.gaierror: return fallback
[ "def", "lookup_family", "(", "hostname", ")", ":", "fallback", "=", "socket", ".", "AF_INET", "try", ":", "hostnames", "=", "socket", ".", "getaddrinfo", "(", "(", "hostname", "or", "None", ")", ",", "None", ",", "socket", ".", "AF_UNSPEC", ",", "socket", ".", "SOCK_STREAM", ")", "if", "(", "not", "hostnames", ")", ":", "return", "fallback", "h", "=", "hostnames", "[", "0", "]", "return", "h", "[", "0", "]", "except", "socket", ".", "gaierror", ":", "return", "fallback" ]
lookup a hostname and determine its address family .
train
true
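Since lookup_family is module-internal, the body is repeated stand-alone below; on a typical machine it reports IPv4 or IPv6 for localhost and falls back to AF_INET when resolution fails:

import socket

def lookup_family_demo(hostname):
    try:
        infos = socket.getaddrinfo(hostname or None, None,
                                   socket.AF_UNSPEC, socket.SOCK_STREAM)
        return infos[0][0] if infos else socket.AF_INET  # first result's family
    except socket.gaierror:
        return socket.AF_INET  # unresolvable names default to IPv4

fam = lookup_family_demo('localhost')
print('IPv6' if fam == socket.AF_INET6 else 'IPv4')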