id_within_dataset: int64 (1 to 55.5k)
snippet: stringlengths (19 to 14.2k)
tokens: listlengths (6 to 1.63k)
nl: stringlengths (6 to 352)
split_within_dataset: stringclasses (1 value)
is_duplicated: bool (2 classes)
49,101
def extract_step_data(step):
    step_data = {
        'name': step.sentence,
        'duration': _get_duration(step),
        'meta': {
            'success': bool(step.passed),
            'failed': bool(step.failed),
            'skipped': (not step.passed) and (not step.failed) and step.has_definition,
            'undefined': not step.has_definition,
        },
        'failure': {},
    }
    if step.why:
        step_data['failure'] = {
            'exception': repr(step.why.exception),
            'traceback': step.why.traceback,
        }
    return step_data
[ "def", "extract_step_data", "(", "step", ")", ":", "step_data", "=", "{", "'name'", ":", "step", ".", "sentence", ",", "'duration'", ":", "_get_duration", "(", "step", ")", ",", "'meta'", ":", "{", "'success'", ":", "bool", "(", "step", ".", "passed", ")", ",", "'failed'", ":", "bool", "(", "step", ".", "failed", ")", ",", "'skipped'", ":", "(", "(", "not", "step", ".", "passed", ")", "and", "(", "not", "step", ".", "failed", ")", "and", "step", ".", "has_definition", ")", ",", "'undefined'", ":", "(", "not", "step", ".", "has_definition", ")", "}", ",", "'failure'", ":", "{", "}", "}", "if", "step", ".", "why", ":", "step_data", "[", "'failure'", "]", "=", "{", "'exception'", ":", "repr", "(", "step", ".", "why", ".", "exception", ")", ",", "'traceback'", ":", "step", ".", "why", ".", "traceback", "}", "return", "step_data" ]
extract data from a step instance .
train
false
49,104
def _has_fulfilled_all_milestones(user, course_id):
    return MilestoneError() if any_unfulfilled_milestones(course_id, user.id) else ACCESS_GRANTED
[ "def", "_has_fulfilled_all_milestones", "(", "user", ",", "course_id", ")", ":", "return", "(", "MilestoneError", "(", ")", "if", "any_unfulfilled_milestones", "(", "course_id", ",", "user", ".", "id", ")", "else", "ACCESS_GRANTED", ")" ]
returns whether the given user has fulfilled all milestones for the given course .
train
false
49,105
def evalcontextfilter(f):
    f.evalcontextfilter = True
    return f
[ "def", "evalcontextfilter", "(", "f", ")", ":", "f", ".", "evalcontextfilter", "=", "True", "return", "f" ]
decorator for marking eval-context dependent filters .
train
false
49,107
@pytest.mark.network
def test_git_with_editable_where_egg_contains_dev_string(script, tmpdir):
    result = script.pip(
        'install', '-e',
        '%s#egg=django-devserver' % local_checkout(
            'git+git://github.com/dcramer/django-devserver.git',
            tmpdir.join('cache')))
    result.assert_installed('django-devserver', with_files=['.git'])
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_git_with_editable_where_egg_contains_dev_string", "(", "script", ",", "tmpdir", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'-e'", ",", "(", "'%s#egg=django-devserver'", "%", "local_checkout", "(", "'git+git://github.com/dcramer/django-devserver.git'", ",", "tmpdir", ".", "join", "(", "'cache'", ")", ")", ")", ")", "result", ".", "assert_installed", "(", "'django-devserver'", ",", "with_files", "=", "[", "'.git'", "]", ")" ]
test cloning a git repository from an editable url which contains "dev" string .
train
false
49,108
def unpack_url(link, location, download_dir=None, only_download=False, session=None, hashes=None):
    if is_vcs_url(link):
        unpack_vcs_link(link, location)
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)
    else:
        if session is None:
            session = PipSession()
        unpack_http_url(link, location, download_dir, session, hashes=hashes)
    if only_download:
        write_delete_marker_file(location)
[ "def", "unpack_url", "(", "link", ",", "location", ",", "download_dir", "=", "None", ",", "only_download", "=", "False", ",", "session", "=", "None", ",", "hashes", "=", "None", ")", ":", "if", "is_vcs_url", "(", "link", ")", ":", "unpack_vcs_link", "(", "link", ",", "location", ")", "elif", "is_file_url", "(", "link", ")", ":", "unpack_file_url", "(", "link", ",", "location", ",", "download_dir", ",", "hashes", "=", "hashes", ")", "else", ":", "if", "(", "session", "is", "None", ")", ":", "session", "=", "PipSession", "(", ")", "unpack_http_url", "(", "link", ",", "location", ",", "download_dir", ",", "session", ",", "hashes", "=", "hashes", ")", "if", "only_download", ":", "write_delete_marker_file", "(", "location", ")" ]
unpack link .
train
true
49,110
def _skip_create_test_db(self, verbosity=1, autoclobber=False, serialize=True):
    if callable(getattr(self.connection.features, 'confirm', None)):
        self.connection.features.confirm()
    elif hasattr(self, '_rollback_works'):
        can_rollback = self._rollback_works()
        self.connection.settings_dict['SUPPORTS_TRANSACTIONS'] = can_rollback
    return self._get_test_db_name()
[ "def", "_skip_create_test_db", "(", "self", ",", "verbosity", "=", "1", ",", "autoclobber", "=", "False", ",", "serialize", "=", "True", ")", ":", "if", "callable", "(", "getattr", "(", "self", ".", "connection", ".", "features", ",", "'confirm'", ",", "None", ")", ")", ":", "self", ".", "connection", ".", "features", ".", "confirm", "(", ")", "elif", "hasattr", "(", "self", ",", "'_rollback_works'", ")", ":", "can_rollback", "=", "self", ".", "_rollback_works", "(", ")", "self", ".", "connection", ".", "settings_dict", "[", "'SUPPORTS_TRANSACTIONS'", "]", "=", "can_rollback", "return", "self", ".", "_get_test_db_name", "(", ")" ]
create_test_db implementation that skips both creation and flushing .
train
false
49,111
def validate_database_path(database):
    p = os.path.abspath(database)
    if not os.path.exists(p):
        raise ValueError('No such file or directory')
    if os.path.isfile(p):
        p = os.path.dirname(p)
    if not os.path.isdir(p):
        raise ValueError('Not a directory')
    return p
[ "def", "validate_database_path", "(", "database", ")", ":", "p", "=", "os", ".", "path", ".", "abspath", "(", "database", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ")", ":", "raise", "ValueError", "(", "'No such file or directory'", ")", "if", "os", ".", "path", ".", "isfile", "(", "p", ")", ":", "p", "=", "os", ".", "path", ".", "dirname", "(", "p", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", ")", ":", "raise", "ValueError", "(", "'Not a directory'", ")", "return", "p" ]
returns a valid database path , or raises a valueerror .
train
false
49,112
def qt_plugins_dir(namespace):
    if namespace not in ['PyQt4', 'PyQt5', 'PySide']:
        raise Exception('Invalid namespace: {0}'.format(namespace))
    paths = eval_statement('\n from {0}.QtCore import QCoreApplication;\n app = QCoreApplication([]);\n # For Python 2 print would give <PyQt4.QtCore.QStringList\n # object at 0x....>", so we need to convert each element separately\n str = getattr(__builtins__, \'unicode\', str); # for Python 2\n print([str(p) for p in app.libraryPaths()])\n '.format(namespace))
    if not paths:
        raise Exception('Cannot find {0} plugin directories'.format(namespace))
    else:
        valid_paths = []
        for path in paths:
            if os.path.isdir(path):
                valid_paths.append(str(path))
        qt_plugin_paths = valid_paths
    if not qt_plugin_paths:
        raise Exception('\n Cannot find existing {0} plugin directories\n Paths checked: {1}\n '.format(namespace, ', '.join(paths)))
    return qt_plugin_paths
[ "def", "qt_plugins_dir", "(", "namespace", ")", ":", "if", "(", "namespace", "not", "in", "[", "'PyQt4'", ",", "'PyQt5'", ",", "'PySide'", "]", ")", ":", "raise", "Exception", "(", "'Invalid namespace: {0}'", ".", "format", "(", "namespace", ")", ")", "paths", "=", "eval_statement", "(", "'\\n from {0}.QtCore import QCoreApplication;\\n app = QCoreApplication([]);\\n # For Python 2 print would give <PyQt4.QtCore.QStringList\\n # object at 0x....>\", so we need to convert each element separately\\n str = getattr(__builtins__, \\'unicode\\', str); # for Python 2\\n print([str(p) for p in app.libraryPaths()])\\n '", ".", "format", "(", "namespace", ")", ")", "if", "(", "not", "paths", ")", ":", "raise", "Exception", "(", "'Cannot find {0} plugin directories'", ".", "format", "(", "namespace", ")", ")", "else", ":", "valid_paths", "=", "[", "]", "for", "path", "in", "paths", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "valid_paths", ".", "append", "(", "str", "(", "path", ")", ")", "qt_plugin_paths", "=", "valid_paths", "if", "(", "not", "qt_plugin_paths", ")", ":", "raise", "Exception", "(", "'\\n Cannot find existing {0} plugin directories\\n Paths checked: {1}\\n '", ".", "format", "(", "namespace", ",", "', '", ".", "join", "(", "paths", ")", ")", ")", "return", "qt_plugin_paths" ]
return list of paths searched for plugins .
train
false
49,114
def update_ref(refname, newval, oldval, repo_dir=None):
    if not oldval:
        oldval = ''
    assert refname.startswith('refs/heads/') or refname.startswith('refs/tags/')
    p = subprocess.Popen(['git', 'update-ref', refname,
                          newval.encode('hex'), oldval.encode('hex')],
                         preexec_fn=_gitenv(repo_dir))
    _git_wait('git update-ref', p)
[ "def", "update_ref", "(", "refname", ",", "newval", ",", "oldval", ",", "repo_dir", "=", "None", ")", ":", "if", "(", "not", "oldval", ")", ":", "oldval", "=", "''", "assert", "(", "refname", ".", "startswith", "(", "'refs/heads/'", ")", "or", "refname", ".", "startswith", "(", "'refs/tags/'", ")", ")", "p", "=", "subprocess", ".", "Popen", "(", "[", "'git'", ",", "'update-ref'", ",", "refname", ",", "newval", ".", "encode", "(", "'hex'", ")", ",", "oldval", ".", "encode", "(", "'hex'", ")", "]", ",", "preexec_fn", "=", "_gitenv", "(", "repo_dir", ")", ")", "_git_wait", "(", "'git update-ref'", ",", "p", ")" ]
update a repository reference .
train
false
49,115
def get_layout(name, *args, **kwargs):
    if name not in _layout_map:
        raise KeyError("Graph layout '%s' not found. Should be one of %s"
                       % (name, AVAILABLE_LAYOUTS))
    layout = _layout_map[name]
    if inspect.isclass(layout):
        layout = layout(*args, **kwargs)
    return layout
[ "def", "get_layout", "(", "name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "name", "not", "in", "_layout_map", ")", ":", "raise", "KeyError", "(", "(", "\"Graph layout '%s' not found. Should be one of %s\"", "%", "(", "name", ",", "AVAILABLE_LAYOUTS", ")", ")", ")", "layout", "=", "_layout_map", "[", "name", "]", "if", "inspect", ".", "isclass", "(", "layout", ")", ":", "layout", "=", "layout", "(", "*", "args", ",", "**", "kwargs", ")", "return", "layout" ]
retrieve a graph layout some graph layouts accept extra options .
train
true
49,116
def _check_init(path, mod_path):
    for part in mod_path:
        path = os.path.join(path, part)
        if not _has_init(path):
            return False
    return True
[ "def", "_check_init", "(", "path", ",", "mod_path", ")", ":", "for", "part", "in", "mod_path", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "part", ")", "if", "(", "not", "_has_init", "(", "path", ")", ")", ":", "return", "False", "return", "True" ]
check there are some __init__ files all along the module path .
train
false
49,117
def _get_dacl(path, objectType):
    try:
        dacl = win32security.GetNamedSecurityInfo(
            path, objectType, win32security.DACL_SECURITY_INFORMATION
        ).GetSecurityDescriptorDacl()
    except Exception:
        dacl = None
    return dacl
[ "def", "_get_dacl", "(", "path", ",", "objectType", ")", ":", "try", ":", "dacl", "=", "win32security", ".", "GetNamedSecurityInfo", "(", "path", ",", "objectType", ",", "win32security", ".", "DACL_SECURITY_INFORMATION", ")", ".", "GetSecurityDescriptorDacl", "(", ")", "except", "Exception", ":", "dacl", "=", "None", "return", "dacl" ]
gets the dacl of a path .
train
true
49,118
def _init_caffe(cfg):
    import caffe
    np.random.seed(cfg.RNG_SEED)
    caffe.set_random_seed(cfg.RNG_SEED)
    caffe.set_mode_gpu()
    caffe.set_device(cfg.GPU_ID)
[ "def", "_init_caffe", "(", "cfg", ")", ":", "import", "caffe", "np", ".", "random", ".", "seed", "(", "cfg", ".", "RNG_SEED", ")", "caffe", ".", "set_random_seed", "(", "cfg", ".", "RNG_SEED", ")", "caffe", ".", "set_mode_gpu", "(", ")", "caffe", ".", "set_device", "(", "cfg", ".", "GPU_ID", ")" ]
initialize pycaffe in a training process .
train
false
49,119
def _attempt_id_to_task_id(attempt_id):
    return 'task_' + '_'.join(attempt_id.split('_')[1:5])
[ "def", "_attempt_id_to_task_id", "(", "attempt_id", ")", ":", "return", "(", "'task_'", "+", "'_'", ".", "join", "(", "attempt_id", ".", "split", "(", "'_'", ")", "[", "1", ":", "5", "]", ")", ")" ]
convert an attempt id to the corresponding task id .
train
false
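As a hedged illustration of the conversion above (the attempt id below is a hypothetical Hadoop-style id, not taken from the dataset):

# a minimal sketch; the sample attempt id is hypothetical
attempt_id = 'attempt_201601081945_0005_m_000005_0'
task_id = 'task_' + '_'.join(attempt_id.split('_')[1:5])
assert task_id == 'task_201601081945_0005_m_000005'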
49,121
def eq_strict(a, b):
    if type(a) is type(b):
        return a == b
    return False
[ "def", "eq_strict", "(", "a", ",", "b", ")", ":", "if", "(", "type", "(", "a", ")", "is", "type", "(", "b", ")", ")", ":", "return", "(", "a", "==", "b", ")", "return", "False" ]
returns true if both values have the same type and are equal .
train
false
49,122
def _asarray_validated(a, check_finite=True, sparse_ok=False, objects_ok=False, mask_ok=False, as_inexact=False):
    if not sparse_ok:
        import scipy.sparse
        if scipy.sparse.issparse(a):
            msg = ('Sparse matrices are not supported by this function. '
                   'Perhaps one of the scipy.sparse.linalg functions would work instead.')
            raise ValueError(msg)
    if not mask_ok:
        if np.ma.isMaskedArray(a):
            raise ValueError('masked arrays are not supported')
    toarray = np.asarray_chkfinite if check_finite else np.asarray
    a = toarray(a)
    if not objects_ok:
        if a.dtype is np.dtype('O'):
            raise ValueError('object arrays are not supported')
    if as_inexact:
        if not np.issubdtype(a.dtype, np.inexact):
            a = toarray(a, dtype=np.float_)
    return a
[ "def", "_asarray_validated", "(", "a", ",", "check_finite", "=", "True", ",", "sparse_ok", "=", "False", ",", "objects_ok", "=", "False", ",", "mask_ok", "=", "False", ",", "as_inexact", "=", "False", ")", ":", "if", "(", "not", "sparse_ok", ")", ":", "import", "scipy", ".", "sparse", "if", "scipy", ".", "sparse", ".", "issparse", "(", "a", ")", ":", "msg", "=", "'Sparse matrices are not supported by this function. Perhaps one of the scipy.sparse.linalg functions would work instead.'", "raise", "ValueError", "(", "msg", ")", "if", "(", "not", "mask_ok", ")", ":", "if", "np", ".", "ma", ".", "isMaskedArray", "(", "a", ")", ":", "raise", "ValueError", "(", "'masked arrays are not supported'", ")", "toarray", "=", "(", "np", ".", "asarray_chkfinite", "if", "check_finite", "else", "np", ".", "asarray", ")", "a", "=", "toarray", "(", "a", ")", "if", "(", "not", "objects_ok", ")", ":", "if", "(", "a", ".", "dtype", "is", "np", ".", "dtype", "(", "'O'", ")", ")", ":", "raise", "ValueError", "(", "'object arrays are not supported'", ")", "if", "as_inexact", ":", "if", "(", "not", "np", ".", "issubdtype", "(", "a", ".", "dtype", ",", "np", ".", "inexact", ")", ")", ":", "a", "=", "toarray", "(", "a", ",", "dtype", "=", "np", ".", "float_", ")", "return", "a" ]
helper function for scipy argument validation .
train
false
49,123
def _context(response):
    return {u'request': response.wsgi_request}
[ "def", "_context", "(", "response", ")", ":", "return", "{", "u'request'", ":", "response", ".", "wsgi_request", "}" ]
get a context dictionary for a serializer appropriate for the given response .
train
false
49,124
def sonar(registry, xml_parent, data):
    sonar = XML.SubElement(xml_parent, 'hudson.plugins.sonar.SonarPublisher')
    sonar.set('plugin', 'sonar')
    if 'jdk' in data:
        XML.SubElement(sonar, 'jdk').text = data['jdk']
    mappings = [
        ('branch', 'branch', ''),
        ('language', 'language', ''),
        ('root-pom', 'rootPom', 'pom.xml'),
        ('private-maven-repo', 'usePrivateRepository', False),
        ('maven-opts', 'mavenOpts', ''),
        ('additional-properties', 'jobAdditionalProperties', ''),
    ]
    helpers.convert_mapping_to_xml(sonar, data, mappings, fail_required=True)
    if 'skip-global-triggers' in data:
        data_triggers = data['skip-global-triggers']
        triggers = XML.SubElement(sonar, 'triggers')
        triggers_mappings = [
            ('skip-when-scm-change', 'skipScmCause', False),
            ('skip-when-upstream-build', 'skipUpstreamCause', False),
            ('skip-when-envvar-defined', 'envVar', ''),
        ]
        helpers.convert_mapping_to_xml(triggers, data_triggers, triggers_mappings, fail_required=True)
    helpers.config_file_provider_settings(sonar, data)
[ "def", "sonar", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "sonar", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.sonar.SonarPublisher'", ")", "sonar", ".", "set", "(", "'plugin'", ",", "'sonar'", ")", "if", "(", "'jdk'", "in", "data", ")", ":", "XML", ".", "SubElement", "(", "sonar", ",", "'jdk'", ")", ".", "text", "=", "data", "[", "'jdk'", "]", "mappings", "=", "[", "(", "'branch'", ",", "'branch'", ",", "''", ")", ",", "(", "'language'", ",", "'language'", ",", "''", ")", ",", "(", "'root-pom'", ",", "'rootPom'", ",", "'pom.xml'", ")", ",", "(", "'private-maven-repo'", ",", "'usePrivateRepository'", ",", "False", ")", ",", "(", "'maven-opts'", ",", "'mavenOpts'", ",", "''", ")", ",", "(", "'additional-properties'", ",", "'jobAdditionalProperties'", ",", "''", ")", "]", "helpers", ".", "convert_mapping_to_xml", "(", "sonar", ",", "data", ",", "mappings", ",", "fail_required", "=", "True", ")", "if", "(", "'skip-global-triggers'", "in", "data", ")", ":", "data_triggers", "=", "data", "[", "'skip-global-triggers'", "]", "triggers", "=", "XML", ".", "SubElement", "(", "sonar", ",", "'triggers'", ")", "triggers_mappings", "=", "[", "(", "'skip-when-scm-change'", ",", "'skipScmCause'", ",", "False", ")", ",", "(", "'skip-when-upstream-build'", ",", "'skipUpstreamCause'", ",", "False", ")", ",", "(", "'skip-when-envvar-defined'", ",", "'envVar'", ",", "''", ")", "]", "helpers", ".", "convert_mapping_to_xml", "(", "triggers", ",", "data_triggers", ",", "triggers_mappings", ",", "fail_required", "=", "True", ")", "helpers", ".", "config_file_provider_settings", "(", "sonar", ",", "data", ")" ]
yaml: sonar . wrapper for the sonarqube plugin ; requires the jenkins :jenkins-wiki:sonarqube plugin <sonarqube+plugin> .
train
false
49,125
@pytest.fixture
def session_manager_stub(stubs):
    stub = stubs.SessionManagerStub()
    objreg.register('session-manager', stub)
    yield stub
    objreg.delete('session-manager')
[ "@", "pytest", ".", "fixture", "def", "session_manager_stub", "(", "stubs", ")", ":", "stub", "=", "stubs", ".", "SessionManagerStub", "(", ")", "objreg", ".", "register", "(", "'session-manager'", ",", "stub", ")", "(", "yield", "stub", ")", "objreg", ".", "delete", "(", "'session-manager'", ")" ]
fixture which provides a fake session-manager object .
train
false
49,129
def negotiate_locale_name(request):
    try:
        registry = request.registry
    except AttributeError:
        registry = get_current_registry()
    negotiator = registry.queryUtility(ILocaleNegotiator, default=default_locale_negotiator)
    locale_name = negotiator(request)
    if locale_name is None:
        settings = registry.settings or {}
        locale_name = settings.get('default_locale_name', 'en')
    return locale_name
[ "def", "negotiate_locale_name", "(", "request", ")", ":", "try", ":", "registry", "=", "request", ".", "registry", "except", "AttributeError", ":", "registry", "=", "get_current_registry", "(", ")", "negotiator", "=", "registry", ".", "queryUtility", "(", "ILocaleNegotiator", ",", "default", "=", "default_locale_negotiator", ")", "locale_name", "=", "negotiator", "(", "request", ")", "if", "(", "locale_name", "is", "None", ")", ":", "settings", "=", "(", "registry", ".", "settings", "or", "{", "}", ")", "locale_name", "=", "settings", ".", "get", "(", "'default_locale_name'", ",", "'en'", ")", "return", "locale_name" ]
negotiate and return the :term:locale name associated with the current request .
train
false
49,130
def _random_salt():
    salt_chars = _gen_candidate_chars(['ascii_letters', 'digits', './'])
    return _random_password(length=8, chars=salt_chars)
[ "def", "_random_salt", "(", ")", ":", "salt_chars", "=", "_gen_candidate_chars", "(", "[", "'ascii_letters'", ",", "'digits'", ",", "'./'", "]", ")", "return", "_random_password", "(", "length", "=", "8", ",", "chars", "=", "salt_chars", ")" ]
return a text string suitable for use as a salt for the hash functions we use to encrypt passwords .
train
false
49,131
def _download_diff_file(modified, request, review_request_id, revision, filediff_id, local_site=None):
    (review_request, response) = _find_review_request(request, review_request_id, local_site)
    if not review_request:
        return response
    draft = review_request.get_draft(request.user)
    diffset = _query_for_diff(review_request, request.user, revision, draft)
    filediff = get_object_or_404(diffset.files, pk=filediff_id)
    encoding_list = diffset.repository.get_encoding_list()
    try:
        data = get_original_file(filediff, request, encoding_list)
    except FileNotFoundError:
        logging.exception(u'Could not retrieve file "%s" (revision %s) for filediff ID %s',
                          filediff.dest_detail, revision, filediff_id)
        raise Http404
    if modified:
        data = get_patched_file(data, filediff, request)
    data = convert_to_unicode(data, encoding_list)[1]
    return HttpResponse(data, content_type=u'text/plain; charset=utf-8')
[ "def", "_download_diff_file", "(", "modified", ",", "request", ",", "review_request_id", ",", "revision", ",", "filediff_id", ",", "local_site", "=", "None", ")", ":", "(", "review_request", ",", "response", ")", "=", "_find_review_request", "(", "request", ",", "review_request_id", ",", "local_site", ")", "if", "(", "not", "review_request", ")", ":", "return", "response", "draft", "=", "review_request", ".", "get_draft", "(", "request", ".", "user", ")", "diffset", "=", "_query_for_diff", "(", "review_request", ",", "request", ".", "user", ",", "revision", ",", "draft", ")", "filediff", "=", "get_object_or_404", "(", "diffset", ".", "files", ",", "pk", "=", "filediff_id", ")", "encoding_list", "=", "diffset", ".", "repository", ".", "get_encoding_list", "(", ")", "try", ":", "data", "=", "get_original_file", "(", "filediff", ",", "request", ",", "encoding_list", ")", "except", "FileNotFoundError", ":", "logging", ".", "exception", "(", "u'Could not retrieve file \"%s\" (revision %s) for filediff ID %s'", ",", "filediff", ".", "dest_detail", ",", "revision", ",", "filediff_id", ")", "raise", "Http404", "if", "modified", ":", "data", "=", "get_patched_file", "(", "data", ",", "filediff", ",", "request", ")", "data", "=", "convert_to_unicode", "(", "data", ",", "encoding_list", ")", "[", "1", "]", "return", "HttpResponse", "(", "data", ",", "content_type", "=", "u'text/plain; charset=utf-8'", ")" ]
downloads an original or modified file from a diff .
train
false
49,132
def asksaveasfile(mode='w', **options):
    filename = SaveAs(**options).show()
    if filename:
        return open(filename, mode)
    return None
[ "def", "asksaveasfile", "(", "mode", "=", "'w'", ",", "**", "options", ")", ":", "filename", "=", "SaveAs", "(", "**", "options", ")", ".", "show", "(", ")", "if", "filename", ":", "return", "open", "(", "filename", ",", "mode", ")", "return", "None" ]
ask for a filename to save as .
train
false
49,133
def register_database(name, clazz):
    _databases[name] = clazz
[ "def", "register_database", "(", "name", ",", "clazz", ")", ":", "_databases", "[", "name", "]", "=", "clazz" ]
register a database .
train
false
49,136
def num2num(num):
    if isinstance(num, LongType) and (num < sys.maxint):
        return int(num)
    else:
        return num
[ "def", "num2num", "(", "num", ")", ":", "if", "(", "isinstance", "(", "num", ",", "LongType", ")", "and", "(", "num", "<", "sys", ".", "maxint", ")", ")", ":", "return", "int", "(", "num", ")", "else", ":", "return", "num" ]
converts long to int if small enough to fit .
train
false
49,140
def _hv_switch(op, expected_function):
    class XStackTester(_HVStackTester):
        op_class = op

        def expected_f(self, a, format=None, dtype=None):
            return expected_function(a, format, dtype)

    XStackTester.__name__ = op.__name__ + 'Tester'
    if hasattr(XStackTester, '__qualname__'):
        XStackTester.__qualname__ = XStackTester.__name__
    return XStackTester
[ "def", "_hv_switch", "(", "op", ",", "expected_function", ")", ":", "class", "XStackTester", "(", "_HVStackTester", ",", ")", ":", "op_class", "=", "op", "def", "expected_f", "(", "self", ",", "a", ",", "format", "=", "None", ",", "dtype", "=", "None", ")", ":", "return", "expected_function", "(", "a", ",", "format", ",", "dtype", ")", "XStackTester", ".", "__name__", "=", "(", "op", ".", "__name__", "+", "'Tester'", ")", "if", "hasattr", "(", "XStackTester", ",", "'__qualname__'", ")", ":", "XStackTester", ".", "__qualname__", "=", "XStackTester", ".", "__name__", "return", "XStackTester" ]
return the right test class for hstack or vstack .
train
false
49,141
def lookupEncoding(encoding):
    if isinstance(encoding, binary_type):
        try:
            encoding = encoding.decode(u'ascii')
        except UnicodeDecodeError:
            return None
    if encoding is not None:
        try:
            return webencodings.lookup(encoding)
        except AttributeError:
            return None
    else:
        return None
[ "def", "lookupEncoding", "(", "encoding", ")", ":", "if", "isinstance", "(", "encoding", ",", "binary_type", ")", ":", "try", ":", "encoding", "=", "encoding", ".", "decode", "(", "u'ascii'", ")", "except", "UnicodeDecodeError", ":", "return", "None", "if", "(", "encoding", "is", "not", "None", ")", ":", "try", ":", "return", "webencodings", ".", "lookup", "(", "encoding", ")", "except", "AttributeError", ":", "return", "None", "else", ":", "return", "None" ]
return the python codec name corresponding to an encoding or none if the string doesn't correspond to a valid encoding .
train
true
49,143
@should_profile_core
def stop_core_profiling(w3af_core):
    cancel_thread(SAVE_THREAD_PTR)
    dump_data(w3af_core)
[ "@", "should_profile_core", "def", "stop_core_profiling", "(", "w3af_core", ")", ":", "cancel_thread", "(", "SAVE_THREAD_PTR", ")", "dump_data", "(", "w3af_core", ")" ]
save profiling information .
train
false
49,146
def prepare_topics(weights, factors, word_vectors, vocab, temperature=1.0,
                   doc_lengths=None, term_frequency=None, normalize=False):
    topic_to_word = []
    msg = 'Vocabulary size did not match size of word vectors'
    assert len(vocab) == word_vectors.shape[0], msg
    if normalize:
        word_vectors /= np.linalg.norm(word_vectors, axis=1)[:, None]
    for factor_vector in factors:
        factor_to_word = prob_words(factor_vector, word_vectors, temperature=temperature)
        topic_to_word.append(np.ravel(factor_to_word))
    topic_to_word = np.array(topic_to_word)
    msg = 'Not all rows in topic_to_word sum to 1'
    assert np.allclose(np.sum(topic_to_word, axis=1), 1), msg
    doc_to_topic = _softmax_2d(weights)
    msg = 'Not all rows in doc_to_topic sum to 1'
    assert np.allclose(np.sum(doc_to_topic, axis=1), 1), msg
    data = {'topic_term_dists': topic_to_word,
            'doc_topic_dists': doc_to_topic,
            'doc_lengths': doc_lengths,
            'vocab': vocab,
            'term_frequency': term_frequency}
    return data
[ "def", "prepare_topics", "(", "weights", ",", "factors", ",", "word_vectors", ",", "vocab", ",", "temperature", "=", "1.0", ",", "doc_lengths", "=", "None", ",", "term_frequency", "=", "None", ",", "normalize", "=", "False", ")", ":", "topic_to_word", "=", "[", "]", "msg", "=", "'Vocabulary size did not match size of word vectors'", "assert", "(", "len", "(", "vocab", ")", "==", "word_vectors", ".", "shape", "[", "0", "]", ")", ",", "msg", "if", "normalize", ":", "word_vectors", "/=", "np", ".", "linalg", ".", "norm", "(", "word_vectors", ",", "axis", "=", "1", ")", "[", ":", ",", "None", "]", "for", "factor_vector", "in", "factors", ":", "factor_to_word", "=", "prob_words", "(", "factor_vector", ",", "word_vectors", ",", "temperature", "=", "temperature", ")", "topic_to_word", ".", "append", "(", "np", ".", "ravel", "(", "factor_to_word", ")", ")", "topic_to_word", "=", "np", ".", "array", "(", "topic_to_word", ")", "msg", "=", "'Not all rows in topic_to_word sum to 1'", "assert", "np", ".", "allclose", "(", "np", ".", "sum", "(", "topic_to_word", ",", "axis", "=", "1", ")", ",", "1", ")", ",", "msg", "doc_to_topic", "=", "_softmax_2d", "(", "weights", ")", "msg", "=", "'Not all rows in doc_to_topic sum to 1'", "assert", "np", ".", "allclose", "(", "np", ".", "sum", "(", "doc_to_topic", ",", "axis", "=", "1", ")", ",", "1", ")", ",", "msg", "data", "=", "{", "'topic_term_dists'", ":", "topic_to_word", ",", "'doc_topic_dists'", ":", "doc_to_topic", ",", "'doc_lengths'", ":", "doc_lengths", ",", "'vocab'", ":", "vocab", ",", "'term_frequency'", ":", "term_frequency", "}", "return", "data" ]
collects a dictionary of word , document and topic distributions .
train
false
49,148
def cnv_color(attribute, arg, element):
    return str(arg)
[ "def", "cnv_color", "(", "attribute", ",", "arg", ",", "element", ")", ":", "return", "str", "(", "arg", ")" ]
an rgb color in conformance with §5 .
train
false
49,149
def move_stored_file(src_path, dst_path, src_storage=private_storage, dst_storage=private_storage):
    copy_stored_file(src_path, dst_path, src_storage=src_storage, dst_storage=dst_storage)
    src_storage.delete(src_path)
[ "def", "move_stored_file", "(", "src_path", ",", "dst_path", ",", "src_storage", "=", "private_storage", ",", "dst_storage", "=", "private_storage", ")", ":", "copy_stored_file", "(", "src_path", ",", "dst_path", ",", "src_storage", "=", "src_storage", ",", "dst_storage", "=", "dst_storage", ")", "src_storage", ".", "delete", "(", "src_path", ")" ]
move a storage path to another storage path .
train
false
49,154
def mkl_threads_text():
    header = '\n extern "C"\n {\n int MKL_Set_Num_Threads_Local(int);\n #define mkl_set_num_threads_local MKL_Set_Num_Threads_Local\n\n void MKL_Set_Num_Threads(int);\n #define mkl_set_num_threads MKL_Set_Num_Threads\n\n int MKL_Get_Max_Threads(void);\n #define mkl_get_max_threads MKL_Get_Max_Threads\n\n int MKL_Domain_Set_Num_Threads(int, int);\n #define mkl_domain_set_num_threads MKL_Domain_Set_Num_Threads\n\n int MKL_Domain_Get_Max_Threads(int);\n #define mkl_domain_get_max_threads MKL_Domain_Get_Max_Threads\n\n void MKL_Set_Dynamic(int);\n #define mkl_set_dynamic MKL_Set_Dynamic\n\n int MKL_Get_Dynamic(void);\n #define mkl_get_dynamic MKL_Get_Dynamic\n }\n '
    return header
[ "def", "mkl_threads_text", "(", ")", ":", "header", "=", "'\\n extern \"C\"\\n {\\n int MKL_Set_Num_Threads_Local(int);\\n #define mkl_set_num_threads_local MKL_Set_Num_Threads_Local\\n\\n void MKL_Set_Num_Threads(int);\\n #define mkl_set_num_threads MKL_Set_Num_Threads\\n\\n int MKL_Get_Max_Threads(void);\\n #define mkl_get_max_threads MKL_Get_Max_Threads\\n\\n int MKL_Domain_Set_Num_Threads(int, int);\\n #define mkl_domain_set_num_threads MKL_Domain_Set_Num_Threads\\n\\n int MKL_Domain_Get_Max_Threads(int);\\n #define mkl_domain_get_max_threads MKL_Domain_Get_Max_Threads\\n\\n void MKL_Set_Dynamic(int);\\n #define mkl_set_dynamic MKL_Set_Dynamic\\n\\n int MKL_Get_Dynamic(void);\\n #define mkl_get_dynamic MKL_Get_Dynamic\\n }\\n '", "return", "header" ]
c header for mkl threads interface .
train
false
49,155
def file_path_finder(path):
    return finders.FileSystemFinder().find(path)
[ "def", "file_path_finder", "(", "path", ")", ":", "return", "finders", ".", "FileSystemFinder", "(", ")", ".", "find", "(", "path", ")" ]
return physical path of file if found .
train
false
49,156
@pytest.mark.parametrize('url, host, query', [
    ('testfoo', 'www.example.com', 'q=testfoo'),
    ('test testfoo', 'www.qutebrowser.org', 'q=testfoo'),
    ('test testfoo bar foo', 'www.qutebrowser.org', 'q=testfoo bar foo'),
    ('test testfoo ', 'www.qutebrowser.org', 'q=testfoo'),
    ('!python testfoo', 'www.example.com', 'q=%21python testfoo'),
    ('blub testfoo', 'www.example.com', 'q=blub testfoo'),
    ('stripped ', 'www.example.com', 'q=stripped'),
    ('test-with-dash testfoo', 'www.example.org', 'q=testfoo'),
])
def test_get_search_url(urlutils_config_stub, url, host, query):
    url = urlutils._get_search_url(url)
    assert url.host() == host
    assert url.query() == query
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'url, host, query'", ",", "[", "(", "'testfoo'", ",", "'www.example.com'", ",", "'q=testfoo'", ")", ",", "(", "'test testfoo'", ",", "'www.qutebrowser.org'", ",", "'q=testfoo'", ")", ",", "(", "'test testfoo bar foo'", ",", "'www.qutebrowser.org'", ",", "'q=testfoo bar foo'", ")", ",", "(", "'test testfoo '", ",", "'www.qutebrowser.org'", ",", "'q=testfoo'", ")", ",", "(", "'!python testfoo'", ",", "'www.example.com'", ",", "'q=%21python testfoo'", ")", ",", "(", "'blub testfoo'", ",", "'www.example.com'", ",", "'q=blub testfoo'", ")", ",", "(", "'stripped '", ",", "'www.example.com'", ",", "'q=stripped'", ")", ",", "(", "'test-with-dash testfoo'", ",", "'www.example.org'", ",", "'q=testfoo'", ")", "]", ")", "def", "test_get_search_url", "(", "urlutils_config_stub", ",", "url", ",", "host", ",", "query", ")", ":", "url", "=", "urlutils", ".", "_get_search_url", "(", "url", ")", "assert", "(", "url", ".", "host", "(", ")", "==", "host", ")", "assert", "(", "url", ".", "query", "(", ")", "==", "query", ")" ]
test _get_search_url() .
train
false
49,157
def add_p3p_header(view_func):
    @wraps(view_func)
    def inner(request, *args, **kwargs):
        """Helper function"""
        response = view_func(request, *args, **kwargs)
        response['P3P'] = settings.P3P_HEADER
        return response
    return inner
[ "def", "add_p3p_header", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "response", "[", "'P3P'", "]", "=", "settings", ".", "P3P_HEADER", "return", "response", "return", "inner" ]
this decorator should only be used with views which may be displayed through the iframe .
train
false
49,158
def make_fig():
    fig = Figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot([1, 2, 3], 'ro--', markersize=12, markerfacecolor='g')
    x = np.random.rand(100)
    y = np.random.rand(100)
    area = np.pi * (10 * np.random.rand(100)) ** 2
    c = ax.scatter(x, y, area)
    c.set_alpha(0.5)
    ax.set_title('My first image')
    ax.set_ylabel('Some numbers')
    ax.set_xticks((0.2, 0.4, 0.6, 0.8))
    labels = ax.set_xticklabels(('Bill', 'Fred', 'Ted', 'Ed'))
    for label in labels:
        label.set_rotation(45)
        label.set_fontsize(12)
    FigureCanvasAgg(fig).print_png('webapp.png', dpi=150)
[ "def", "make_fig", "(", ")", ":", "fig", "=", "Figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "1", ",", "1", ",", "1", ")", "ax", ".", "plot", "(", "[", "1", ",", "2", ",", "3", "]", ",", "'ro--'", ",", "markersize", "=", "12", ",", "markerfacecolor", "=", "'g'", ")", "x", "=", "np", ".", "random", ".", "rand", "(", "100", ")", "y", "=", "np", ".", "random", ".", "rand", "(", "100", ")", "area", "=", "(", "np", ".", "pi", "*", "(", "(", "10", "*", "np", ".", "random", ".", "rand", "(", "100", ")", ")", "**", "2", ")", ")", "c", "=", "ax", ".", "scatter", "(", "x", ",", "y", ",", "area", ")", "c", ".", "set_alpha", "(", "0.5", ")", "ax", ".", "set_title", "(", "'My first image'", ")", "ax", ".", "set_ylabel", "(", "'Some numbers'", ")", "ax", ".", "set_xticks", "(", "(", "0.2", ",", "0.4", ",", "0.6", ",", "0.8", ")", ")", "labels", "=", "ax", ".", "set_xticklabels", "(", "(", "'Bill'", ",", "'Fred'", ",", "'Ted'", ",", "'Ed'", ")", ")", "for", "label", "in", "labels", ":", "label", ".", "set_rotation", "(", "45", ")", "label", ".", "set_fontsize", "(", "12", ")", "FigureCanvasAgg", "(", "fig", ")", ".", "print_png", "(", "'webapp.png'", ",", "dpi", "=", "150", ")" ]
make a figure and save it to "webapp.png" .
train
false
49,161
def is_strong_lucas_prp(n):
    from sympy.ntheory.factor_ import trailing
    n = as_int(n)
    if n == 2:
        return True
    if (n < 2) or (n % 2 == 0):
        return False
    if is_square(n, False):
        return False
    (D, P, Q) = _lucas_selfridge_params(n)
    if D == 0:
        return False
    s = trailing(n + 1)
    k = (n + 1) >> s
    (U, V, Qk) = _lucas_sequence(n, P, Q, k)
    if (U == 0) or (V == 0):
        return True
    for r in range(1, s):
        V = (V * V - 2 * Qk) % n
        if V == 0:
            return True
        Qk = pow(Qk, 2, n)
    return False
[ "def", "is_strong_lucas_prp", "(", "n", ")", ":", "from", "sympy", ".", "ntheory", ".", "factor_", "import", "trailing", "n", "=", "as_int", "(", "n", ")", "if", "(", "n", "==", "2", ")", ":", "return", "True", "if", "(", "(", "n", "<", "2", ")", "or", "(", "(", "n", "%", "2", ")", "==", "0", ")", ")", ":", "return", "False", "if", "is_square", "(", "n", ",", "False", ")", ":", "return", "False", "(", "D", ",", "P", ",", "Q", ")", "=", "_lucas_selfridge_params", "(", "n", ")", "if", "(", "D", "==", "0", ")", ":", "return", "False", "s", "=", "trailing", "(", "(", "n", "+", "1", ")", ")", "k", "=", "(", "(", "n", "+", "1", ")", ">>", "s", ")", "(", "U", ",", "V", ",", "Qk", ")", "=", "_lucas_sequence", "(", "n", ",", "P", ",", "Q", ",", "k", ")", "if", "(", "(", "U", "==", "0", ")", "or", "(", "V", "==", "0", ")", ")", ":", "return", "True", "for", "r", "in", "range", "(", "1", ",", "s", ")", ":", "V", "=", "(", "(", "(", "V", "*", "V", ")", "-", "(", "2", "*", "Qk", ")", ")", "%", "n", ")", "if", "(", "V", "==", "0", ")", ":", "return", "True", "Qk", "=", "pow", "(", "Qk", ",", "2", ",", "n", ")", "return", "False" ]
strong lucas compositeness test with selfridge parameters .
train
false
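A quick sanity check of the test above; a minimal sketch, assuming the function is importable from sympy.ntheory.primetest as in current sympy releases:

# a minimal sketch, assuming sympy is installed
from sympy.ntheory.primetest import is_strong_lucas_prp

assert is_strong_lucas_prp(101)        # prime: True
assert not is_strong_lucas_prp(100)    # composite: False
assert is_strong_lucas_prp(5459)       # 53 * 103, a known strong Lucas pseudoprime: True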
49,162
def make_hashable(data):
    return freezedicts(data)
[ "def", "make_hashable", "(", "data", ")", ":", "return", "freezedicts", "(", "data", ")" ]
ensures data can be hashed .
train
false
49,163
def zfs_command(reactor, arguments):
    endpoint = ProcessEndpoint(reactor, 'zfs', ['zfs'] + arguments, os.environ)
    d = connectProtocol(endpoint, _AccumulatingProtocol())
    d.addCallback(lambda protocol: protocol._result)
    return d
[ "def", "zfs_command", "(", "reactor", ",", "arguments", ")", ":", "endpoint", "=", "ProcessEndpoint", "(", "reactor", ",", "'zfs'", ",", "(", "[", "'zfs'", "]", "+", "arguments", ")", ",", "os", ".", "environ", ")", "d", "=", "connectProtocol", "(", "endpoint", ",", "_AccumulatingProtocol", "(", ")", ")", "d", ".", "addCallback", "(", "(", "lambda", "protocol", ":", "protocol", ".", "_result", ")", ")", "return", "d" ]
asynchronously run the zfs command-line tool with the given arguments .
train
false
49,164
def parseRequest_pty_req(data):
    (term, rest) = common.getNS(data)
    (cols, rows, xpixel, ypixel) = struct.unpack('>4L', rest[:16])
    (modes, ignored) = common.getNS(rest[16:])
    winSize = (rows, cols, xpixel, ypixel)
    modes = [(ord(modes[i:i + 1]), struct.unpack('>L', modes[i + 1:i + 5])[0])
             for i in range(0, len(modes) - 1, 5)]
    return (term, winSize, modes)
[ "def", "parseRequest_pty_req", "(", "data", ")", ":", "(", "term", ",", "rest", ")", "=", "common", ".", "getNS", "(", "data", ")", "(", "cols", ",", "rows", ",", "xpixel", ",", "ypixel", ")", "=", "struct", ".", "unpack", "(", "'>4L'", ",", "rest", "[", ":", "16", "]", ")", "(", "modes", ",", "ignored", ")", "=", "common", ".", "getNS", "(", "rest", "[", "16", ":", "]", ")", "winSize", "=", "(", "rows", ",", "cols", ",", "xpixel", ",", "ypixel", ")", "modes", "=", "[", "(", "ord", "(", "modes", "[", "i", ":", "(", "i", "+", "1", ")", "]", ")", ",", "struct", ".", "unpack", "(", "'>L'", ",", "modes", "[", "(", "i", "+", "1", ")", ":", "(", "i", "+", "5", ")", "]", ")", "[", "0", "]", ")", "for", "i", "in", "range", "(", "0", ",", "(", "len", "(", "modes", ")", "-", "1", ")", ",", "5", ")", "]", "return", "(", "term", ",", "winSize", ",", "modes", ")" ]
parse the data from a pty-req request into usable data .
train
false
49,165
def is_rfc1918_ip(ip):
    if isinstance(ip, basestring):
        ip = _ip_to_number(ip)
    for (net, mask) in _nets:
        if (ip & mask) == net:
            return True
    return False
[ "def", "is_rfc1918_ip", "(", "ip", ")", ":", "if", "isinstance", "(", "ip", ",", "basestring", ")", ":", "ip", "=", "_ip_to_number", "(", "ip", ")", "for", "(", "net", ",", "mask", ")", "in", "_nets", ":", "if", "(", "(", "ip", "&", "mask", ")", "==", "net", ")", ":", "return", "True", "return", "False" ]
checks if the given ip address is a rfc1918 one .
train
false
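The snippet above assumes a module-level _nets table of (network, mask) integer pairs; a minimal sketch of how such a table could be built for the three RFC 1918 private blocks (the construction here is illustrative, not the original module's code):

import socket
import struct

def _ip_to_number(ip):
    # dotted quad -> unsigned 32-bit integer, network byte order
    return struct.unpack('!L', socket.inet_aton(ip))[0]

_nets = [
    (_ip_to_number('10.0.0.0'), 0xFF000000),     # 10.0.0.0/8
    (_ip_to_number('172.16.0.0'), 0xFFF00000),   # 172.16.0.0/12
    (_ip_to_number('192.168.0.0'), 0xFFFF0000),  # 192.168.0.0/16
]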
49,166
def read_compressed(path):
    with gzip.open(path, 'rb') as f:
        return f.read()
[ "def", "read_compressed", "(", "path", ")", ":", "with", "gzip", ".", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
read a compressed file from path .
train
false
49,167
def nested_to_record(ds, prefix='', level=0):
    singleton = False
    if isinstance(ds, dict):
        ds = [ds]
        singleton = True
    new_ds = []
    for d in ds:
        new_d = copy.deepcopy(d)
        for (k, v) in d.items():
            if not isinstance(k, compat.string_types):
                k = str(k)
            if level == 0:
                newkey = k
            else:
                newkey = prefix + '.' + k
            if not isinstance(v, dict):
                if level != 0:
                    v = new_d.pop(k)
                    new_d[newkey] = v
                continue
            else:
                v = new_d.pop(k)
                new_d.update(nested_to_record(v, newkey, level + 1))
        new_ds.append(new_d)
    if singleton:
        return new_ds[0]
    return new_ds
[ "def", "nested_to_record", "(", "ds", ",", "prefix", "=", "''", ",", "level", "=", "0", ")", ":", "singleton", "=", "False", "if", "isinstance", "(", "ds", ",", "dict", ")", ":", "ds", "=", "[", "ds", "]", "singleton", "=", "True", "new_ds", "=", "[", "]", "for", "d", "in", "ds", ":", "new_d", "=", "copy", ".", "deepcopy", "(", "d", ")", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "(", "not", "isinstance", "(", "k", ",", "compat", ".", "string_types", ")", ")", ":", "k", "=", "str", "(", "k", ")", "if", "(", "level", "==", "0", ")", ":", "newkey", "=", "k", "else", ":", "newkey", "=", "(", "(", "prefix", "+", "'.'", ")", "+", "k", ")", "if", "(", "not", "isinstance", "(", "v", ",", "dict", ")", ")", ":", "if", "(", "level", "!=", "0", ")", ":", "v", "=", "new_d", ".", "pop", "(", "k", ")", "new_d", "[", "newkey", "]", "=", "v", "continue", "else", ":", "v", "=", "new_d", ".", "pop", "(", "k", ")", "new_d", ".", "update", "(", "nested_to_record", "(", "v", ",", "newkey", ",", "(", "level", "+", "1", ")", ")", ")", "new_ds", ".", "append", "(", "new_d", ")", "if", "singleton", ":", "return", "new_ds", "[", "0", "]", "return", "new_ds" ]
a simplified json_normalize converts a nested dict into a flat dict .
train
true
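A small worked example of the flattening above (a sketch of the behaviour the snippet implements; the input dict is invented):

# nested keys are joined with '.' below the top level
nested = {'flat': 1, 'dict': {'a': 2, 'b': {'c': 3}}}
# nested_to_record(nested) returns:
# {'flat': 1, 'dict.a': 2, 'dict.b.c': 3}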
49,168
@property
def fake_vim_prop(arg):
    return fake.get_fake_vim_object(arg)
[ "@", "property", "def", "fake_vim_prop", "(", "arg", ")", ":", "return", "fake", ".", "get_fake_vim_object", "(", "arg", ")" ]
stubs out the vmwareapisessions vim property access method .
train
false
49,169
def _AddPhotoUrls(obj_store, ph_dict):
    ph_dict['tn_get_url'] = photo_store.GeneratePhotoUrl(obj_store, ph_dict['photo_id'], '.t')
    ph_dict['med_get_url'] = photo_store.GeneratePhotoUrl(obj_store, ph_dict['photo_id'], '.m')
    ph_dict['full_get_url'] = photo_store.GeneratePhotoUrl(obj_store, ph_dict['photo_id'], '.f')
    ph_dict['orig_get_url'] = photo_store.GeneratePhotoUrl(obj_store, ph_dict['photo_id'], '.o')
[ "def", "_AddPhotoUrls", "(", "obj_store", ",", "ph_dict", ")", ":", "ph_dict", "[", "'tn_get_url'", "]", "=", "photo_store", ".", "GeneratePhotoUrl", "(", "obj_store", ",", "ph_dict", "[", "'photo_id'", "]", ",", "'.t'", ")", "ph_dict", "[", "'med_get_url'", "]", "=", "photo_store", ".", "GeneratePhotoUrl", "(", "obj_store", ",", "ph_dict", "[", "'photo_id'", "]", ",", "'.m'", ")", "ph_dict", "[", "'full_get_url'", "]", "=", "photo_store", ".", "GeneratePhotoUrl", "(", "obj_store", ",", "ph_dict", "[", "'photo_id'", "]", ",", "'.f'", ")", "ph_dict", "[", "'orig_get_url'", "]", "=", "photo_store", ".", "GeneratePhotoUrl", "(", "obj_store", ",", "ph_dict", "[", "'photo_id'", "]", ",", "'.o'", ")" ]
adds photo urls to the photo dict for each photo size: thumbnail , medium , full and original .
train
false
49,170
def unicode_to_ascii_authority(authority):
    labels = label_split_regex.split(authority)
    asciiLabels = []
    try:
        import encodings.idna
        for label in labels:
            if label:
                asciiLabels.append(to_native(encodings.idna.ToASCII(label)))
            else:
                asciiLabels.append('')
    except:
        asciiLabels = [str(label) for label in labels]
    return str(reduce(lambda x, y: x + unichr(46) + y, asciiLabels))
[ "def", "unicode_to_ascii_authority", "(", "authority", ")", ":", "labels", "=", "label_split_regex", ".", "split", "(", "authority", ")", "asciiLabels", "=", "[", "]", "try", ":", "import", "encodings", ".", "idna", "for", "label", "in", "labels", ":", "if", "label", ":", "asciiLabels", ".", "append", "(", "to_native", "(", "encodings", ".", "idna", ".", "ToASCII", "(", "label", ")", ")", ")", "else", ":", "asciiLabels", ".", "append", "(", "''", ")", "except", ":", "asciiLabels", "=", "[", "str", "(", "label", ")", "for", "label", "in", "labels", "]", "return", "str", "(", "reduce", "(", "(", "lambda", "x", ",", "y", ":", "(", "(", "x", "+", "unichr", "(", "46", ")", ")", "+", "y", ")", ")", ",", "asciiLabels", ")", ")" ]
follows the steps in rfc 3490 .
train
false
49,172
def _update_users(users):
    return __salt__['users.set_users'](users, commit=False)
[ "def", "_update_users", "(", "users", ")", ":", "return", "__salt__", "[", "'users.set_users'", "]", "(", "users", ",", "commit", "=", "False", ")" ]
calls users.set_users with commit=False .
train
false
49,173
def try_iter_content(types, depth=0):
    if depth > 10:
        return
    for typ in types:
        try:
            f = typ.py__iter__
        except AttributeError:
            pass
        else:
            for iter_types in f():
                try_iter_content(iter_types, depth + 1)
[ "def", "try_iter_content", "(", "types", ",", "depth", "=", "0", ")", ":", "if", "(", "depth", ">", "10", ")", ":", "return", "for", "typ", "in", "types", ":", "try", ":", "f", "=", "typ", ".", "py__iter__", "except", "AttributeError", ":", "pass", "else", ":", "for", "iter_types", "in", "f", "(", ")", ":", "try_iter_content", "(", "iter_types", ",", "(", "depth", "+", "1", ")", ")" ]
helper method for static analysis .
train
false
49,174
def IsMacBundle(flavor, spec):
    is_mac_bundle = (int(spec.get('mac_xctest_bundle', 0)) != 0 or
                     (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac'))
    if is_mac_bundle:
        assert spec['type'] != 'none', (
            'mac_bundle targets cannot have type none (target "%s")' % spec['target_name'])
    return is_mac_bundle
[ "def", "IsMacBundle", "(", "flavor", ",", "spec", ")", ":", "is_mac_bundle", "=", "(", "(", "int", "(", "spec", ".", "get", "(", "'mac_xctest_bundle'", ",", "0", ")", ")", "!=", "0", ")", "or", "(", "(", "int", "(", "spec", ".", "get", "(", "'mac_bundle'", ",", "0", ")", ")", "!=", "0", ")", "and", "(", "flavor", "==", "'mac'", ")", ")", ")", "if", "is_mac_bundle", ":", "assert", "(", "spec", "[", "'type'", "]", "!=", "'none'", ")", ",", "(", "'mac_bundle targets cannot have type none (target \"%s\")'", "%", "spec", "[", "'target_name'", "]", ")", "return", "is_mac_bundle" ]
returns whether |spec| should be treated as a bundle .
train
false
49,175
def setnmaplimits():
    for (limit, value) in NMAP_LIMITS.iteritems():
        resource.setrlimit(limit, value)
[ "def", "setnmaplimits", "(", ")", ":", "for", "(", "limit", ",", "value", ")", "in", "NMAP_LIMITS", ".", "iteritems", "(", ")", ":", "resource", ".", "setrlimit", "(", "limit", ",", "value", ")" ]
enforces limits from nmap_limits global variable .
train
false
49,176
def user_id_or_name_exists(user_id_or_name, context):
    model = context['model']
    session = context['session']
    result = session.query(model.User).get(user_id_or_name)
    if result:
        return user_id_or_name
    result = session.query(model.User).filter_by(name=user_id_or_name).first()
    if not result:
        raise Invalid('%s: %s' % (_('Not found'), _('User')))
    return user_id_or_name
[ "def", "user_id_or_name_exists", "(", "user_id_or_name", ",", "context", ")", ":", "model", "=", "context", "[", "'model'", "]", "session", "=", "context", "[", "'session'", "]", "result", "=", "session", ".", "query", "(", "model", ".", "User", ")", ".", "get", "(", "user_id_or_name", ")", "if", "result", ":", "return", "user_id_or_name", "result", "=", "session", ".", "query", "(", "model", ".", "User", ")", ".", "filter_by", "(", "name", "=", "user_id_or_name", ")", ".", "first", "(", ")", "if", "(", "not", "result", ")", ":", "raise", "Invalid", "(", "(", "'%s: %s'", "%", "(", "_", "(", "'Not found'", ")", ",", "_", "(", "'User'", ")", ")", ")", ")", "return", "user_id_or_name" ]
return the given user_id_or_name if such a user exists .
train
false
49,178
def getMinimumByComplexPath(path):
    minimum = complex(9.876543219876543e+17, 9.876543219876543e+17)
    for point in path:
        minimum = getMinimum(minimum, point)
    return minimum
[ "def", "getMinimumByComplexPath", "(", "path", ")", ":", "minimum", "=", "complex", "(", "9.876543219876543e+17", ",", "9.876543219876543e+17", ")", "for", "point", "in", "path", ":", "minimum", "=", "getMinimum", "(", "minimum", ",", "point", ")", "return", "minimum" ]
get a complex with each component the minimum of the respective components of a complex path .
train
false
49,180
@not_implemented_for('undirected')
def number_strongly_connected_components(G):
    return len(list(strongly_connected_components(G)))
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "number_strongly_connected_components", "(", "G", ")", ":", "return", "len", "(", "list", "(", "strongly_connected_components", "(", "G", ")", ")", ")" ]
return number of strongly connected components in graph .
train
false
49,181
def reconstruct_from_patches_2d(patches, image_size):
    (i_h, i_w) = image_size[:2]
    (p_h, p_w) = patches.shape[1:3]
    img = np.zeros(image_size)
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    for (p, (i, j)) in zip(patches, product(range(n_h), range(n_w))):
        img[i:i + p_h, j:j + p_w] += p
    for i in range(i_h):
        for j in range(i_w):
            img[i, j] /= float(min(i + 1, p_h, i_h - i) * min(j + 1, p_w, i_w - j))
    return img
[ "def", "reconstruct_from_patches_2d", "(", "patches", ",", "image_size", ")", ":", "(", "i_h", ",", "i_w", ")", "=", "image_size", "[", ":", "2", "]", "(", "p_h", ",", "p_w", ")", "=", "patches", ".", "shape", "[", "1", ":", "3", "]", "img", "=", "np", ".", "zeros", "(", "image_size", ")", "n_h", "=", "(", "(", "i_h", "-", "p_h", ")", "+", "1", ")", "n_w", "=", "(", "(", "i_w", "-", "p_w", ")", "+", "1", ")", "for", "(", "p", ",", "(", "i", ",", "j", ")", ")", "in", "zip", "(", "patches", ",", "product", "(", "range", "(", "n_h", ")", ",", "range", "(", "n_w", ")", ")", ")", ":", "img", "[", "i", ":", "(", "i", "+", "p_h", ")", ",", "j", ":", "(", "j", "+", "p_w", ")", "]", "+=", "p", "for", "i", "in", "range", "(", "i_h", ")", ":", "for", "j", "in", "range", "(", "i_w", ")", ":", "img", "[", "(", "i", ",", "j", ")", "]", "/=", "float", "(", "(", "min", "(", "(", "i", "+", "1", ")", ",", "p_h", ",", "(", "i_h", "-", "i", ")", ")", "*", "min", "(", "(", "j", "+", "1", ")", ",", "p_w", ",", "(", "i_w", "-", "j", ")", ")", ")", ")", "return", "img" ]
reconstruct the image from all of its patches .
train
false
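This is the patch-averaging reconstruction from scikit-learn's feature_extraction.image module; a minimal round-trip sketch, assuming its extract_patches_2d counterpart is available:

import numpy as np
from sklearn.feature_extraction.image import (
    extract_patches_2d, reconstruct_from_patches_2d)

# extract every 3x3 patch, then average the overlaps back into place
image = np.arange(25, dtype=float).reshape(5, 5)
patches = extract_patches_2d(image, (3, 3))
restored = reconstruct_from_patches_2d(patches, (5, 5))
assert np.allclose(image, restored)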
49,182
def image_delete(id=None, name=None, profile=None):
    g_client = _auth(profile)
    image = {'id': False, 'name': None}
    if name:
        for image in g_client.images.list():
            if image.name == name:
                id = image.id
                continue
    if not id:
        return {'result': False,
                'comment': 'Unable to resolve image id for name {0}'.format(name)}
    elif not name:
        name = image['name']
    try:
        g_client.images.delete(id)
    except exc.HTTPNotFound:
        return {'result': False,
                'comment': 'No image with ID {0}'.format(id)}
    except exc.HTTPForbidden as forbidden:
        log.error(str(forbidden))
        return {'result': False, 'comment': str(forbidden)}
    return {'result': True,
            'comment': "Deleted image '{0}' ({1}).".format(name, id)}
[ "def", "image_delete", "(", "id", "=", "None", ",", "name", "=", "None", ",", "profile", "=", "None", ")", ":", "g_client", "=", "_auth", "(", "profile", ")", "image", "=", "{", "'id'", ":", "False", ",", "'name'", ":", "None", "}", "if", "name", ":", "for", "image", "in", "g_client", ".", "images", ".", "list", "(", ")", ":", "if", "(", "image", ".", "name", "==", "name", ")", ":", "id", "=", "image", ".", "id", "continue", "if", "(", "not", "id", ")", ":", "return", "{", "'result'", ":", "False", ",", "'comment'", ":", "'Unable to resolve image id for name {0}'", ".", "format", "(", "name", ")", "}", "elif", "(", "not", "name", ")", ":", "name", "=", "image", "[", "'name'", "]", "try", ":", "g_client", ".", "images", ".", "delete", "(", "id", ")", "except", "exc", ".", "HTTPNotFound", ":", "return", "{", "'result'", ":", "False", ",", "'comment'", ":", "'No image with ID {0}'", ".", "format", "(", "id", ")", "}", "except", "exc", ".", "HTTPForbidden", "as", "forbidden", ":", "log", ".", "error", "(", "str", "(", "forbidden", ")", ")", "return", "{", "'result'", ":", "False", ",", "'comment'", ":", "str", "(", "forbidden", ")", "}", "return", "{", "'result'", ":", "True", ",", "'comment'", ":", "\"Deleted image '{0}' ({1}).\"", ".", "format", "(", "name", ",", "id", ")", "}" ]
delete an image .
train
true
49,183
def createBinaryMsg(oscAddress, dataArray, typehint=None):
    m = OSC.OSCMessage()
    m.address = oscAddress
    for x in dataArray:
        m.append(x, typehint)
    return m.getBinary()
[ "def", "createBinaryMsg", "(", "oscAddress", ",", "dataArray", ",", "typehint", "=", "None", ")", ":", "m", "=", "OSC", ".", "OSCMessage", "(", ")", "m", ".", "address", "=", "oscAddress", "for", "x", "in", "dataArray", ":", "m", ".", "append", "(", "x", ",", "typehint", ")", "return", "m", ".", "getBinary", "(", ")" ]
create and return general type binary osc msg .
train
false
49,185
def getNewRepository():
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
49,186
def getoutputerror(cmd):
    return get_output_error_code(cmd)[:2]
[ "def", "getoutputerror", "(", "cmd", ")", ":", "return", "get_output_error_code", "(", "cmd", ")", "[", ":", "2", "]" ]
return (stdout, stderr) of executing cmd in a shell .
train
false
49,187
def intra_community_edges(G, partition):
    return sum(G.subgraph(block).size() for block in partition)
[ "def", "intra_community_edges", "(", "G", ",", "partition", ")", ":", "return", "sum", "(", "(", "G", ".", "subgraph", "(", "block", ")", ".", "size", "(", ")", "for", "block", "in", "partition", ")", ")" ]
returns the number of intra-community edges according to the given partition of the nodes of g .
train
false
49,188
def GetSimpleInput(prompt, defValue='', title=None):
    if title is None:
        title = win32ui.GetMainFrame().GetWindowText()
    DlgBaseClass = Dialog

    class DlgSimpleInput(DlgBaseClass):
        def __init__(self, prompt, defValue, title):
            self.title = title
            DlgBaseClass.__init__(self, win32ui.IDD_SIMPLE_INPUT)
            self.AddDDX(win32ui.IDC_EDIT1, 'result')
            self.AddDDX(win32ui.IDC_PROMPT1, 'prompt')
            self._obj_.data['result'] = defValue
            self._obj_.data['prompt'] = prompt

        def OnInitDialog(self):
            self.SetWindowText(self.title)
            return DlgBaseClass.OnInitDialog(self)

    dlg = DlgSimpleInput(prompt, defValue, title)
    if dlg.DoModal() != win32con.IDOK:
        return None
    return dlg['result']
[ "def", "GetSimpleInput", "(", "prompt", ",", "defValue", "=", "''", ",", "title", "=", "None", ")", ":", "if", "(", "title", "is", "None", ")", ":", "title", "=", "win32ui", ".", "GetMainFrame", "(", ")", ".", "GetWindowText", "(", ")", "DlgBaseClass", "=", "Dialog", "class", "DlgSimpleInput", "(", "DlgBaseClass", ",", ")", ":", "def", "__init__", "(", "self", ",", "prompt", ",", "defValue", ",", "title", ")", ":", "self", ".", "title", "=", "title", "DlgBaseClass", ".", "__init__", "(", "self", ",", "win32ui", ".", "IDD_SIMPLE_INPUT", ")", "self", ".", "AddDDX", "(", "win32ui", ".", "IDC_EDIT1", ",", "'result'", ")", "self", ".", "AddDDX", "(", "win32ui", ".", "IDC_PROMPT1", ",", "'prompt'", ")", "self", ".", "_obj_", ".", "data", "[", "'result'", "]", "=", "defValue", "self", ".", "_obj_", ".", "data", "[", "'prompt'", "]", "=", "prompt", "def", "OnInitDialog", "(", "self", ")", ":", "self", ".", "SetWindowText", "(", "self", ".", "title", ")", "return", "DlgBaseClass", ".", "OnInitDialog", "(", "self", ")", "dlg", "=", "DlgSimpleInput", "(", "prompt", ",", "defValue", ",", "title", ")", "if", "(", "dlg", ".", "DoModal", "(", ")", "!=", "win32con", ".", "IDOK", ")", ":", "return", "None", "return", "dlg", "[", "'result'", "]" ]
displays a dialog .
train
false
49,189
def removeNestedGroups(node):
    global numElemsRemoved
    num = 0
    groupsToRemove = []
    if not (node.nodeType == 1 and node.nodeName == 'switch'):
        for child in node.childNodes:
            if (child.nodeName == 'g' and child.namespaceURI == NS['SVG'] and
                    len(child.attributes) == 0):
                for grandchild in child.childNodes:
                    if (grandchild.nodeType == 1 and grandchild.namespaceURI == NS['SVG'] and
                            grandchild.nodeName in ['title', 'desc']):
                        break
                else:
                    groupsToRemove.append(child)
    for g in groupsToRemove:
        while g.childNodes.length > 0:
            g.parentNode.insertBefore(g.firstChild, g)
        g.parentNode.removeChild(g)
        numElemsRemoved += 1
        num += 1
    for child in node.childNodes:
        if child.nodeType == 1:
            num += removeNestedGroups(child)
    return num
[ "def", "removeNestedGroups", "(", "node", ")", ":", "global", "numElemsRemoved", "num", "=", "0", "groupsToRemove", "=", "[", "]", "if", "(", "not", "(", "(", "node", ".", "nodeType", "==", "1", ")", "and", "(", "node", ".", "nodeName", "==", "'switch'", ")", ")", ")", ":", "for", "child", "in", "node", ".", "childNodes", ":", "if", "(", "(", "child", ".", "nodeName", "==", "'g'", ")", "and", "(", "child", ".", "namespaceURI", "==", "NS", "[", "'SVG'", "]", ")", "and", "(", "len", "(", "child", ".", "attributes", ")", "==", "0", ")", ")", ":", "for", "grandchild", "in", "child", ".", "childNodes", ":", "if", "(", "(", "grandchild", ".", "nodeType", "==", "1", ")", "and", "(", "grandchild", ".", "namespaceURI", "==", "NS", "[", "'SVG'", "]", ")", "and", "(", "grandchild", ".", "nodeName", "in", "[", "'title'", ",", "'desc'", "]", ")", ")", ":", "break", "else", ":", "groupsToRemove", ".", "append", "(", "child", ")", "for", "g", "in", "groupsToRemove", ":", "while", "(", "g", ".", "childNodes", ".", "length", ">", "0", ")", ":", "g", ".", "parentNode", ".", "insertBefore", "(", "g", ".", "firstChild", ",", "g", ")", "g", ".", "parentNode", ".", "removeChild", "(", "g", ")", "numElemsRemoved", "+=", "1", "num", "+=", "1", "for", "child", "in", "node", ".", "childNodes", ":", "if", "(", "child", ".", "nodeType", "==", "1", ")", ":", "num", "+=", "removeNestedGroups", "(", "child", ")", "return", "num" ]
removes nested groups: this walks further and further down the tree, promoting the children of attribute-less <g> elements up one level .
train
true
49,190
def sloccount(registry, xml_parent, data): top = XML.SubElement(xml_parent, 'hudson.plugins.sloccount.SloccountPublisher') top.set('plugin', 'sloccount') mappings = [('report-files', 'pattern', '**/sloccount.sc'), ('charset', 'encoding', 'UTF-8'), ('builds-in-graph', 'numBuildsInGraph', 0), ('comment-is-code', 'commentIsCode', False), ('ignore-build-failure', 'ignoreBuildFailure', False)] helpers.convert_mapping_to_xml(top, data, mappings, fail_required=True)
[ "def", "sloccount", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "top", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.sloccount.SloccountPublisher'", ")", "top", ".", "set", "(", "'plugin'", ",", "'sloccount'", ")", "mappings", "=", "[", "(", "'report-files'", ",", "'pattern'", ",", "'**/sloccount.sc'", ")", ",", "(", "'charset'", ",", "'encoding'", ",", "'UTF-8'", ")", ",", "(", "'builds-in-graph'", ",", "'numBuildsInGraph'", ",", "0", ")", ",", "(", "'comment-is-code'", ",", "'commentIsCode'", ",", "False", ")", ",", "(", "'ignore-build-failure'", ",", "'ignoreBuildFailure'", ",", "False", ")", "]", "helpers", ".", "convert_mapping_to_xml", "(", "top", ",", "data", ",", "mappings", ",", "fail_required", "=", "True", ")" ]
yaml: sloccount: generates the trend report for sloccount; requires the jenkins :jenkins-wiki:`sloccount plugin <sloccount+plugin>` .
train
false
49,191
def set_group_user(group=None, user=None): if group: (_, gid) = get_group_name_id(group) os.setgid(gid) os.setegid(gid) if user: (username, uid) = get_user_name_id(user) for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'): os.environ[name] = username os.setuid(uid) os.seteuid(uid)
[ "def", "set_group_user", "(", "group", "=", "None", ",", "user", "=", "None", ")", ":", "if", "group", ":", "(", "_", ",", "gid", ")", "=", "get_group_name_id", "(", "group", ")", "os", ".", "setgid", "(", "gid", ")", "os", ".", "setegid", "(", "gid", ")", "if", "user", ":", "(", "username", ",", "uid", ")", "=", "get_user_name_id", "(", "user", ")", "for", "name", "in", "(", "'LOGNAME'", ",", "'USER'", ",", "'LNAME'", ",", "'USERNAME'", ")", ":", "os", ".", "environ", "[", "name", "]", "=", "username", "os", ".", "setuid", "(", "uid", ")", "os", ".", "seteuid", "(", "uid", ")" ]
set the group and user id if group or user is defined .
train
false
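Note: a hedged call-site sketch for the record above (requires root, and assumes the module's get_group_name_id/get_user_name_id helpers resolve names via grp/pwd):

# typical daemon pattern: bind privileged ports first, then drop privileges
set_group_user(group='www-data', user='www-data')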
49,194
def define_plugin_entry(name, module_name): if isinstance(name, tuple): (entry, name) = name else: entry = name return ('%s = %s:%s' % (entry, module_name, name))
[ "def", "define_plugin_entry", "(", "name", ",", "module_name", ")", ":", "if", "isinstance", "(", "name", ",", "tuple", ")", ":", "(", "entry", ",", "name", ")", "=", "name", "else", ":", "entry", "=", "name", "return", "(", "'%s = %s:%s'", "%", "(", "entry", ",", "module_name", ",", "name", ")", ")" ]
helper to produce entry point lines suitable for setup.py .
train
true
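Note: a quick sketch of feeding the helper above into setuptools entry points (module and plugin names are hypothetical):

names = ['foo', ('bar-cli', 'bar')]  # a plain name, or an (entry, name) tuple
lines = [define_plugin_entry(n, 'myplugins.module') for n in names]
# -> ['foo = myplugins.module:foo', 'bar-cli = myplugins.module:bar']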
49,196
def get_git_branch_name(ref_name): branch_ref_prefix = u'refs/heads/' if ref_name.startswith(branch_ref_prefix): return ref_name[len(branch_ref_prefix):]
[ "def", "get_git_branch_name", "(", "ref_name", ")", ":", "branch_ref_prefix", "=", "u'refs/heads/'", "if", "ref_name", ".", "startswith", "(", "branch_ref_prefix", ")", ":", "return", "ref_name", "[", "len", "(", "branch_ref_prefix", ")", ":", "]" ]
returns the branch name corresponding to the specified ref name .
train
false
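Note: two quick checks of the prefix stripping above; non-branch refs fall through, so the function implicitly returns None:

assert get_git_branch_name(u'refs/heads/main') == u'main'
assert get_git_branch_name(u'refs/tags/v1.0') is None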
49,198
def flake8extn(func): func.version = PLUGIN_VERSION func.name = PLUGIN_NAME return func
[ "def", "flake8extn", "(", "func", ")", ":", "func", ".", "version", "=", "PLUGIN_VERSION", "func", ".", "name", "=", "PLUGIN_NAME", "return", "func" ]
decorator to specify flake8 extension details .
train
false
49,199
@error.context_aware def lv_revert_with_snapshot(vg_name, lv_name, lv_snapshot_name, lv_snapshot_size): error.context('Reverting to snapshot and taking a new one', logging.info) lv_revert(vg_name, lv_name, lv_snapshot_name) lv_take_snapshot(vg_name, lv_name, lv_snapshot_name, lv_snapshot_size)
[ "@", "error", ".", "context_aware", "def", "lv_revert_with_snapshot", "(", "vg_name", ",", "lv_name", ",", "lv_snapshot_name", ",", "lv_snapshot_size", ")", ":", "error", ".", "context", "(", "'Reverting to snapshot and taking a new one'", ",", "logging", ".", "info", ")", "lv_revert", "(", "vg_name", ",", "lv_name", ",", "lv_snapshot_name", ")", "lv_take_snapshot", "(", "vg_name", ",", "lv_name", ",", "lv_snapshot_name", ",", "lv_snapshot_size", ")" ]
perform logical volume merge with snapshot and take a new snapshot .
train
false
49,200
def address_is_associated_with_device(ec2, address, device_id, isinstance=True): address = ec2.get_all_addresses(address.public_ip) if address: if isinstance: return (address and (address[0].instance_id == device_id)) else: return (address and (address[0].network_interface_id == device_id)) return False
[ "def", "address_is_associated_with_device", "(", "ec2", ",", "address", ",", "device_id", ",", "isinstance", "=", "True", ")", ":", "address", "=", "ec2", ".", "get_all_addresses", "(", "address", ".", "public_ip", ")", "if", "address", ":", "if", "isinstance", ":", "return", "(", "address", "and", "(", "address", "[", "0", "]", ".", "instance_id", "==", "device_id", ")", ")", "else", ":", "return", "(", "address", "and", "(", "address", "[", "0", "]", ".", "network_interface_id", "==", "device_id", ")", ")", "return", "False" ]
check if the elastic ip is currently associated with the device .
train
false
49,202
def _potential_after(i, input_string): return (((i + 2) >= len(input_string)) or ((input_string[(i + 2)] == input_string[i]) and (input_string[(i + 1)] not in seps)))
[ "def", "_potential_after", "(", "i", ",", "input_string", ")", ":", "return", "(", "(", "(", "i", "+", "2", ")", ">=", "len", "(", "input_string", ")", ")", "or", "(", "(", "input_string", "[", "(", "i", "+", "2", ")", "]", "==", "input_string", "[", "i", "]", ")", "and", "(", "input_string", "[", "(", "i", "+", "1", ")", "]", "not", "in", "seps", ")", ")", ")" ]
check if the character at position i can be a potential single char separator considering what's after it .
train
false
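Note: a sketch of the check above, assuming a hypothetical separator set `seps` (the real module defines its own):

seps = ' .-_'  # assumption for illustration only
# '.' recurs two positions later and the character between is not a separator
print(_potential_after(1, 'a.b.c'))  # True
print(_potential_after(1, 'a. .c'))  # False: ' ' at i+1 is a separator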
49,203
def randitems(n, obj='ndarray', mode=None, char=None): if (mode is None): mode = choice(cap[obj][MODE]) if (char is None): char = choice(tuple(fmtdict[mode])) multiplier = choice(cap[obj][MULT]) fmt = ((mode + '#') + (char * int((multiplier if multiplier else 1)))) items = gen_items(n, fmt, obj) item = gen_item(fmt, obj) fmt = ((mode.strip('amb') + multiplier) + char) return (fmt, items, item)
[ "def", "randitems", "(", "n", ",", "obj", "=", "'ndarray'", ",", "mode", "=", "None", ",", "char", "=", "None", ")", ":", "if", "(", "mode", "is", "None", ")", ":", "mode", "=", "choice", "(", "cap", "[", "obj", "]", "[", "MODE", "]", ")", "if", "(", "char", "is", "None", ")", ":", "char", "=", "choice", "(", "tuple", "(", "fmtdict", "[", "mode", "]", ")", ")", "multiplier", "=", "choice", "(", "cap", "[", "obj", "]", "[", "MULT", "]", ")", "fmt", "=", "(", "(", "mode", "+", "'#'", ")", "+", "(", "char", "*", "int", "(", "(", "multiplier", "if", "multiplier", "else", "1", ")", ")", ")", ")", "items", "=", "gen_items", "(", "n", ",", "fmt", ",", "obj", ")", "item", "=", "gen_item", "(", "fmt", ",", "obj", ")", "fmt", "=", "(", "(", "mode", ".", "strip", "(", "'amb'", ")", "+", "multiplier", ")", "+", "char", ")", "return", "(", "fmt", ",", "items", ",", "item", ")" ]
return a random format string together with matching random items and a single random item .
train
false
49,204
def convert_systemd_target_to_runlevel(target): if (target == 'poweroff.target'): runlevel = '0' elif (target == 'rescue.target'): runlevel = 's' elif (target == 'multi-user.target'): runlevel = '3' elif (target == 'graphical.target'): runlevel = '5' elif (target == 'reboot.target'): runlevel = '6' else: raise ValueError(('unknown target %s' % target)) return runlevel
[ "def", "convert_systemd_target_to_runlevel", "(", "target", ")", ":", "if", "(", "target", "==", "'poweroff.target'", ")", ":", "runlevel", "=", "'0'", "elif", "(", "target", "==", "'rescue.target'", ")", ":", "runlevel", "=", "'s'", "elif", "(", "target", "==", "'multi-user.target'", ")", ":", "runlevel", "=", "'3'", "elif", "(", "target", "==", "'graphical.target'", ")", ":", "runlevel", "=", "'5'", "elif", "(", "target", "==", "'reboot.target'", ")", ":", "runlevel", "=", "'6'", "else", ":", "raise", "ValueError", "(", "(", "'unknown target %s'", "%", "target", ")", ")", "return", "runlevel" ]
convert systemd target to runlevel .
train
false
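Note: example calls for the mapping above:

assert convert_systemd_target_to_runlevel('multi-user.target') == '3'
try:
    convert_systemd_target_to_runlevel('foo.target')
except ValueError as e:
    print(e)  # unknown target foo.target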
49,205
def trg_delete(uid, res_type, res_id, cr): return WorkflowService.new(cr, uid, res_type, res_id).delete()
[ "def", "trg_delete", "(", "uid", ",", "res_type", ",", "res_id", ",", "cr", ")", ":", "return", "WorkflowService", ".", "new", "(", "cr", ",", "uid", ",", "res_type", ",", "res_id", ")", ".", "delete", "(", ")" ]
delete a workflow instance .
train
false
49,207
@require_POST @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') def get_grading_config(request, course_id): course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'staff', course_id, depth=None) grading_config_summary = instructor_analytics.basic.dump_grading_context(course) response_payload = {'course_id': course_id.to_deprecated_string(), 'grading_config_summary': grading_config_summary} return JsonResponse(response_payload)
[ "@", "require_POST", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_level", "(", "'staff'", ")", "def", "get_grading_config", "(", "request", ",", "course_id", ")", ":", "course_id", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "course", "=", "get_course_with_access", "(", "request", ".", "user", ",", "'staff'", ",", "course_id", ",", "depth", "=", "None", ")", "grading_config_summary", "=", "instructor_analytics", ".", "basic", ".", "dump_grading_context", "(", "course", ")", "response_payload", "=", "{", "'course_id'", ":", "course_id", ".", "to_deprecated_string", "(", ")", ",", "'grading_config_summary'", ":", "grading_config_summary", "}", "return", "JsonResponse", "(", "response_payload", ")" ]
respond with json which contains an html formatted grade summary .
train
false
49,208
@task(name='geonode.tasks.update.geoserver_update_layers', queue='update') def geoserver_update_layers(*args, **kwargs): return gs_slurp(*args, **kwargs)
[ "@", "task", "(", "name", "=", "'geonode.tasks.update.geoserver_update_layers'", ",", "queue", "=", "'update'", ")", "def", "geoserver_update_layers", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "gs_slurp", "(", "*", "args", ",", "**", "kwargs", ")" ]
runs update layers .
train
false
49,209
def p_expr_number(p): p[0] = ('NUM', eval(p[1]))
[ "def", "p_expr_number", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "'NUM'", ",", "eval", "(", "p", "[", "1", "]", ")", ")" ]
expr : integer | float .
train
false
49,211
def _extractCallingMethodArgs(): import inspect import copy callingFrame = inspect.stack()[1][0] (argNames, _, _, frameLocalVarDict) = inspect.getargvalues(callingFrame) argNames.remove('self') args = copy.copy(frameLocalVarDict) for varName in frameLocalVarDict: if (varName not in argNames): args.pop(varName) return args
[ "def", "_extractCallingMethodArgs", "(", ")", ":", "import", "inspect", "import", "copy", "callingFrame", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "[", "0", "]", "(", "argNames", ",", "_", ",", "_", ",", "frameLocalVarDict", ")", "=", "inspect", ".", "getargvalues", "(", "callingFrame", ")", "argNames", ".", "remove", "(", "'self'", ")", "args", "=", "copy", ".", "copy", "(", "frameLocalVarDict", ")", "for", "varName", "in", "frameLocalVarDict", ":", "if", "(", "varName", "not", "in", "argNames", ")", ":", "args", ".", "pop", "(", "varName", ")", "return", "args" ]
returns args dictionary from the calling method .
train
true
49,212
def encode_string(string): if isinstance(string, text_type): string = string.encode('utf-8') return string
[ "def", "encode_string", "(", "string", ")", ":", "if", "isinstance", "(", "string", ",", "text_type", ")", ":", "string", "=", "string", ".", "encode", "(", "'utf-8'", ")", "return", "string" ]
encodes a string to bytes .
train
false
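Note: assuming text_type is the six-style alias for str on Python 3, the behaviour of the record above looks like this:

text_type = str  # assumption; the module likely imports this from six
assert encode_string('h\u00e9llo') == b'h\xc3\xa9llo'  # text is utf-8 encoded
assert encode_string(b'raw') == b'raw'                 # bytes pass through unchanged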
49,213
def look_ropeproject(path): env.debug('Look project', path) p = os.path.abspath(path) while True: if ('.ropeproject' in os.listdir(p)): return p new_p = os.path.abspath(os.path.join(p, '..')) if (new_p == p): return path p = new_p
[ "def", "look_ropeproject", "(", "path", ")", ":", "env", ".", "debug", "(", "'Look project'", ",", "path", ")", "p", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "while", "True", ":", "if", "(", "'.ropeproject'", "in", "os", ".", "listdir", "(", "p", ")", ")", ":", "return", "p", "new_p", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "p", ",", "'..'", ")", ")", "if", "(", "new_p", "==", "p", ")", ":", "return", "path", "p", "=", "new_p" ]
search for ropeproject in current and parent dirs .
train
false
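Note: a self-contained sketch of the upward search above, stubbing the module-level `env` logger it relies on:

import os, tempfile
class _Env(object):  # minimal stand-in for the module's env helper
    def debug(self, *args): pass
env = _Env()
root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, '.ropeproject'))
sub = os.path.join(root, 'pkg')
os.makedirs(sub)
print(look_ropeproject(sub))  # prints root, the nearest ancestor with .ropeproject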
49,215
def test_function_series2(): class my_function2(Function, ): def fdiff(self, argindex=1): return (- sin(self.args[0])) @classmethod def eval(cls, arg): arg = sympify(arg) if (arg == 0): return sympify(1) assert (my_function2(x).series(x, 0, 10) == cos(x).series(x, 0, 10))
[ "def", "test_function_series2", "(", ")", ":", "class", "my_function2", "(", "Function", ",", ")", ":", "def", "fdiff", "(", "self", ",", "argindex", "=", "1", ")", ":", "return", "(", "-", "sin", "(", "self", ".", "args", "[", "0", "]", ")", ")", "@", "classmethod", "def", "eval", "(", "cls", ",", "arg", ")", ":", "arg", "=", "sympify", "(", "arg", ")", "if", "(", "arg", "==", "0", ")", ":", "return", "sympify", "(", "1", ")", "assert", "(", "my_function2", "(", "x", ")", ".", "series", "(", "x", ",", "0", ",", "10", ")", "==", "cos", "(", "x", ")", ".", "series", "(", "x", ",", "0", ",", "10", ")", ")" ]
create our new "cos" function .
train
false
49,219
def is_categorical(obj): try: float(obj.iloc[0]) return False except: return True if is_sequence_of_strings(obj): return True if is_sequence_of_booleans(obj): return True return False
[ "def", "is_categorical", "(", "obj", ")", ":", "try", ":", "float", "(", "obj", ".", "iloc", "[", "0", "]", ")", "return", "False", "except", ":", "return", "True", "if", "is_sequence_of_strings", "(", "obj", ")", ":", "return", "True", "if", "is_sequence_of_booleans", "(", "obj", ")", ":", "return", "True", "return", "False" ]
return true if *obj* is array-like and has categorical values; categorical values include strings and booleans .
train
false
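Note: as written, the early returns in the try/except mean the string/boolean checks below them are never reached, so behaviour hinges on whether the first element converts to float. A pandas sketch (pandas assumed available):

import pandas as pd
print(is_categorical(pd.Series(['a', 'b'])))   # True: float('a') raises
print(is_categorical(pd.Series([1.0, 2.0])))   # False: first value is numeric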
49,220
@pytest.mark.parametrize('files, expected', [({}, []), ({'file': ['']}, []), ({'file': []}, []), ({'file1': ['foo\n', 'bar\n'], 'file2': ['baz\n']}, [('file1', 'foo\nbar'), ('file2', 'baz')]), ({'file': ['HOME_URL=example.com\n', 'NAME=FOO']}, [('file', 'NAME=FOO')]), ({'file': ['HOME_URL=example.com']}, []), (None, [])]) def test_release_info(files, expected, caplog, monkeypatch): fake = ReleaseInfoFake(files) monkeypatch.setattr('qutebrowser.utils.version.glob.glob', fake.glob_fake) monkeypatch.setattr(version, 'open', fake.open_fake, raising=False) with caplog.at_level(logging.ERROR, 'misc'): assert (version._release_info() == expected) if (files is None): assert (len(caplog.records) == 1) assert (caplog.records[0].message == 'Error while reading fake-file.')
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'files, expected'", ",", "[", "(", "{", "}", ",", "[", "]", ")", ",", "(", "{", "'file'", ":", "[", "''", "]", "}", ",", "[", "]", ")", ",", "(", "{", "'file'", ":", "[", "]", "}", ",", "[", "]", ")", ",", "(", "{", "'file1'", ":", "[", "'foo\\n'", ",", "'bar\\n'", "]", ",", "'file2'", ":", "[", "'baz\\n'", "]", "}", ",", "[", "(", "'file1'", ",", "'foo\\nbar'", ")", ",", "(", "'file2'", ",", "'baz'", ")", "]", ")", ",", "(", "{", "'file'", ":", "[", "'HOME_URL=example.com\\n'", ",", "'NAME=FOO'", "]", "}", ",", "[", "(", "'file'", ",", "'NAME=FOO'", ")", "]", ")", ",", "(", "{", "'file'", ":", "[", "'HOME_URL=example.com'", "]", "}", ",", "[", "]", ")", ",", "(", "None", ",", "[", "]", ")", "]", ")", "def", "test_release_info", "(", "files", ",", "expected", ",", "caplog", ",", "monkeypatch", ")", ":", "fake", "=", "ReleaseInfoFake", "(", "files", ")", "monkeypatch", ".", "setattr", "(", "'qutebrowser.utils.version.glob.glob'", ",", "fake", ".", "glob_fake", ")", "monkeypatch", ".", "setattr", "(", "version", ",", "'open'", ",", "fake", ".", "open_fake", ",", "raising", "=", "False", ")", "with", "caplog", ".", "at_level", "(", "logging", ".", "ERROR", ",", "'misc'", ")", ":", "assert", "(", "version", ".", "_release_info", "(", ")", "==", "expected", ")", "if", "(", "files", "is", "None", ")", ":", "assert", "(", "len", "(", "caplog", ".", "records", ")", "==", "1", ")", "assert", "(", "caplog", ".", "records", "[", "0", "]", ".", "message", "==", "'Error while reading fake-file.'", ")" ]
test _release_info() .
train
false
49,221
def _images_to_example(image, image2): example = tf.SequenceExample() feature_list = example.feature_lists.feature_list['moving_objs'] feature = feature_list.feature.add() feature.float_list.value.extend(np.reshape(image, [(-1)]).tolist()) feature = feature_list.feature.add() feature.float_list.value.extend(np.reshape(image2, [(-1)]).tolist()) return example
[ "def", "_images_to_example", "(", "image", ",", "image2", ")", ":", "example", "=", "tf", ".", "SequenceExample", "(", ")", "feature_list", "=", "example", ".", "feature_lists", ".", "feature_list", "[", "'moving_objs'", "]", "feature", "=", "feature_list", ".", "feature", ".", "add", "(", ")", "feature", ".", "float_list", ".", "value", ".", "extend", "(", "np", ".", "reshape", "(", "image", ",", "[", "(", "-", "1", ")", "]", ")", ".", "tolist", "(", ")", ")", "feature", "=", "feature_list", ".", "feature", ".", "add", "(", ")", "feature", ".", "float_list", ".", "value", ".", "extend", "(", "np", ".", "reshape", "(", "image2", ",", "[", "(", "-", "1", ")", "]", ")", ".", "tolist", "(", ")", ")", "return", "example" ]
convert two consecutive images to sequenceexample .
train
false
49,222
def getUniqueVertexes(loops): vertexDictionary = {} uniqueVertexes = [] for loop in loops: for (vertexIndex, vertex) in enumerate(loop): vertexTuple = (vertex.x, vertex.y, vertex.z) if (vertexTuple in vertexDictionary): loop[vertexIndex] = vertexDictionary[vertexTuple] else: if (vertex.__class__ == Vector3Index): loop[vertexIndex].index = len(vertexDictionary) else: loop[vertexIndex] = Vector3Index(len(vertexDictionary), vertex.x, vertex.y, vertex.z) vertexDictionary[vertexTuple] = loop[vertexIndex] uniqueVertexes.append(loop[vertexIndex]) return uniqueVertexes
[ "def", "getUniqueVertexes", "(", "loops", ")", ":", "vertexDictionary", "=", "{", "}", "uniqueVertexes", "=", "[", "]", "for", "loop", "in", "loops", ":", "for", "(", "vertexIndex", ",", "vertex", ")", "in", "enumerate", "(", "loop", ")", ":", "vertexTuple", "=", "(", "vertex", ".", "x", ",", "vertex", ".", "y", ",", "vertex", ".", "z", ")", "if", "(", "vertexTuple", "in", "vertexDictionary", ")", ":", "loop", "[", "vertexIndex", "]", "=", "vertexDictionary", "[", "vertexTuple", "]", "else", ":", "if", "(", "vertex", ".", "__class__", "==", "Vector3Index", ")", ":", "loop", "[", "vertexIndex", "]", ".", "index", "=", "len", "(", "vertexDictionary", ")", "else", ":", "loop", "[", "vertexIndex", "]", "=", "Vector3Index", "(", "len", "(", "vertexDictionary", ")", ",", "vertex", ".", "x", ",", "vertex", ".", "y", ",", "vertex", ".", "z", ")", "vertexDictionary", "[", "vertexTuple", "]", "=", "loop", "[", "vertexIndex", "]", "uniqueVertexes", ".", "append", "(", "loop", "[", "vertexIndex", "]", ")", "return", "uniqueVertexes" ]
get unique vertexes .
train
false
49,223
def writePlistToResource(rootObject, path, restype='plst', resid=0): warnings.warnpy3k('In 3.x, writePlistToResource is removed.') from Carbon.File import FSRef, FSGetResourceForkName from Carbon.Files import fsRdWrPerm from Carbon import Res plistData = writePlistToString(rootObject) fsRef = FSRef(path) resNum = Res.FSOpenResourceFile(fsRef, FSGetResourceForkName(), fsRdWrPerm) Res.UseResFile(resNum) try: Res.Get1Resource(restype, resid).RemoveResource() except Res.Error: pass res = Res.Resource(plistData) res.AddResource(restype, resid, '') res.WriteResource() Res.CloseResFile(resNum)
[ "def", "writePlistToResource", "(", "rootObject", ",", "path", ",", "restype", "=", "'plst'", ",", "resid", "=", "0", ")", ":", "warnings", ".", "warnpy3k", "(", "'In 3.x, writePlistToResource is removed.'", ")", "from", "Carbon", ".", "File", "import", "FSRef", ",", "FSGetResourceForkName", "from", "Carbon", ".", "Files", "import", "fsRdWrPerm", "from", "Carbon", "import", "Res", "plistData", "=", "writePlistToString", "(", "rootObject", ")", "fsRef", "=", "FSRef", "(", "path", ")", "resNum", "=", "Res", ".", "FSOpenResourceFile", "(", "fsRef", ",", "FSGetResourceForkName", "(", ")", ",", "fsRdWrPerm", ")", "Res", ".", "UseResFile", "(", "resNum", ")", "try", ":", "Res", ".", "Get1Resource", "(", "restype", ",", "resid", ")", ".", "RemoveResource", "(", ")", "except", "Res", ".", "Error", ":", "pass", "res", "=", "Res", ".", "Resource", "(", "plistData", ")", "res", ".", "AddResource", "(", "restype", ",", "resid", ",", "''", ")", "res", ".", "WriteResource", "(", ")", "Res", ".", "CloseResFile", "(", "resNum", ")" ]
write rootobject as a plst resource to the resource fork of path .
train
false
49,225
@depends(HAS_HDPARM) def _hdparm(args, failhard=True): cmd = 'hdparm {0}'.format(args) result = __salt__['cmd.run_all'](cmd) if (result['retcode'] != 0): msg = '{0}: {1}'.format(cmd, result['stderr']) if failhard: raise CommandExecutionError(msg) else: log.warning(msg) return result['stdout']
[ "@", "depends", "(", "HAS_HDPARM", ")", "def", "_hdparm", "(", "args", ",", "failhard", "=", "True", ")", ":", "cmd", "=", "'hdparm {0}'", ".", "format", "(", "args", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "if", "(", "result", "[", "'retcode'", "]", "!=", "0", ")", ":", "msg", "=", "'{0}: {1}'", ".", "format", "(", "cmd", ",", "result", "[", "'stderr'", "]", ")", "if", "failhard", ":", "raise", "CommandExecutionError", "(", "msg", ")", "else", ":", "log", ".", "warning", "(", "msg", ")", "return", "result", "[", "'stdout'", "]" ]
execute hdparm; fail hard when required; return output when possible .
train
true
49,226
@auth_decorator def authenticated_403(self): if (self.get_current_user() is None): raise web.HTTPError(403)
[ "@", "auth_decorator", "def", "authenticated_403", "(", "self", ")", ":", "if", "(", "self", ".", "get_current_user", "(", ")", "is", "None", ")", ":", "raise", "web", ".", "HTTPError", "(", "403", ")" ]
like web.authenticated, but raises a 403 error instead of redirecting when there is no current user .
train
false
49,228
def mod_init(low): try: __salt__['portage_config.enforce_nice_config']() except Exception: return False return True
[ "def", "mod_init", "(", "low", ")", ":", "try", ":", "__salt__", "[", "'portage_config.enforce_nice_config'", "]", "(", ")", "except", "Exception", ":", "return", "False", "return", "True" ]
enforce a nice structure on the configuration files .
train
false
49,229
def string_from_hierarchy(dimension, hierarchy): if hierarchy: return ('%s@%s' % (_path_part_escape(str(dimension)), _path_part_escape(str(hierarchy)))) else: return _path_part_escape(str(dimension))
[ "def", "string_from_hierarchy", "(", "dimension", ",", "hierarchy", ")", ":", "if", "hierarchy", ":", "return", "(", "'%s@%s'", "%", "(", "_path_part_escape", "(", "str", "(", "dimension", ")", ")", ",", "_path_part_escape", "(", "str", "(", "hierarchy", ")", ")", ")", ")", "else", ":", "return", "_path_part_escape", "(", "str", "(", "dimension", ")", ")" ]
returns a string in the form dimension@hierarchy, or just dimension if hierarchy is none .
train
false
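Note: a sketch with a hypothetical stand-in for _path_part_escape (the real escaping lives elsewhere in the module):

def _path_part_escape(s):  # assumption for illustration only
    return s.replace('@', '%40')
print(string_from_hierarchy('date', 'ymd'))  # date@ymd
print(string_from_hierarchy('date', None))   # date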
49,230
def open_repo(path_or_repo): if isinstance(path_or_repo, BaseRepo): return path_or_repo return Repo(path_or_repo)
[ "def", "open_repo", "(", "path_or_repo", ")", ":", "if", "isinstance", "(", "path_or_repo", ",", "BaseRepo", ")", ":", "return", "path_or_repo", "return", "Repo", "(", "path_or_repo", ")" ]
open a repo given either an existing repo instance or a path to a repository .
train
false
49,231
def duplication_divergence_graph(n, p, seed=None): if ((p > 1) or (p < 0)): msg = 'NetworkXError p={0} is not in [0,1].'.format(p) raise nx.NetworkXError(msg) if (n < 2): msg = 'n must be greater than or equal to 2' raise nx.NetworkXError(msg) if (seed is not None): random.seed(seed) G = nx.Graph() G.name = 'duplication_divergence_graph({}, {})'.format(n, p) G.add_edge(0, 1) i = 2 while (i < n): random_node = random.choice(list(G)) G.add_node(i) flag = False for nbr in G.neighbors(random_node): if (random.random() < p): G.add_edge(i, nbr) flag = True if (not flag): G.remove_node(i) else: i += 1 return G
[ "def", "duplication_divergence_graph", "(", "n", ",", "p", ",", "seed", "=", "None", ")", ":", "if", "(", "(", "p", ">", "1", ")", "or", "(", "p", "<", "0", ")", ")", ":", "msg", "=", "'NetworkXError p={0} is not in [0,1].'", ".", "format", "(", "p", ")", "raise", "nx", ".", "NetworkXError", "(", "msg", ")", "if", "(", "n", "<", "2", ")", ":", "msg", "=", "'n must be greater than or equal to 2'", "raise", "nx", ".", "NetworkXError", "(", "msg", ")", "if", "(", "seed", "is", "not", "None", ")", ":", "random", ".", "seed", "(", "seed", ")", "G", "=", "nx", ".", "Graph", "(", ")", "G", ".", "name", "=", "'duplication_divergence_graph({}, {})'", ".", "format", "(", "n", ",", "p", ")", "G", ".", "add_edge", "(", "0", ",", "1", ")", "i", "=", "2", "while", "(", "i", "<", "n", ")", ":", "random_node", "=", "random", ".", "choice", "(", "list", "(", "G", ")", ")", "G", ".", "add_node", "(", "i", ")", "flag", "=", "False", "for", "nbr", "in", "G", ".", "neighbors", "(", "random_node", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "p", ")", ":", "G", ".", "add_edge", "(", "i", ",", "nbr", ")", "flag", "=", "True", "if", "(", "not", "flag", ")", ":", "G", ".", "remove_node", "(", "i", ")", "else", ":", "i", "+=", "1", "return", "G" ]
returns an undirected graph using the duplication-divergence model .
train
false
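Note: an example run of the generator above (networkx assumed importable; seeding makes the growth reproducible):

import networkx as nx
G = duplication_divergence_graph(10, 0.5, seed=42)
print(G.number_of_nodes())  # 10
print(G.name)               # duplication_divergence_graph(10, 0.5)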
49,232
def SequenceToImageAndDiff(images): image_diff_list = [] image_seq = tf.unstack(images, axis=1) for size in [32, 64, 128, 256]: resized_images = [tf.image.resize_images(i, [size, size]) for i in image_seq] diffs = [] for i in xrange(0, (len(resized_images) - 1)): diffs.append((resized_images[(i + 1)] - resized_images[i])) image_diff_list.append((tf.concat(0, resized_images[:(-1)]), tf.concat(0, diffs))) return image_diff_list
[ "def", "SequenceToImageAndDiff", "(", "images", ")", ":", "image_diff_list", "=", "[", "]", "image_seq", "=", "tf", ".", "unstack", "(", "images", ",", "axis", "=", "1", ")", "for", "size", "in", "[", "32", ",", "64", ",", "128", ",", "256", "]", ":", "resized_images", "=", "[", "tf", ".", "image", ".", "resize_images", "(", "i", ",", "[", "size", ",", "size", "]", ")", "for", "i", "in", "image_seq", "]", "diffs", "=", "[", "]", "for", "i", "in", "xrange", "(", "0", ",", "(", "len", "(", "resized_images", ")", "-", "1", ")", ")", ":", "diffs", ".", "append", "(", "(", "resized_images", "[", "(", "i", "+", "1", ")", "]", "-", "resized_images", "[", "i", "]", ")", ")", "image_diff_list", ".", "append", "(", "(", "tf", ".", "concat", "(", "0", ",", "resized_images", "[", ":", "(", "-", "1", ")", "]", ")", ",", "tf", ".", "concat", "(", "0", ",", "diffs", ")", ")", ")", "return", "image_diff_list" ]
convert image sequence batch into image and diff batch .
train
false
49,233
def multicontrast_pvalues(tstat, tcorr, df=None, dist='t', alternative='two-sided'): from statsmodels.sandbox.distributions.multivariate import mvstdtprob if ((df is None) and (dist == 't')): raise ValueError('df has to be specified for the t-distribution') tstat = np.asarray(tstat) ntests = len(tstat) cc = np.abs(tstat) pval_global = (1 - mvstdtprob((- cc), cc, tcorr, df)) pvals = [] for ti in cc: limits = (ti * np.ones(ntests)) pvals.append((1 - mvstdtprob((- cc), cc, tcorr, df))) return (pval_global, np.asarray(pvals))
[ "def", "multicontrast_pvalues", "(", "tstat", ",", "tcorr", ",", "df", "=", "None", ",", "dist", "=", "'t'", ",", "alternative", "=", "'two-sided'", ")", ":", "from", "statsmodels", ".", "sandbox", ".", "distributions", ".", "multivariate", "import", "mvstdtprob", "if", "(", "(", "df", "is", "None", ")", "and", "(", "dist", "==", "'t'", ")", ")", ":", "raise", "ValueError", "(", "'df has to be specified for the t-distribution'", ")", "tstat", "=", "np", ".", "asarray", "(", "tstat", ")", "ntests", "=", "len", "(", "tstat", ")", "cc", "=", "np", ".", "abs", "(", "tstat", ")", "pval_global", "=", "(", "1", "-", "mvstdtprob", "(", "(", "-", "cc", ")", ",", "cc", ",", "tcorr", ",", "df", ")", ")", "pvals", "=", "[", "]", "for", "ti", "in", "cc", ":", "limits", "=", "(", "ti", "*", "np", ".", "ones", "(", "ntests", ")", ")", "pvals", ".", "append", "(", "(", "1", "-", "mvstdtprob", "(", "(", "-", "cc", ")", ",", "cc", ",", "tcorr", ",", "df", ")", ")", ")", "return", "(", "pval_global", ",", "np", ".", "asarray", "(", "pvals", ")", ")" ]
pvalues for simultaneous tests .
train
false
49,235
def concatenate_block_managers(mgrs_indexers, axes, concat_axis, copy): concat_plan = combine_concat_plans([get_mgr_concatenation_plan(mgr, indexers) for (mgr, indexers) in mgrs_indexers], concat_axis) blocks = [make_block(concatenate_join_units(join_units, concat_axis, copy=copy), placement=placement) for (placement, join_units) in concat_plan] return BlockManager(blocks, axes)
[ "def", "concatenate_block_managers", "(", "mgrs_indexers", ",", "axes", ",", "concat_axis", ",", "copy", ")", ":", "concat_plan", "=", "combine_concat_plans", "(", "[", "get_mgr_concatenation_plan", "(", "mgr", ",", "indexers", ")", "for", "(", "mgr", ",", "indexers", ")", "in", "mgrs_indexers", "]", ",", "concat_axis", ")", "blocks", "=", "[", "make_block", "(", "concatenate_join_units", "(", "join_units", ",", "concat_axis", ",", "copy", "=", "copy", ")", ",", "placement", "=", "placement", ")", "for", "(", "placement", ",", "join_units", ")", "in", "concat_plan", "]", "return", "BlockManager", "(", "blocks", ",", "axes", ")" ]
concatenate block managers into one .
train
false
49,236
def Radian(radians): return radians
[ "def", "Radian", "(", "radians", ")", ":", "return", "radians" ]
all functions take in radians .
train
false
49,237
def get_close_matches(word, possibilities, n=3, cutoff=0.6): if (not (n > 0)): raise ValueError(('n must be > 0: %r' % (n,))) if (not (0.0 <= cutoff <= 1.0)): raise ValueError(('cutoff must be in [0.0, 1.0]: %r' % (cutoff,))) result = [] s = SequenceMatcher() s.set_seq2(word) for x in possibilities: s.set_seq1(x) if ((s.real_quick_ratio() >= cutoff) and (s.quick_ratio() >= cutoff) and (s.ratio() >= cutoff)): result.append((s.ratio(), x)) result = heapq.nlargest(n, result) return [x for (score, x) in result]
[ "def", "get_close_matches", "(", "word", ",", "possibilities", ",", "n", "=", "3", ",", "cutoff", "=", "0.6", ")", ":", "if", "(", "not", "(", "n", ">", "0", ")", ")", ":", "raise", "ValueError", "(", "(", "'n must be > 0: %r'", "%", "(", "n", ",", ")", ")", ")", "if", "(", "not", "(", "0.0", "<=", "cutoff", "<=", "1.0", ")", ")", ":", "raise", "ValueError", "(", "(", "'cutoff must be in [0.0, 1.0]: %r'", "%", "(", "cutoff", ",", ")", ")", ")", "result", "=", "[", "]", "s", "=", "SequenceMatcher", "(", ")", "s", ".", "set_seq2", "(", "word", ")", "for", "x", "in", "possibilities", ":", "s", ".", "set_seq1", "(", "x", ")", "if", "(", "(", "s", ".", "real_quick_ratio", "(", ")", ">=", "cutoff", ")", "and", "(", "s", ".", "quick_ratio", "(", ")", ">=", "cutoff", ")", "and", "(", "s", ".", "ratio", "(", ")", ">=", "cutoff", ")", ")", ":", "result", ".", "append", "(", "(", "s", ".", "ratio", "(", ")", ",", "x", ")", ")", "result", "=", "heapq", ".", "nlargest", "(", "n", ",", "result", ")", "return", "[", "x", "for", "(", "score", ",", "x", ")", "in", "result", "]" ]
use sequencematcher to return list of the best "good enough" matches .
train
true
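Note: this record mirrors the standard library's difflib.get_close_matches, so the stdlib version demonstrates the same behaviour:

import difflib
print(difflib.get_close_matches('appel', ['ape', 'apple', 'peach', 'puppy']))
# ['apple', 'ape']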
49,238
def test_unpickle_gpuarray_as_numpy_ndarray_flag0(): oldflag = config.experimental.unpickle_gpu_on_cpu config.experimental.unpickle_gpu_on_cpu = False try: testfile_dir = os.path.dirname(os.path.realpath(__file__)) fname = 'GpuArray.pkl' with open(os.path.join(testfile_dir, fname), 'rb') as fp: if PY3: u = CompatUnpickler(fp, encoding='latin1') else: u = CompatUnpickler(fp) mat = u.load() assert isinstance(mat, pygpu.gpuarray.GpuArray) assert (numpy.asarray(mat)[0] == (-42.0)) finally: config.experimental.unpickle_gpu_on_cpu = oldflag
[ "def", "test_unpickle_gpuarray_as_numpy_ndarray_flag0", "(", ")", ":", "oldflag", "=", "config", ".", "experimental", ".", "unpickle_gpu_on_cpu", "config", ".", "experimental", ".", "unpickle_gpu_on_cpu", "=", "False", "try", ":", "testfile_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", "fname", "=", "'GpuArray.pkl'", "with", "open", "(", "os", ".", "path", ".", "join", "(", "testfile_dir", ",", "fname", ")", ",", "'rb'", ")", "as", "fp", ":", "if", "PY3", ":", "u", "=", "CompatUnpickler", "(", "fp", ",", "encoding", "=", "'latin1'", ")", "else", ":", "u", "=", "CompatUnpickler", "(", "fp", ")", "mat", "=", "u", ".", "load", "(", ")", "assert", "isinstance", "(", "mat", ",", "pygpu", ".", "gpuarray", ".", "GpuArray", ")", "assert", "(", "numpy", ".", "asarray", "(", "mat", ")", "[", "0", "]", "==", "(", "-", "42.0", ")", ")", "finally", ":", "config", ".", "experimental", ".", "unpickle_gpu_on_cpu", "=", "oldflag" ]
tests for the case when pygpu isn't available during unpickle are in test_pickle .
train
false
49,239
def abstractMethod(): raise NotImplementedError('Method not implemented!')
[ "def", "abstractMethod", "(", ")", ":", "raise", "NotImplementedError", "(", "'Method not implemented!'", ")" ]
this should be called when an abstract method is called that should have been implemented by a subclass .
train
false
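Note: typical use of the helper above as a placeholder body in a base class:

class Base(object):
    def run(self):
        abstractMethod()  # subclasses are expected to override run()
class Impl(Base):
    def run(self):
        return 'done'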
49,240
def input_file(filename): if (excluded(filename) or (not filename_match(filename))): return {} if options.verbose: message(('checking ' + filename)) options.counters['files'] = (options.counters.get('files', 0) + 1) errors = Checker(filename).check_all() if (options.testsuite and (not errors)): message(('%s: %s' % (filename, 'no errors found')))
[ "def", "input_file", "(", "filename", ")", ":", "if", "(", "excluded", "(", "filename", ")", "or", "(", "not", "filename_match", "(", "filename", ")", ")", ")", ":", "return", "{", "}", "if", "options", ".", "verbose", ":", "message", "(", "(", "'checking '", "+", "filename", ")", ")", "options", ".", "counters", "[", "'files'", "]", "=", "(", "options", ".", "counters", ".", "get", "(", "'files'", ",", "0", ")", "+", "1", ")", "errors", "=", "Checker", "(", "filename", ")", ".", "check_all", "(", ")", "if", "(", "options", ".", "testsuite", "and", "(", "not", "errors", ")", ")", ":", "message", "(", "(", "'%s: %s'", "%", "(", "filename", ",", "'no errors found'", ")", ")", ")" ]
run all checks on a python source file .
train
true
49,242
@mock.patch('ckanext.datastore.db._get_fields') def test_upsert_with_insert_method_and_invalid_data(mock_get_fields_function): mock_connection = mock.Mock() mock_connection.execute.side_effect = sqlalchemy.exc.DataError('statement', 'params', 'orig', connection_invalidated=False) context = {'connection': mock_connection} data_dict = {'fields': [{'id': 'value', 'type': 'numeric'}], 'records': [{'value': 0}, {'value': 1}, {'value': 2}, {'value': 3}, {'value': ' '}, {'value': 5}, {'value': 6}, {'value': 7}], 'method': 'insert', 'resource_id': 'fake-resource-id'} mock_get_fields_function.return_value = data_dict['fields'] nose.tools.assert_raises(db.InvalidDataError, db.upsert_data, context, data_dict)
[ "@", "mock", ".", "patch", "(", "'ckanext.datastore.db._get_fields'", ")", "def", "test_upsert_with_insert_method_and_invalid_data", "(", "mock_get_fields_function", ")", ":", "mock_connection", "=", "mock", ".", "Mock", "(", ")", "mock_connection", ".", "execute", ".", "side_effect", "=", "sqlalchemy", ".", "exc", ".", "DataError", "(", "'statement'", ",", "'params'", ",", "'orig'", ",", "connection_invalidated", "=", "False", ")", "context", "=", "{", "'connection'", ":", "mock_connection", "}", "data_dict", "=", "{", "'fields'", ":", "[", "{", "'id'", ":", "'value'", ",", "'type'", ":", "'numeric'", "}", "]", ",", "'records'", ":", "[", "{", "'value'", ":", "0", "}", ",", "{", "'value'", ":", "1", "}", ",", "{", "'value'", ":", "2", "}", ",", "{", "'value'", ":", "3", "}", ",", "{", "'value'", ":", "' '", "}", ",", "{", "'value'", ":", "5", "}", ",", "{", "'value'", ":", "6", "}", ",", "{", "'value'", ":", "7", "}", "]", ",", "'method'", ":", "'insert'", ",", "'resource_id'", ":", "'fake-resource-id'", "}", "mock_get_fields_function", ".", "return_value", "=", "data_dict", "[", "'fields'", "]", "nose", ".", "tools", ".", "assert_raises", "(", "db", ".", "InvalidDataError", ",", "db", ".", "upsert_data", ",", "context", ",", "data_dict", ")" ]
upsert_data() should raise invaliddataerror if given invalid data .
train
false
49,243
def needs_update(targ_capacity, curr_capacity, num_up_to_date): return (not (num_up_to_date >= curr_capacity == targ_capacity))
[ "def", "needs_update", "(", "targ_capacity", ",", "curr_capacity", ",", "num_up_to_date", ")", ":", "return", "(", "not", "(", "num_up_to_date", ">=", "curr_capacity", "==", "targ_capacity", ")", ")" ]
return whether there are more batch updates to do .
train
false
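Note: the chained comparison above packs three checks into one expression; two quick evaluations:

print(needs_update(5, 5, 5))  # False: at target capacity and all up to date
print(needs_update(5, 4, 4))  # True: capacity has not reached the target yet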