id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
27,167
def sftp_prefix(config):
    """Build the ``sftp://`` URL prefix for a connection *config*.

    *config* is a mapping with ``username``, ``password``, ``host`` and
    ``port`` keys.  Credentials are embedded when present; the port is
    appended only when set and not the SFTP default (22).
    """
    user = config[u'username']
    password = config[u'password']
    if user and password:
        auth = u'%s:%s@' % (user, password)
    elif user:
        auth = u'%s@' % user
    else:
        auth = u''
    port = config[u'port']
    port_suffix = u':%d' % port if port and port != 22 else u''
    return u'sftp://%s%s%s/' % (auth, config[u'host'], port_suffix)
[ "def", "sftp_prefix", "(", "config", ")", ":", "login_str", "=", "u''", "port_str", "=", "u''", "if", "(", "config", "[", "u'username'", "]", "and", "config", "[", "u'password'", "]", ")", ":", "login_str", "=", "(", "u'%s:%s@'", "%", "(", "config", "[...
generate sftp url prefix .
train
false
27,169
def test_erfinvgpu():
    """Check that local_gpu_elemwise_0 replaces erfinv with ErfinvGPU.

    Compiles the same graph with and without the GPU mode and asserts
    the GPU-compiled graph's second op is a GpuElemwise wrapping
    ErfinvGPU.
    """
    x = tensor.fmatrix()
    f = theano.function([x], tensor.Elemwise(erfinv)(x), mode=mode_with_gpu)
    # Compiled only for its optimization side effects; result unused.
    theano.function([x], tensor.Elemwise(erfinv)(x), mode=mode_without_gpu)
    assert isinstance(f.maker.fgraph.toposort()[1].op, cuda.GpuElemwise)
    assert isinstance(f.maker.fgraph.toposort()[1].op.scalar_op, cuda.elemwise.ErfinvGPU)
    # NOTE(review): this random array is generated but never fed to f —
    # looks like a leftover; confirm whether f(...) was intended here.
    numpy.random.rand(7, 8).astype('float32')
[ "def", "test_erfinvgpu", "(", ")", ":", "x", "=", "tensor", ".", "fmatrix", "(", ")", "f", "=", "theano", ".", "function", "(", "[", "x", "]", ",", "tensor", ".", "Elemwise", "(", "erfinv", ")", "(", "x", ")", ",", "mode", "=", "mode_with_gpu", "...
test that local_gpu_elemwise_0 replaces erfinv with erfinvgpu .
train
false
27,170
@ensure_csrf_cookie
@ensure_valid_course_key
def jump_to_id(request, course_id, module_id):
    """Short-form jump_to: locate a module by bare id within *course_id*
    and delegate to ``jump_to``.

    Raises Http404 when no item matches; when several match, logs a
    warning and uses the first.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    items = modulestore().get_items(course_key, qualifiers={'name': module_id})
    if (len(items) == 0):
        raise Http404(u'Could not find id: {0} in course_id: {1}. Referer: {2}'.format(module_id, course_id, request.META.get('HTTP_REFERER', '')))
    if (len(items) > 1):
        log.warning(u'Multiple items found with id: %s in course_id: %s. Referer: %s. Using first: %s', module_id, course_id, request.META.get('HTTP_REFERER', ''), items[0].location.to_deprecated_string())
    return jump_to(request, course_id, items[0].location.to_deprecated_string())
[ "@", "ensure_csrf_cookie", "@", "ensure_valid_course_key", "def", "jump_to_id", "(", "request", ",", "course_id", ",", "module_id", ")", ":", "course_key", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "items", "=", "modulestore...
this entry point allows for a shorter version of a jump to where just the id of the element is passed in .
train
false
27,172
def get_widgets(request):
    """Return the widgets available to *request*'s user.

    Starts from a copy of the global WIDGETS registry and removes the
    'widget_index_assigned' widget when the user has no service agent.
    """
    available = dict(WIDGETS)
    try:
        service_agent = request.user.profile.serviceagent_set.all()[0]
    except Exception:
        # No profile / no agents / anonymous user all mean "no agent".
        service_agent = None
    if not service_agent:
        del available['widget_index_assigned']
    return available
[ "def", "get_widgets", "(", "request", ")", ":", "widgets", "=", "{", "}", "widgets", ".", "update", "(", "WIDGETS", ")", "try", ":", "agent", "=", "request", ".", "user", ".", "profile", ".", "serviceagent_set", ".", "all", "(", ")", "[", "0", "]", ...
returns a set of all available widgets .
train
false
27,173
def _get_source_name():
    """Return this SDK source identifier, 'Google-appcfg-<release>'.

    Falls back to the literal 'unknown' when no version object is
    available.
    """
    version_info = sdk_update_checker.GetVersionObject()
    release = 'unknown' if version_info is None else version_info['release']
    return 'Google-appcfg-%s' % release
[ "def", "_get_source_name", "(", ")", ":", "version", "=", "sdk_update_checker", ".", "GetVersionObject", "(", ")", "if", "(", "version", "is", "None", ")", ":", "release", "=", "'unknown'", "else", ":", "release", "=", "version", "[", "'release'", "]", "re...
gets the name of this source version .
train
false
27,174
def _root_excitingmixing_doc():
    # NOTE(review): empty stub — presumably a docstring carrier for the
    # 'excitingmixing' root-solver options; confirm before removing.
    pass
[ "def", "_root_excitingmixing_doc", "(", ")", ":", "pass" ]
options nit : int .
train
false
27,175
def missing_action(request, action):
    """Render a 404 response for an *action* that does not exist."""
    return Response(generate_template('missing_action.html', action=action), status=404)
[ "def", "missing_action", "(", "request", ",", "action", ")", ":", "return", "Response", "(", "generate_template", "(", "'missing_action.html'", ",", "action", "=", "action", ")", ",", "status", "=", "404", ")" ]
displayed if a user tried to access a action that does not exist .
train
false
27,176
def _get_course(course_key, user):
    """Load and return *user*'s course.

    Raises CourseNotFoundError when access fails with Http404, and
    DiscussionDisabledError when no enabled discussion tab exists.
    """
    try:
        course = get_course_with_access(user, 'load', course_key, check_if_enrolled=True)
    except Http404:
        raise CourseNotFoundError('Course not found.')
    if (not any([((tab.type == 'discussion') and tab.is_enabled(course, user)) for tab in course.tabs])):
        raise DiscussionDisabledError('Discussion is disabled for the course.')
    return course
[ "def", "_get_course", "(", "course_key", ",", "user", ")", ":", "try", ":", "course", "=", "get_course_with_access", "(", "user", ",", "'load'", ",", "course_key", ",", "check_if_enrolled", "=", "True", ")", "except", "Http404", ":", "raise", "CourseNotFoundEr...
helper function to load and return a users course .
train
false
27,177
def gethighlights():
    """Return (as a JSON string) all active highlights for the logged-in
    user on the requested page/course.

    NOTE(review): returns None implicitly when no user is logged in —
    confirm callers handle that.
    """
    page = request.vars.page
    course = request.vars.course
    if auth.user:
        result = db(((((db.user_highlights.user_id == auth.user.id) & (db.user_highlights.chapter_url == page)) & (db.user_highlights.course_id == course)) & (db.user_highlights.is_active == 1))).select()
        rowarray_list = []
        for row in result:
            res = {'range': row.range, 'uniqueId': row.id, 'parentClass': row.parent_class, 'pageSection': row.sub_chapter_url, 'method': row.method}
            rowarray_list.append(res)
        return json.dumps(rowarray_list)
[ "def", "gethighlights", "(", ")", ":", "page", "=", "request", ".", "vars", ".", "page", "course", "=", "request", ".", "vars", ".", "course", "if", "auth", ".", "user", ":", "result", "=", "db", "(", "(", "(", "(", "(", "db", ".", "user_highlights...
return all the highlights for a given user .
train
false
27,178
def print_diffs(text1, text2):
    """Print a unified diff between *text1* (expected) and *text2* (got)."""
    diff_lines = difflib.unified_diff(
        text1.splitlines(True),
        text2.splitlines(True),
        n=0,
        fromfile='expected',
        tofile='got',
    )
    print('\n' + ''.join(diff_lines))
[ "def", "print_diffs", "(", "text1", ",", "text2", ")", ":", "lines1", "=", "text1", ".", "splitlines", "(", "True", ")", "lines2", "=", "text2", ".", "splitlines", "(", "True", ")", "diffs", "=", "difflib", ".", "unified_diff", "(", "lines1", ",", "lin...
prints unified diffs for two texts .
train
false
27,179
def textindent(t, indent=0):
    """Indent every line of *t* by *indent* spaces."""
    pad = ' ' * indent
    return '\n'.join(pad + line for line in t.split('\n'))
[ "def", "textindent", "(", "t", ",", "indent", "=", "0", ")", ":", "return", "'\\n'", ".", "join", "(", "(", "(", "(", "' '", "*", "indent", ")", "+", "p", ")", "for", "p", "in", "t", ".", "split", "(", "'\\n'", ")", ")", ")" ]
indent text .
train
false
27,180
def load_dostime(buf, pos):
    """Read a little-endian 32-bit DOS timestamp from *buf* at *pos*.

    Returns (datetime, new_pos).
    """
    stamp, next_pos = load_le32(buf, pos)
    fields = parse_dos_time(stamp)
    return to_datetime(fields), next_pos
[ "def", "load_dostime", "(", "buf", ",", "pos", ")", ":", "(", "stamp", ",", "pos", ")", "=", "load_le32", "(", "buf", ",", "pos", ")", "tup", "=", "parse_dos_time", "(", "stamp", ")", "return", "(", "to_datetime", "(", "tup", ")", ",", "pos", ")" ]
load le32 dos timestamp .
train
true
27,181
def get_user_ca_bundle_path(settings):
    """Return the path to the user CA bundle, creating a blank bundle
    file on first use."""
    ensure_ca_bundle_dir()
    user_ca_bundle_path = os.path.join(ca_bundle_dir, 'Package Control.user-ca-bundle')
    if (not os.path.exists(user_ca_bundle_path)):
        if settings.get('debug'):
            console_write(u'\n Created blank user CA bundle\n ')
        # Touch the file so later readers find it.
        open(user_ca_bundle_path, 'a').close()
    return user_ca_bundle_path
[ "def", "get_user_ca_bundle_path", "(", "settings", ")", ":", "ensure_ca_bundle_dir", "(", ")", "user_ca_bundle_path", "=", "os", ".", "path", ".", "join", "(", "ca_bundle_dir", ",", "'Package Control.user-ca-bundle'", ")", "if", "(", "not", "os", ".", "path", "....
return the path to the user ca bundle .
train
false
27,183
def validate_base64(value):
    """Validate that the second whitespace-separated token of *value*
    is decodable base64; raise ValidationError otherwise.

    NOTE(review): assumes *value* has at least two tokens (e.g. a
    'Basic <payload>' header); an IndexError is also surfaced as
    ValidationError via the broad except.
    """
    try:
        base64.b64decode(value.split()[1])
    except Exception as e:
        raise ValidationError(e)
[ "def", "validate_base64", "(", "value", ")", ":", "try", ":", "base64", ".", "b64decode", "(", "value", ".", "split", "(", ")", "[", "1", "]", ")", "except", "Exception", "as", "e", ":", "raise", "ValidationError", "(", "e", ")" ]
check that value contains only valid base64 characters .
train
false
27,184
def test_get_config_does_not_exist():
    """A missing config path must raise ConfigDoesNotExistException."""
    with pytest.raises(ConfigDoesNotExistException):
        config.get_config('tests/test-config/this-does-not-exist.yaml')
[ "def", "test_get_config_does_not_exist", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ConfigDoesNotExistException", ")", ":", "config", ".", "get_config", "(", "'tests/test-config/this-does-not-exist.yaml'", ")" ]
check that exceptions .
train
false
27,185
def _remove_key(path, base=win32con.HKEY_CLASSES_ROOT):
    """Delete registry key *path* under *base*, ignoring 'not found'.

    NOTE(review): the tuple-unpacking ``except ... as (code, fn, msg)``
    form is Python 2-only syntax; port before running on Python 3.
    """
    try:
        win32api.RegDeleteKey(base, path)
    except win32api.error as (code, fn, msg):
        if (code != winerror.ERROR_FILE_NOT_FOUND):
            raise win32api.error(code, fn, msg)
[ "def", "_remove_key", "(", "path", ",", "base", "=", "win32con", ".", "HKEY_CLASSES_ROOT", ")", ":", "try", ":", "win32api", ".", "RegDeleteKey", "(", "base", ",", "path", ")", "except", "win32api", ".", "error", "as", "(", "code", ",", "fn", ",", "msg...
remove a string from the registry .
train
false
27,186
@box(types.Tuple)
@box(types.UniTuple)
def box_tuple(typ, val, c):
    """Box native tuple value *val* of Numba type *typ* into a Python
    tuple object, boxing each element by its dtype."""
    tuple_val = c.pyapi.tuple_new(typ.count)
    for (i, dtype) in enumerate(typ):
        item = c.builder.extract_value(val, i)
        obj = c.box(dtype, item)
        c.pyapi.tuple_setitem(tuple_val, i, obj)
    return tuple_val
[ "@", "box", "(", "types", ".", "Tuple", ")", "@", "box", "(", "types", ".", "UniTuple", ")", "def", "box_tuple", "(", "typ", ",", "val", ",", "c", ")", ":", "tuple_val", "=", "c", ".", "pyapi", ".", "tuple_new", "(", "typ", ".", "count", ")", "...
convert native array or structure *val* to a tuple object .
train
false
27,187
def print_live_refs(*a, **kw):
    """Print the tracked live object references (see format_live_refs)."""
    print(format_live_refs(*a, **kw))
[ "def", "print_live_refs", "(", "*", "a", ",", "**", "kw", ")", ":", "print", "(", "format_live_refs", "(", "*", "a", ",", "**", "kw", ")", ")" ]
print tracked objects .
train
false
27,188
def get_service(hass, config, discovery_info=None):
    """Return the command-line notification service for *config*."""
    command = config[CONF_COMMAND]
    return CommandLineNotificationService(command)
[ "def", "get_service", "(", "hass", ",", "config", ",", "discovery_info", "=", "None", ")", ":", "command", "=", "config", "[", "CONF_COMMAND", "]", "return", "CommandLineNotificationService", "(", "command", ")" ]
get the matrix notification service .
train
false
27,189
def common_log(environ, response, response_time=None):
    """Log an Apache-style access line for a WSGI request/response pair
    and return the formatted entry.

    The TypeError fallback retries with ``rt_ms`` — presumably for
    ApacheFormatter versions with a different response-time keyword;
    TODO confirm against the installed formatter.
    """
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if response_time:
        formatter = ApacheFormatter(with_response_time=True)
        try:
            log_entry = formatter(response.status_code, environ, len(response.content), rt_us=response_time)
        except TypeError:
            log_entry = formatter(response.status_code, environ, len(response.content), rt_ms=response_time)
    else:
        formatter = ApacheFormatter(with_response_time=False)
        log_entry = formatter(response.status_code, environ, len(response.content))
    logger.info(log_entry)
    return log_entry
[ "def", "common_log", "(", "environ", ",", "response", ",", "response_time", "=", "None", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "logger", ".", "setLevel", "(", "logging", ".", "INFO", ")", "if", "response_time", ":", "formatter", ...
given the wsgi environ and the response .
train
true
27,190
def _groupby_apply_funcs(df, *index, **kwargs): if len(index): kwargs.update(by=list(index)) funcs = kwargs.pop('funcs') grouped = df.groupby(**kwargs) result = collections.OrderedDict() for (result_column, func, func_kwargs) in funcs: result[result_column] = func(grouped, **func_kwargs) return pd.DataFrame(result)
[ "def", "_groupby_apply_funcs", "(", "df", ",", "*", "index", ",", "**", "kwargs", ")", ":", "if", "len", "(", "index", ")", ":", "kwargs", ".", "update", "(", "by", "=", "list", "(", "index", ")", ")", "funcs", "=", "kwargs", ".", "pop", "(", "'f...
group a dataframe and apply multiple aggregation functions .
train
false
27,191
def pofile(pofile, **kwargs):
    """Parse the PO/POT file *pofile*; thin wrapper around
    _pofile_or_mofile."""
    return _pofile_or_mofile(pofile, 'pofile', **kwargs)
[ "def", "pofile", "(", "pofile", ",", "**", "kwargs", ")", ":", "return", "_pofile_or_mofile", "(", "pofile", ",", "'pofile'", ",", "**", "kwargs", ")" ]
convenience function that parses the po or pot file pofile and returns a :class:~polib .
train
false
27,192
def trim_fasta(fasta_lines, output_length):
    """Yield FASTA records with sequences trimmed to the first
    *output_length* bases."""
    for (seq_id, seq) in parse_fasta(fasta_lines):
        (yield ('>%s\n%s\n' % (seq_id, seq[:output_length])))
[ "def", "trim_fasta", "(", "fasta_lines", ",", "output_length", ")", ":", "for", "(", "seq_id", ",", "seq", ")", "in", "parse_fasta", "(", "fasta_lines", ")", ":", "(", "yield", "(", "'>%s\\n%s\\n'", "%", "(", "seq_id", ",", "seq", "[", ":", "output_lengt...
trim fasta seqs to output_length bases .
train
false
27,194
def _coerce_to_dtypes(result, dtypes):
    """Coerce each scalar in *result* to its paired dtype in *dtypes*.

    Nulls pass through untouched; datetime64/timedelta64, bool, float
    and int dtypes get the matching conversion.  NOTE(review): the bare
    ``except`` silently keeps the original value on any failure.
    """
    if (len(result) != len(dtypes)):
        raise AssertionError('_coerce_to_dtypes requires equal len arrays')
    from pandas.tseries.timedeltas import _coerce_scalar_to_timedelta_type

    def conv(r, dtype):
        # Convert one scalar to the target dtype, best-effort.
        try:
            if isnull(r):
                pass
            elif (dtype == _NS_DTYPE):
                r = lib.Timestamp(r)
            elif (dtype == _TD_DTYPE):
                r = _coerce_scalar_to_timedelta_type(r)
            elif (dtype == np.bool_):
                # Integers other than 0/1 are kept as ints, not booleans.
                if (is_integer(r) and (r not in [0, 1])):
                    return int(r)
                r = bool(r)
            elif (dtype.kind == 'f'):
                r = float(r)
            elif (dtype.kind == 'i'):
                r = int(r)
        except:
            pass
        return r
    return [conv(r, dtype) for (r, dtype) in zip(result, dtypes)]
[ "def", "_coerce_to_dtypes", "(", "result", ",", "dtypes", ")", ":", "if", "(", "len", "(", "result", ")", "!=", "len", "(", "dtypes", ")", ")", ":", "raise", "AssertionError", "(", "'_coerce_to_dtypes requires equal len arrays'", ")", "from", "pandas", ".", ...
given a dtypes and a result set .
train
false
27,195
def deploy_mission_response_count(row):
    """Return the number of responses recorded for the mission in *row*
    (0 when the mission id or count is unavailable)."""
    if hasattr(row, 'deploy_mission'):
        row = row.deploy_mission
    try:
        mission_id = row.id
    except AttributeError:
        return 0
    db = current.db
    table = db.deploy_response
    count = table.id.count()
    row = db((table.mission_id == mission_id)).select(count).first()
    if row:
        return row[count]
    else:
        return 0
[ "def", "deploy_mission_response_count", "(", "row", ")", ":", "if", "hasattr", "(", "row", ",", "'deploy_mission'", ")", ":", "row", "=", "row", ".", "deploy_mission", "try", ":", "mission_id", "=", "row", ".", "id", "except", "AttributeError", ":", "return"...
number of responses to a mission .
train
false
27,197
def emboss_piped_SeqIO_convert(records, old_format, new_format):
    """Convert *records* between sequence formats by piping through
    EMBOSS seqret, returning the re-parsed records."""
    cline = SeqretCommandline(exes['seqret'], sformat=old_format, osformat=new_format, auto=True, filter=True)
    # shell=True is needed off-Windows so the commandline string is parsed.
    child = subprocess.Popen(str(cline), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=(sys.platform != 'win32'))
    SeqIO.write(records, child.stdin, old_format)
    child.stdin.close()
    child.stderr.close()
    records = list(SeqIO.parse(child.stdout, new_format))
    child.stdout.close()
    return records
[ "def", "emboss_piped_SeqIO_convert", "(", "records", ",", "old_format", ",", "new_format", ")", ":", "cline", "=", "SeqretCommandline", "(", "exes", "[", "'seqret'", "]", ",", "sformat", "=", "old_format", ",", "osformat", "=", "new_format", ",", "auto", "=", ...
run seqret .
train
false
27,198
def cert_is_wildcard(cert):
    """Return True if *cert* is a wildcard certificate, else False.

    A certificate is considered wildcard when its single SAN domain
    starts with '*', or when its subject common name starts with '*'.

    Fixes: previously returned None (implicitly) for non-wildcard
    certificates and raised IndexError when the subject had no CN
    attribute.
    """
    domains = cert_get_domains(cert)
    if len(domains) == 1 and domains[0][0:1] == '*':
        return True
    common_names = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)
    # Guard: certificates may legitimately carry no CN attribute.
    if common_names and common_names[0].value[0:1] == '*':
        return True
    return False
[ "def", "cert_is_wildcard", "(", "cert", ")", ":", "domains", "=", "cert_get_domains", "(", "cert", ")", "if", "(", "(", "len", "(", "domains", ")", "==", "1", ")", "and", "(", "domains", "[", "0", "]", "[", "0", ":", "1", "]", "==", "'*'", ")", ...
determines if certificate is a wildcard certificate .
train
false
27,199
def _alembic(*args):
    """Run an alembic command against a temporary alembic.ini pointing
    at the JupyterHub sqlite database."""
    with _temp_alembic_ini('sqlite:///jupyterhub.sqlite') as alembic_ini:
        check_call((['alembic', '-c', alembic_ini] + list(args)))
[ "def", "_alembic", "(", "*", "args", ")", ":", "with", "_temp_alembic_ini", "(", "'sqlite:///jupyterhub.sqlite'", ")", "as", "alembic_ini", ":", "check_call", "(", "(", "[", "'alembic'", ",", "'-c'", ",", "alembic_ini", "]", "+", "list", "(", "args", ")", ...
run an alembic command with a temporary alembic .
train
false
27,202
def get_enabled():
    """Return the list of services flagged 'YES' (enabled)."""
    return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return a list of all enabled services cli example: .
train
false
27,203
def apply_spm_config(overrides, defaults):
    """Return the SPM configuration dict: *defaults* overlaid with
    *overrides*, then root_dir prepended to the path settings and to
    any log-file setting that is not a URL."""
    opts = defaults.copy()
    if overrides:
        opts.update(overrides)
    prepend_root_dirs = ['formula_path', 'pillar_path', 'reactor_path', 'spm_cache_dir', 'spm_build_dir']
    for config_key in ('spm_logfile',):
        log_setting = opts.get(config_key, '')
        if (log_setting is None):
            continue
        # Only filesystem paths (no URL scheme) get root_dir prepended.
        if (urlparse(log_setting).scheme == ''):
            prepend_root_dirs.append(config_key)
    prepend_root_dir(opts, prepend_root_dirs)
    return opts
[ "def", "apply_spm_config", "(", "overrides", ",", "defaults", ")", ":", "opts", "=", "defaults", ".", "copy", "(", ")", "if", "overrides", ":", "opts", ".", "update", "(", "overrides", ")", "prepend_root_dirs", "=", "[", "'formula_path'", ",", "'pillar_path'...
returns the spm configurations dict .
train
true
27,204
def _get_installs(app_id):
    """Calculate installs of *app_id* over the last 90 days, overall and
    per region, from Monolith.

    Returns {} on query error or when no aggregation data exists,
    otherwise {'all': total, <region_slug>: total, ...}.
    """
    POPULARITY_PERIOD = 90
    client = get_monolith_client()
    popular = {'filter': {'range': {'date': {'gte': days_ago(POPULARITY_PERIOD).date().isoformat(), 'lte': days_ago(1).date().isoformat()}}}, 'aggs': {'total_installs': {'sum': {'field': 'app_installs'}}}}
    query = {'query': {'filtered': {'query': {'match_all': {}}, 'filter': {'term': {'app-id': app_id}}}}, 'aggregations': {'popular': popular, 'region': {'terms': {'field': 'region', 'size': len(mkt.regions.ALL_REGIONS)}, 'aggregations': {'popular': popular}}}, 'size': 0}
    try:
        res = client.raw(query)
    except ValueError as e:
        task_log.error('Error response from Monolith: {0}'.format(e))
        return {}
    if ('aggregations' not in res):
        task_log.error('No installs for app {}'.format(app_id))
        return {}
    results = {'all': res['aggregations']['popular']['total_installs']['value']}
    if ('region' in res['aggregations']):
        for regional_res in res['aggregations']['region']['buckets']:
            region_slug = regional_res['key']
            popular = regional_res['popular']['total_installs']['value']
            results[region_slug] = popular
    return results
[ "def", "_get_installs", "(", "app_id", ")", ":", "POPULARITY_PERIOD", "=", "90", "client", "=", "get_monolith_client", "(", ")", "popular", "=", "{", "'filter'", ":", "{", "'range'", ":", "{", "'date'", ":", "{", "'gte'", ":", "days_ago", "(", "POPULARITY_...
calculate popularity of app for all regions and per region .
train
false
27,205
def dict_from_dotted_str(items):
    """Parse (dotted.key, value) pairs into a nested dict.

    CamelCase key parts are converted to under_scores, and string
    values are passed through _try_convert.
    """
    args = {}
    for (key, value) in items:
        parts = key.split('.')
        key = str(camelcase_to_underscore(parts[0]))
        if isinstance(value, six.string_types):
            value = _try_convert(value)
        if (len(parts) > 1):
            # Walk/create intermediate dicts for every dotted level.
            d = args.get(key, {})
            args[key] = d
            for k in parts[1:(-1)]:
                k = camelcase_to_underscore(k)
                v = d.get(k, {})
                d[k] = v
                d = v
            d[camelcase_to_underscore(parts[(-1)])] = value
        else:
            args[key] = value
    return args
[ "def", "dict_from_dotted_str", "(", "items", ")", ":", "args", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "items", ":", "parts", "=", "key", ".", "split", "(", "'.'", ")", "key", "=", "str", "(", "camelcase_to_underscore", "(", "parts...
parse multi dot-separated argument into dict .
train
false
27,206
def analyze_document(service, document):
    """Extract and return (sentiments, entities) for *document* using
    *service*."""
    logging.info('Analyzing {}'.format(document.doc_id))
    (sentiments, entities) = document.extract_sentiment_entities(service)
    return (sentiments, entities)
[ "def", "analyze_document", "(", "service", ",", "document", ")", ":", "logging", ".", "info", "(", "'Analyzing {}'", ".", "format", "(", "document", ".", "doc_id", ")", ")", "(", "sentiments", ",", "entities", ")", "=", "document", ".", "extract_sentiment_en...
analyze the document and get the distribution of sentiments and the movie name .
train
false
27,207
def _remove_nonjoin_identity_nodes(graph, keep_iterables=False):
    """Remove non-join identity nodes from *graph*; iterable nodes are
    retained iff *keep_iterables* is True.  Returns the mutated graph."""
    for node in _identity_nodes(graph, (not keep_iterables)):
        if (not hasattr(node, u'joinsource')):
            _remove_identity_node(graph, node)
    return graph
[ "def", "_remove_nonjoin_identity_nodes", "(", "graph", ",", "keep_iterables", "=", "False", ")", ":", "for", "node", "in", "_identity_nodes", "(", "graph", ",", "(", "not", "keep_iterables", ")", ")", ":", "if", "(", "not", "hasattr", "(", "node", ",", "u'...
remove non-join identity nodes from the given graph iterable nodes are retained if and only if the keep_iterables flag is set to true .
train
false
27,210
def zmqversion():
    """Return the installed ZeroMQ version as {'zmqversion': ...},
    or {} when pyzmq is not importable."""
    try:
        import zmq
    except ImportError:
        return {}
    return {'zmqversion': zmq.zmq_version()}
[ "def", "zmqversion", "(", ")", ":", "try", ":", "import", "zmq", "return", "{", "'zmqversion'", ":", "zmq", ".", "zmq_version", "(", ")", "}", "except", "ImportError", ":", "return", "{", "}" ]
return the zeromq version .
train
false
27,211
def get_endtime(jid):
    """Return the stored end time for job *jid*, or False when no
    endtime file exists."""
    jid_dir = salt.utils.jid.jid_dir(jid, _job_dir(), __opts__['hash_type'])
    etpath = os.path.join(jid_dir, ENDTIME)
    if (not os.path.exists(etpath)):
        return False
    with salt.utils.fopen(etpath, 'r') as etfile:
        endtime = etfile.read().strip('\n')
    return endtime
[ "def", "get_endtime", "(", "jid", ")", ":", "jid_dir", "=", "salt", ".", "utils", ".", "jid", ".", "jid_dir", "(", "jid", ",", "_job_dir", "(", ")", ",", "__opts__", "[", "'hash_type'", "]", ")", "etpath", "=", "os", ".", "path", ".", "join", "(", ...
retrieve the stored endtime for a given job returns false if no endtime is present .
train
true
27,212
def version_cmp(pkg1, pkg2, ignore_epoch=False):
    """Compare two package versions; delegates to lowpkg.version_cmp."""
    return __salt__['lowpkg.version_cmp'](pkg1, pkg2, ignore_epoch=ignore_epoch)
[ "def", "version_cmp", "(", "pkg1", ",", "pkg2", ",", "ignore_epoch", "=", "False", ")", ":", "return", "__salt__", "[", "'lowpkg.version_cmp'", "]", "(", "pkg1", ",", "pkg2", ",", "ignore_epoch", "=", "ignore_epoch", ")" ]
sort versions from newest to oldest .
train
true
27,213
def b64(int_bloom):
    """Return *int_bloom* as a 256-byte big-endian value, zero-padded
    on the left."""
    return utils.zpad(utils.int_to_big_endian(int_bloom), 256)
[ "def", "b64", "(", "int_bloom", ")", ":", "return", "utils", ".", "zpad", "(", "utils", ".", "int_to_big_endian", "(", "int_bloom", ")", ",", "256", ")" ]
returns b256 .
train
false
27,214
@webauth.SecurityCheck
@renderers.ErrorHandler()
def RenderApi(request):
    """Handle /api/ requests (security-checked, error-wrapped)."""
    return http_api.RenderHttpResponse(request)
[ "@", "webauth", ".", "SecurityCheck", "@", "renderers", ".", "ErrorHandler", "(", ")", "def", "RenderApi", "(", "request", ")", ":", "return", "http_api", ".", "RenderHttpResponse", "(", "request", ")" ]
handler for the /api/ requests .
train
false
27,215
def addEmptyRow(gridPosition):
    """Add an empty label row at the tracked grid position."""
    gridPosition.increment()
    Tkinter.Label(gridPosition.master).grid(row=gridPosition.row, column=gridPosition.column)
[ "def", "addEmptyRow", "(", "gridPosition", ")", ":", "gridPosition", ".", "increment", "(", ")", "Tkinter", ".", "Label", "(", "gridPosition", ".", "master", ")", ".", "grid", "(", "row", "=", "gridPosition", ".", "row", ",", "column", "=", "gridPosition",...
add an empty row .
train
false
27,218
def plan_resume(interval):
    """Pause downloading now and schedule a resume *interval* minutes
    from now; interval <= 0 cancels the pause and unpauses everything."""
    global __SCHED, __PAUSE_END
    if (interval > 0):
        __PAUSE_END = (time.time() + (interval * 60))
        logging.debug('Schedule resume at %s', __PAUSE_END)
        __SCHED.add_single_task(__oneshot_resume, '', (interval * 60), kronos.method.sequential, [__PAUSE_END], None)
        sabnzbd.downloader.Downloader.do.pause()
    else:
        __PAUSE_END = None
        sabnzbd.unpause_all()
[ "def", "plan_resume", "(", "interval", ")", ":", "global", "__SCHED", ",", "__PAUSE_END", "if", "(", "interval", ">", "0", ")", ":", "__PAUSE_END", "=", "(", "time", ".", "time", "(", ")", "+", "(", "interval", "*", "60", ")", ")", "logging", ".", ...
set a scheduled resume after the interval .
train
false
27,219
def as_es2_command(command):
    """Translate a desktop GL command tuple into its ES2 equivalent.

    FUNC commands lose the ``gl`` prefix (first letter lowercased),
    SHADERS payloads are converted via convert_shaders, and UNIFORM
    array values become plain lists.  Anything else passes through.
    """
    kind = command[0]
    if kind == 'FUNC':
        es_name = re.sub('^gl([A-Z])', (lambda m: m.group(1).lower()), command[1])
        return (kind, es_name) + command[2:]
    if kind == 'SHADERS':
        return command[:2] + convert_shaders('es2', command[2:])
    if kind == 'UNIFORM':
        return command[:(-1)] + (command[(-1)].tolist(),)
    return command
[ "def", "as_es2_command", "(", "command", ")", ":", "if", "(", "command", "[", "0", "]", "==", "'FUNC'", ")", ":", "return", "(", "(", "command", "[", "0", "]", ",", "re", ".", "sub", "(", "'^gl([A-Z])'", ",", "(", "lambda", "m", ":", "m", ".", ...
modify a desktop command so it works on es2 .
train
true
27,220
def dedent_interpd(func):
    """Apply interpd after dedenting *func*'s docstring.

    On Python 2, bound methods are unwrapped to the underlying function
    first so the docstring can be rewritten.
    """
    if (isinstance(func, types.MethodType) and (not six.PY3)):
        func = func.im_func
    return interpd(dedent(func))
[ "def", "dedent_interpd", "(", "func", ")", ":", "if", "(", "isinstance", "(", "func", ",", "types", ".", "MethodType", ")", "and", "(", "not", "six", ".", "PY3", ")", ")", ":", "func", "=", "func", ".", "im_func", "return", "interpd", "(", "dedent", ...
a special case of the interpd that first performs a dedent on the incoming docstring .
train
false
27,221
def build_dictionary(text):
    """Build a word dictionary from *text*, a list of sentences.

    Returns (worddict, wordcount): *wordcount* maps each word to its
    frequency, and *worddict* maps each word to a rank index starting
    at 2 (most frequent word gets 2; indices 0/1 are presumably
    reserved for special tokens — TODO confirm against the consumer).

    Fix: materialize keys()/values() as lists — under Python 3 the
    dict views are neither indexable (``words[sidx]``) nor directly
    usable with numpy.argsort.
    """
    wordcount = OrderedDict()
    for cc in text:
        words = cc.split()
        for w in words:
            if (w not in wordcount):
                wordcount[w] = 0
            wordcount[w] += 1
    words = list(wordcount.keys())
    freqs = list(wordcount.values())
    sorted_idx = numpy.argsort(freqs)[::(-1)]
    worddict = OrderedDict()
    for (idx, sidx) in enumerate(sorted_idx):
        worddict[words[sidx]] = (idx + 2)
    return (worddict, wordcount)
[ "def", "build_dictionary", "(", "text", ")", ":", "wordcount", "=", "OrderedDict", "(", ")", "for", "cc", "in", "text", ":", "words", "=", "cc", ".", "split", "(", ")", "for", "w", "in", "words", ":", "if", "(", "w", "not", "in", "wordcount", ")", ...
build a dictionary text: list of sentences .
train
false
27,222
def youtube(link):
    """Return the embed URL for a YouTube watch or youtu.be link, or
    None when *link* matches neither form."""
    patterns = (
        'http://www\\.youtube\\.com\\/watch\\?v=([a-zA-Z0-9\\-\\_]+)',
        'http:\\/\\/youtu.be\\/([a-zA-Z0-9\\-\\_]+)',
    )
    for pattern in patterns:
        match = re.match(pattern, link)
        if match:
            return ('http://www.youtube.com/embed/%s' % match.group(1))
    return None
[ "def", "youtube", "(", "link", ")", ":", "pattern", "=", "'http://www\\\\.youtube\\\\.com\\\\/watch\\\\?v=([a-zA-Z0-9\\\\-\\\\_]+)'", "match", "=", "re", ".", "match", "(", "pattern", ",", "link", ")", "if", "(", "not", "match", ")", ":", "pattern", "=", "'http:...
youtube <query> -- returns the first youtube search result for <query> .
train
false
27,223
def launchctl(sub_cmd, *args, **kwargs):
    """Run ``launchctl <sub_cmd> ...`` and raise CommandExecutionError
    on a nonzero exit.

    Pass return_stdout=True to get the command's stdout instead of the
    boolean True on success.
    """
    return_stdout = kwargs.pop('return_stdout', False)
    cmd = ['launchctl', sub_cmd]
    cmd.extend(args)
    # List-form argv; never routed through a shell.
    kwargs['python_shell'] = False
    ret = __salt__['cmd.run_all'](cmd, **kwargs)
    if ret['retcode']:
        out = 'Failed to {0} service:\n'.format(sub_cmd)
        out += 'stdout: {0}\n'.format(ret['stdout'])
        out += 'stderr: {0}\n'.format(ret['stderr'])
        out += 'retcode: {0}\n'.format(ret['retcode'])
        raise CommandExecutionError(out)
    else:
        return (ret['stdout'] if return_stdout else True)
[ "def", "launchctl", "(", "sub_cmd", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return_stdout", "=", "kwargs", ".", "pop", "(", "'return_stdout'", ",", "False", ")", "cmd", "=", "[", "'launchctl'", ",", "sub_cmd", "]", "cmd", ".", "extend", "(", ...
run a launchctl command and raise an error if it fails .
train
false
27,224
def disable_warnings(_option, _opt_str, _value, _parser):
    """optparse callback: raise the root logger's threshold just above
    WARNING so warnings are suppressed.  All arguments are ignored."""
    root = logging.getLogger('')
    root.setLevel(logging.WARNING + 1)
[ "def", "disable_warnings", "(", "_option", ",", "_opt_str", ",", "_value", ",", "_parser", ")", ":", "rootlogger", "=", "logging", ".", "getLogger", "(", "''", ")", "rootlogger", ".", "setLevel", "(", "(", "logging", ".", "WARNING", "+", "1", ")", ")" ]
sets the location variable in the parser to the filename in question .
train
false
27,225
def repr_readers(h):
    """Return human-readable descriptions of the hub's pending readers."""
    return [u'({0}){1}->{2}'.format(fd, _rcb(cb), repr_flag((READ | ERR))) for (fd, cb) in items(h.readers)]
[ "def", "repr_readers", "(", "h", ")", ":", "return", "[", "u'({0}){1}->{2}'", ".", "format", "(", "fd", ",", "_rcb", "(", "cb", ")", ",", "repr_flag", "(", "(", "READ", "|", "ERR", ")", ")", ")", "for", "(", "fd", ",", "cb", ")", "in", "items", ...
return description of pending readers .
train
false
27,226
def set_sleep(minutes):
    """Set the idle time until sleep (computer, display and hard disk)
    via ``systemsetup -setsleep``; return True only when all three
    report the updated value."""
    value = _validate_sleep(minutes)
    cmd = 'systemsetup -setsleep {0}'.format(value)
    salt.utils.mac_utils.execute_return_success(cmd)
    state = []
    for check in (get_computer_sleep, get_display_sleep, get_harddisk_sleep):
        state.append(salt.utils.mac_utils.confirm_updated(value, check))
    return all(state)
[ "def", "set_sleep", "(", "minutes", ")", ":", "value", "=", "_validate_sleep", "(", "minutes", ")", "cmd", "=", "'systemsetup -setsleep {0}'", ".", "format", "(", "value", ")", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ...
sets the amount of idle time until the machine sleeps .
train
true
27,227
def get_requirements():
    """Read the platform-appropriate requirements file and return its
    entries with comments and blank lines stripped."""
    filename = 'win_requirements.txt' if OS_WINDOWS else 'requirements.txt'
    with open(filename, 'r') as f:
        raw_lines = f.readlines()
    stripped = (line.split('#')[0].strip() for line in raw_lines)
    return [entry for entry in stripped if entry]
[ "def", "get_requirements", "(", ")", ":", "filename", "=", "(", "'win_requirements.txt'", "if", "OS_WINDOWS", "else", "'requirements.txt'", ")", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "req_lines", "=", "f", ".", "readlines", "(", ...
to update the requirements for evennia .
train
false
27,228
def test_noop():
    """Calling main with no args or an unknown arg must exit via
    DocoptExit."""
    with pytest.raises(DocoptExit):
        main([])
    with pytest.raises(DocoptExit):
        main(['foobar'])
[ "def", "test_noop", "(", ")", ":", "with", "pytest", ".", "raises", "(", "DocoptExit", ")", ":", "main", "(", "[", "]", ")", "with", "pytest", ".", "raises", "(", "DocoptExit", ")", ":", "main", "(", "[", "'foobar'", "]", ")" ]
ensure docopt exit .
train
false
27,229
def newer_group(sources, target, missing='error'):
    """Return True if *target* is out-of-date w.r.t. any file in
    *sources*.

    A missing target is always out-of-date.  A missing source is
    handled per *missing*: 'ignore' skips it, 'newer' treats it as
    newer (returns True), and 'error' falls through so os.stat raises.
    """
    if not os.path.exists(target):
        return True
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'ignore':
                continue
            if missing == 'newer':
                return True
            # missing == 'error': deliberately fall through to os.stat,
            # which raises for the nonexistent path.
        if os.stat(source)[ST_MTIME] > target_mtime:
            return True
    return False
[ "def", "newer_group", "(", "sources", ",", "target", ",", "missing", "=", "'error'", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "target", ")", ")", ":", "return", "True", "target_mtime", "=", "os", ".", "stat", "(", "target", ...
return true if target is out-of-date with respect to any file listed in sources .
train
false
27,231
def make_string(seq):
    """Best-effort conversion of a code sequence to a printable string.

    Keeps only codes in the printable range [32, 256); elements that
    are not comparable to ints are skipped rather than raising.  When
    nothing printable remains, falls back to ``str(seq)``.
    """
    printable = []
    for code in seq:
        try:
            if 32 <= code < 256:
                printable.append(chr(code))
        except TypeError:
            # Out-of-range / non-numeric element: silently skip it.
            pass
    if not printable:
        return str(seq)
    return ''.join(printable)
[ "def", "make_string", "(", "seq", ")", ":", "string", "=", "''", "for", "c", "in", "seq", ":", "try", ":", "if", "(", "(", "32", "<=", "c", ")", "and", "(", "c", "<", "256", ")", ")", ":", "string", "+=", "chr", "(", "c", ")", "except", "Ty...
dont throw an exception when given an out of range character .
train
true
27,232
def kmp_table(word):
    """Build the Knuth-Morris-Pratt partial-match (failure) table.

    table[0] is -1 and table[1] is 0 by convention; for pos >= 2,
    table[pos] is the length of the longest proper prefix of
    word[:pos] that is also a suffix of it.
    """
    table = [-1, 0]
    pos, cnd = 2, 0
    while pos < len(word):
        if word[pos - 1] == word[cnd]:
            # Extend the current prefix match.
            cnd += 1
            table.append(cnd)
            pos += 1
        elif cnd > 0:
            # Fall back to the next-shorter candidate prefix.
            cnd = table[cnd]
        else:
            table.append(0)
            pos += 1
    return table
[ "def", "kmp_table", "(", "word", ")", ":", "pos", "=", "2", "cnd", "=", "0", "table", "=", "list", "(", ")", "table", ".", "append", "(", "(", "-", "1", ")", ")", "table", ".", "append", "(", "0", ")", "while", "(", "pos", "<", "len", "(", ...
build the partial match table of the knuth-morris-pratt algorithm .
train
false
27,233
def print_fs(host_fs): print '{} DCTB {} DCTB '.format('Datastore: ', host_fs.volume.name) print '{} DCTB {} DCTB '.format('UUID: ', host_fs.volume.uuid) print '{} DCTB {} DCTB '.format('Capacity: ', sizeof_fmt(host_fs.volume.capacity)) print '{} DCTB {} DCTB '.format('VMFS Version: ', host_fs.volume.version) print '{} DCTB {} DCTB '.format('Is Local VMFS: ', host_fs.volume.local) print '{} DCTB {} DCTB '.format('SSD: ', host_fs.volume.ssd)
[ "def", "print_fs", "(", "host_fs", ")", ":", "print", "'{} DCTB {} DCTB '", ".", "format", "(", "'Datastore: '", ",", "host_fs", ".", "volume", ".", "name", ")", "print", "'{} DCTB {} DCTB '", ".", "format", "(", "'UUID: '", ",", "host_fs", ".", "...
prints the host file system volume info .
train
false
27,234
@contextlib.contextmanager def for_iter(context, builder, iterable_type, val): iterator_type = iterable_type.iterator_type iterval = call_getiter(context, builder, iterable_type, val) bb_body = builder.append_basic_block('for_iter.body') bb_end = builder.append_basic_block('for_iter.end') def do_break(): builder.branch(bb_end) builder.branch(bb_body) with builder.goto_block(bb_body): res = call_iternext(context, builder, iterator_type, iterval) with builder.if_then(builder.not_(res.is_valid()), likely=False): builder.branch(bb_end) (yield _ForIterLoop(res.yielded_value(), do_break)) builder.branch(bb_body) builder.position_at_end(bb_end) if context.enable_nrt: context.nrt.decref(builder, iterator_type, iterval)
[ "@", "contextlib", ".", "contextmanager", "def", "for_iter", "(", "context", ",", "builder", ",", "iterable_type", ",", "val", ")", ":", "iterator_type", "=", "iterable_type", ".", "iterator_type", "iterval", "=", "call_getiter", "(", "context", ",", "builder", ...
simulate a for loop on the given iterable .
train
false
27,235
def visitors_by_locale(start_date, end_date): visits_by_locale = {} request = _build_request() @retry_503 def _make_request(): return request.get(ids=('ga:' + profile_id), start_date=str(start_date), end_date=str(end_date), metrics='ga:visitors', dimensions='ga:pagePathLevel1').execute() results = _make_request() for result in results['rows']: path = result[0][1:(-1)] visitors = int(result[1]) if (path in settings.SUMO_LANGUAGES): visits_by_locale[path] = visitors return visits_by_locale
[ "def", "visitors_by_locale", "(", "start_date", ",", "end_date", ")", ":", "visits_by_locale", "=", "{", "}", "request", "=", "_build_request", "(", ")", "@", "retry_503", "def", "_make_request", "(", ")", ":", "return", "request", ".", "get", "(", "ids", ...
return the number of unique visits by locale in a given date range .
train
false
27,236
def create_managed_config(base_config, cluster): config = dict(base_config) config.update(generate_managed_section(cluster)) return config
[ "def", "create_managed_config", "(", "base_config", ",", "cluster", ")", ":", "config", "=", "dict", "(", "base_config", ")", "config", ".", "update", "(", "generate_managed_section", "(", "cluster", ")", ")", "return", "config" ]
generate a full configuration from the given base configuration by adding a managed section for the given cluster instance .
train
false
27,237
def test_batch_normalized_mlp_transformed(): x = tensor.matrix('x') mlp = BatchNormalizedMLP([Tanh(), Tanh()], [5, 7, 9]) with batch_normalization(mlp): y = mlp.apply(x) assert (len(get_batch_normalization_updates(ComputationGraph([y]))) == 4)
[ "def", "test_batch_normalized_mlp_transformed", "(", ")", ":", "x", "=", "tensor", ".", "matrix", "(", "'x'", ")", "mlp", "=", "BatchNormalizedMLP", "(", "[", "Tanh", "(", ")", ",", "Tanh", "(", ")", "]", ",", "[", "5", ",", "7", ",", "9", "]", ")"...
smoke test that a graph involving a batchnormalizedmlp transforms .
train
false
27,238
def _os_walk_unicode(top): try: names = os.listdir(deunicodise(top)) except: return (dirs, nondirs) = ([], []) for name in names: name = unicodise(name) if os.path.isdir(deunicodise(os.path.join(top, name))): if (not handle_exclude_include_walk_dir(top, name)): dirs.append(name) else: nondirs.append(name) (yield (top, dirs, nondirs)) for name in dirs: new_path = os.path.join(top, name) if (not os.path.islink(deunicodise(new_path))): for x in _os_walk_unicode(new_path): (yield x)
[ "def", "_os_walk_unicode", "(", "top", ")", ":", "try", ":", "names", "=", "os", ".", "listdir", "(", "deunicodise", "(", "top", ")", ")", "except", ":", "return", "(", "dirs", ",", "nondirs", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", ...
reimplementation of pythons os .
train
false
27,240
def verySimpleLimit(bw=150): intf = custom(TCIntf, bw=bw) net = Mininet(intf=intf) (h1, h2) = (net.addHost('h1'), net.addHost('h2')) net.addLink(h1, h2) net.start() net.pingAll() net.iperf() h1.cmdPrint('tc -s qdisc ls dev', h1.defaultIntf()) h2.cmdPrint('tc -d class show dev', h2.defaultIntf()) h1.cmdPrint('tc -s qdisc ls dev', h1.defaultIntf()) h2.cmdPrint('tc -d class show dev', h2.defaultIntf()) net.stop()
[ "def", "verySimpleLimit", "(", "bw", "=", "150", ")", ":", "intf", "=", "custom", "(", "TCIntf", ",", "bw", "=", "bw", ")", "net", "=", "Mininet", "(", "intf", "=", "intf", ")", "(", "h1", ",", "h2", ")", "=", "(", "net", ".", "addHost", "(", ...
absurdly simple limiting test .
train
false
27,242
def normalize_scopes(scopes): all_scopes = set() for sc in scopes: try: scope_tuple = public_scopes[sc] all_scopes |= scope_tuple.parts except KeyError: pass return all_scopes
[ "def", "normalize_scopes", "(", "scopes", ")", ":", "all_scopes", "=", "set", "(", ")", "for", "sc", "in", "scopes", ":", "try", ":", "scope_tuple", "=", "public_scopes", "[", "sc", "]", "all_scopes", "|=", "scope_tuple", ".", "parts", "except", "KeyError"...
given a list of public-facing scope names from a cas token .
train
false
27,244
@contextmanager def raise_on_meta_error(funcname=None): try: (yield) except Exception as e: (exc_type, exc_value, exc_traceback) = sys.exc_info() tb = ''.join(traceback.format_tb(exc_traceback)) msg = 'Metadata inference failed{0}.\n\nOriginal error is below:\n------------------------\n{1}\n\nTraceback:\n---------\n{2}'.format((' in `{0}`'.format(funcname) if funcname else ''), repr(e), tb) raise ValueError(msg)
[ "@", "contextmanager", "def", "raise_on_meta_error", "(", "funcname", "=", "None", ")", ":", "try", ":", "(", "yield", ")", "except", "Exception", "as", "e", ":", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")", "=", "sys", ".", "exc_info", ...
reraise errors in this block to show metadata inference failure .
train
false
27,246
def ensure_unquoted(cookie_str): for _ in range(3): new_str = unquote(cookie_str) if (new_str == cookie_str): return new_str cookie_str = new_str
[ "def", "ensure_unquoted", "(", "cookie_str", ")", ":", "for", "_", "in", "range", "(", "3", ")", ":", "new_str", "=", "unquote", "(", "cookie_str", ")", "if", "(", "new_str", "==", "cookie_str", ")", ":", "return", "new_str", "cookie_str", "=", "new_str"...
keep unquoting .
train
false
27,247
def himmelblau(individual): return ((((((individual[0] * individual[0]) + individual[1]) - 11) ** 2) + (((individual[0] + (individual[1] * individual[1])) - 7) ** 2)),)
[ "def", "himmelblau", "(", "individual", ")", ":", "return", "(", "(", "(", "(", "(", "(", "individual", "[", "0", "]", "*", "individual", "[", "0", "]", ")", "+", "individual", "[", "1", "]", ")", "-", "11", ")", "**", "2", ")", "+", "(", "("...
the himmelblaus function is multimodal with 4 defined minimums in :math:[-6 .
train
false
27,248
def functions_of(task): funcs = set() work = [task] sequence_types = {list, tuple} while work: new_work = [] for task in work: if (type(task) in sequence_types): if istask(task): funcs.add(unwrap_partial(task[0])) new_work += task[1:] else: new_work += task work = new_work return funcs
[ "def", "functions_of", "(", "task", ")", ":", "funcs", "=", "set", "(", ")", "work", "=", "[", "task", "]", "sequence_types", "=", "{", "list", ",", "tuple", "}", "while", "work", ":", "new_work", "=", "[", "]", "for", "task", "in", "work", ":", ...
set of functions contained within nested task examples .
train
false
27,249
@handle_response_format @treeio_login_required def widget_contact_me(request, response_format='html'): contact = request.user.profile.get_contact() if (not request.user.profile.has_permission(contact)): return user_denied(request, message="You don't have access to this Contact") types = Object.filter_by_request(request, ContactType.objects.order_by('name')) if contact: return render_to_response('identities/widgets/contact_me', {'contact': contact, 'types': types}, context_instance=RequestContext(request), response_format=response_format) else: return render_to_response('identities/widgets/contact_me_missing', {'types': types}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "widget_contact_me", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "contact", "=", "request", ".", "user", ".", "profile", ".", "get_contact", "(", ")", "if", "(", "not", "r...
my contact card .
train
false
27,250
def str_contains(arr, pat, case=True, flags=0, na=np.nan, regex=True): if regex: if (not case): flags |= re.IGNORECASE regex = re.compile(pat, flags=flags) if (regex.groups > 0): warnings.warn('This pattern has match groups. To actually get the groups, use str.extract.', UserWarning, stacklevel=3) f = (lambda x: bool(regex.search(x))) elif case: f = (lambda x: (pat in x)) else: upper_pat = pat.upper() f = (lambda x: (upper_pat in x)) uppered = _na_map((lambda x: x.upper()), arr) return _na_map(f, uppered, na, dtype=bool) return _na_map(f, arr, na, dtype=bool)
[ "def", "str_contains", "(", "arr", ",", "pat", ",", "case", "=", "True", ",", "flags", "=", "0", ",", "na", "=", "np", ".", "nan", ",", "regex", "=", "True", ")", ":", "if", "regex", ":", "if", "(", "not", "case", ")", ":", "flags", "|=", "re...
return boolean series/array whether given pattern/regex is contained in each string in the series/index .
train
true
27,251
def _log_add(*values): x = max(values) if (x > (- np.inf)): sum_diffs = 0 for value in values: sum_diffs += (2 ** (value - x)) return (x + np.log2(sum_diffs)) else: return x
[ "def", "_log_add", "(", "*", "values", ")", ":", "x", "=", "max", "(", "values", ")", "if", "(", "x", ">", "(", "-", "np", ".", "inf", ")", ")", ":", "sum_diffs", "=", "0", "for", "value", "in", "values", ":", "sum_diffs", "+=", "(", "2", "**...
adds the logged values .
train
false
27,252
@when(u'we run pgcli') def step_run_cli(context): context.cli = pexpect.spawnu(u'pgcli') context.exit_sent = False
[ "@", "when", "(", "u'we run pgcli'", ")", "def", "step_run_cli", "(", "context", ")", ":", "context", ".", "cli", "=", "pexpect", ".", "spawnu", "(", "u'pgcli'", ")", "context", ".", "exit_sent", "=", "False" ]
run the process using pexpect .
train
false
27,253
def bitstonats(X): return (logbasechange(2, np.e) * X)
[ "def", "bitstonats", "(", "X", ")", ":", "return", "(", "logbasechange", "(", "2", ",", "np", ".", "e", ")", "*", "X", ")" ]
converts from bits to nats .
train
false
27,256
def _purgeOutput(): if conf.purgeOutput: purge(paths.SQLMAP_OUTPUT_PATH)
[ "def", "_purgeOutput", "(", ")", ":", "if", "conf", ".", "purgeOutput", ":", "purge", "(", "paths", ".", "SQLMAP_OUTPUT_PATH", ")" ]
safely removes output directory .
train
false
27,259
def withSentence(): pass
[ "def", "withSentence", "(", ")", ":", "pass" ]
i have a sentence which spans multiple lines .
train
false
27,260
def test_get_log_destinations_empty(monkeypatch): assert (os.getenv('WALE_LOG_DESTINATION') is None) out = log_help.get_log_destinations() assert (out == ['stderr', 'syslog'])
[ "def", "test_get_log_destinations_empty", "(", "monkeypatch", ")", ":", "assert", "(", "os", ".", "getenv", "(", "'WALE_LOG_DESTINATION'", ")", "is", "None", ")", "out", "=", "log_help", ".", "get_log_destinations", "(", ")", "assert", "(", "out", "==", "[", ...
wale_log_destination is not set .
train
false
27,261
def _hydrate(params, container, cli_type, key, value): params['bag'] = {'ArgumentBaz': {'SomeValueAbc': (value / 100.0)}}
[ "def", "_hydrate", "(", "params", ",", "container", ",", "cli_type", ",", "key", ",", "value", ")", ":", "params", "[", "'bag'", "]", "=", "{", "'ArgumentBaz'", ":", "{", "'SomeValueAbc'", ":", "(", "value", "/", "100.0", ")", "}", "}" ]
an example to hydrate a complex structure with custom value logic .
train
false
27,262
def load_logfile_level(): if ('DIGITS_MODE_TEST' in os.environ): return logging.DEBUG elif ('DIGITS_LOGFILE_LEVEL' in os.environ): level = os.environ['DIGITS_LOGFILE_LEVEL'].strip().lower() if (level == 'debug'): return logging.DEBUG elif (level == 'info'): return logging.INFO elif (level == 'warning'): return logging.WARNING elif (level == 'error'): return logging.ERROR elif (level == 'critical'): return logging.CRITICAL else: raise ValueError(('Invalid value "%s" for logfile_level. Set DIGITS_LOGFILE_LEVEL to fix your configuration.' % level)) else: return logging.INFO
[ "def", "load_logfile_level", "(", ")", ":", "if", "(", "'DIGITS_MODE_TEST'", "in", "os", ".", "environ", ")", ":", "return", "logging", ".", "DEBUG", "elif", "(", "'DIGITS_LOGFILE_LEVEL'", "in", "os", ".", "environ", ")", ":", "level", "=", "os", ".", "e...
return the configured logging level .
train
false
27,263
def _failhard(): raise FileserverConfigError('Failed to load hg fileserver backend')
[ "def", "_failhard", "(", ")", ":", "raise", "FileserverConfigError", "(", "'Failed to load hg fileserver backend'", ")" ]
fatal fileserver configuration issue .
train
false
27,265
def test_distance_is_quantity(): Distance((2 * u.kpc)) d = Distance([2, 3.1], u.kpc) assert (d.shape == (2,)) a = d.view(np.ndarray) q = d.view(u.Quantity) a[0] = 1.2 q.value[1] = 5.4 assert (d[0].value == 1.2) assert (d[1].value == 5.4) q = u.Quantity(d, copy=True) q.value[1] = 0 assert (q.value[1] == 0) assert (d.value[1] != 0) d = Distance([(2 * u.kpc), (250.0 * u.pc)]) assert (d.unit is u.kpc) assert np.all((d.value == np.array([2.0, 0.25])))
[ "def", "test_distance_is_quantity", "(", ")", ":", "Distance", "(", "(", "2", "*", "u", ".", "kpc", ")", ")", "d", "=", "Distance", "(", "[", "2", ",", "3.1", "]", ",", "u", ".", "kpc", ")", "assert", "(", "d", ".", "shape", "==", "(", "2", "...
test that distance behaves like a proper quantity .
train
false
27,266
def urldefragauth(url): (scheme, netloc, path, params, query, fragment) = urlparse(url) if (not netloc): (netloc, path) = (path, netloc) netloc = netloc.rsplit('@', 1)[(-1)] return urlunparse((scheme, netloc, path, params, query, ''))
[ "def", "urldefragauth", "(", "url", ")", ":", "(", "scheme", ",", "netloc", ",", "path", ",", "params", ",", "query", ",", "fragment", ")", "=", "urlparse", "(", "url", ")", "if", "(", "not", "netloc", ")", ":", "(", "netloc", ",", "path", ")", "...
given a url remove the fragment and the authentication part .
train
true
27,268
def sources_list(ruby=None, runas=None, gem_bin=None): ret = _gem(['sources'], ruby, gem_bin=gem_bin, runas=runas) return ([] if (ret is False) else ret.splitlines()[2:])
[ "def", "sources_list", "(", "ruby", "=", "None", ",", "runas", "=", "None", ",", "gem_bin", "=", "None", ")", ":", "ret", "=", "_gem", "(", "[", "'sources'", "]", ",", "ruby", ",", "gem_bin", "=", "gem_bin", ",", "runas", "=", "runas", ")", "return...
list the configured gem sources .
train
true
27,269
def getEndpointFromPath(path, pathIndex): begin = path[(-1)] end = path[(-2)] endpointBegin = Endpoint() endpointEnd = Endpoint().getFromOtherPoint(endpointBegin, end) endpointBegin.getFromOtherPoint(endpointEnd, begin) endpointBegin.path = path endpointBegin.pathIndex = pathIndex return endpointBegin
[ "def", "getEndpointFromPath", "(", "path", ",", "pathIndex", ")", ":", "begin", "=", "path", "[", "(", "-", "1", ")", "]", "end", "=", "path", "[", "(", "-", "2", ")", "]", "endpointBegin", "=", "Endpoint", "(", ")", "endpointEnd", "=", "Endpoint", ...
get endpoint segment from a path .
train
false
27,270
def gauss_lobatto(n, n_digits): x = Dummy('x') p = legendre_poly((n - 1), x, polys=True) pd = p.diff(x) xi = [] w = [] for r in pd.real_roots(): if isinstance(r, RootOf): r = r.eval_rational((S(1) / (10 ** (n_digits + 2)))) xi.append(r.n(n_digits)) w.append((2 / ((n * (n - 1)) * (p.subs(x, r) ** 2))).n(n_digits)) xi.insert(0, (-1)) xi.append(1) w.insert(0, (S(2) / (n * (n - 1))).n(n_digits)) w.append((S(2) / (n * (n - 1))).n(n_digits)) return (xi, w)
[ "def", "gauss_lobatto", "(", "n", ",", "n_digits", ")", ":", "x", "=", "Dummy", "(", "'x'", ")", "p", "=", "legendre_poly", "(", "(", "n", "-", "1", ")", ",", "x", ",", "polys", "=", "True", ")", "pd", "=", "p", ".", "diff", "(", "x", ")", ...
computes the gauss-lobatto quadrature [1]_ points and weights .
train
false
27,273
def make_prefixless_processnode(test_case): return ProcessNode(initial_command_arguments=[])
[ "def", "make_prefixless_processnode", "(", "test_case", ")", ":", "return", "ProcessNode", "(", "initial_command_arguments", "=", "[", "]", ")" ]
create a processnode that just runs the given command with no prefix .
train
false
27,274
def verify_request(): query_string = (frappe.local.flags.signed_query_string or getattr(frappe.request, u'query_string', None)) valid = False if (u'&_signature=' in query_string): (params, signature) = query_string.split(u'&_signature=') given_signature = hmac.new(params.encode(u'utf-8')) given_signature.update(get_secret()) valid = (signature == given_signature.hexdigest()) if (not valid): frappe.respond_as_web_page(_(u'Invalid Link'), _(u'This link is invalid or expired. Please make sure you have pasted correctly.')) return valid
[ "def", "verify_request", "(", ")", ":", "query_string", "=", "(", "frappe", ".", "local", ".", "flags", ".", "signed_query_string", "or", "getattr", "(", "frappe", ".", "request", ",", "u'query_string'", ",", "None", ")", ")", "valid", "=", "False", "if", ...
verify if the incoming signed request if it is correct .
train
false
27,275
def _parse_html_table_of_contents(html): lines = html.splitlines()[2:(-2)] parents = [] ret = [] for line in lines: parser = TOCParser() parser.feed(line) if parser.title: try: href = parser.attrs[u'href'] except KeyError: continue title = parser.title nav = AnchorLink(title, href) if parents: parents[(-1)].children.append(nav) else: ret.append(nav) if line.endswith(u'<ul>'): parents.append(nav) elif line.startswith(u'</ul>'): if parents: parents.pop() if ret: ret[0].active = True return ret
[ "def", "_parse_html_table_of_contents", "(", "html", ")", ":", "lines", "=", "html", ".", "splitlines", "(", ")", "[", "2", ":", "(", "-", "2", ")", "]", "parents", "=", "[", "]", "ret", "=", "[", "]", "for", "line", "in", "lines", ":", "parser", ...
given a table of contents string that has been automatically generated by the markdown library .
train
false
27,276
def _get_drop_indices(event_times, method): small_idx = np.argmin([e.shape[0] for e in event_times]) small_e_times = event_times[small_idx] if (method not in ['mintime', 'truncate']): raise ValueError(('method must be either mintime or truncate, not %s' % method)) indices = list() for e in event_times: if (method == 'mintime'): mask = _minimize_time_diff(small_e_times, e) else: mask = np.ones(e.shape[0], dtype=bool) mask[small_e_times.shape[0]:] = False indices.append(np.where(np.logical_not(mask))[0]) return indices
[ "def", "_get_drop_indices", "(", "event_times", ",", "method", ")", ":", "small_idx", "=", "np", ".", "argmin", "(", "[", "e", ".", "shape", "[", "0", "]", "for", "e", "in", "event_times", "]", ")", "small_e_times", "=", "event_times", "[", "small_idx", ...
get indices to drop from multiple event timing lists .
train
false
27,279
@skip('silverlight') def test_max_args(): AssertErrorWithMatch(TypeError, '.*takes at most 4 arguments.*', file, 2, 3, 4, 5, 6, 7, 8, 9)
[ "@", "skip", "(", "'silverlight'", ")", "def", "test_max_args", "(", ")", ":", "AssertErrorWithMatch", "(", "TypeError", ",", "'.*takes at most 4 arguments.*'", ",", "file", ",", "2", ",", "3", ",", "4", ",", "5", ",", "6", ",", "7", ",", "8", ",", "9"...
verify the correct number of max args are reported .
train
false
27,280
def _arp_getnode(): import os, socket try: ip_addr = socket.gethostbyname(socket.gethostname()) except OSError: return None return _find_mac('arp', '-an', [os.fsencode(ip_addr)], (lambda i: (-1)))
[ "def", "_arp_getnode", "(", ")", ":", "import", "os", ",", "socket", "try", ":", "ip_addr", "=", "socket", ".", "gethostbyname", "(", "socket", ".", "gethostname", "(", ")", ")", "except", "OSError", ":", "return", "None", "return", "_find_mac", "(", "'a...
get the hardware address on unix by running arp .
train
false
27,282
def make_max_clique_graph(G, create_using=None): B = (create_using if (create_using is not None) else networkx.Graph()) B.clear() cliques = list(enumerate((set(c) for c in find_cliques(G)))) B.add_nodes_from((i for (i, c) in cliques)) clique_pairs = combinations(cliques, 2) B.add_edges_from(((i, j) for ((i, c1), (j, c2)) in clique_pairs if (c1 & c2))) return B
[ "def", "make_max_clique_graph", "(", "G", ",", "create_using", "=", "None", ")", ":", "B", "=", "(", "create_using", "if", "(", "create_using", "is", "not", "None", ")", "else", "networkx", ".", "Graph", "(", ")", ")", "B", ".", "clear", "(", ")", "c...
returns the maximal clique graph of the given graph .
train
false
27,284
@register.tag(name=u'user_display') def do_user_display(parser, token): bits = token.split_contents() if (len(bits) == 2): user = bits[1] as_var = None elif (len(bits) == 4): user = bits[1] as_var = bits[3] else: raise template.TemplateSyntaxError(u"'{0}' takes either two or four arguments".format(bits[0])) return UserDisplayNode(user, as_var)
[ "@", "register", ".", "tag", "(", "name", "=", "u'user_display'", ")", "def", "do_user_display", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "==", "2", ")", ":", ...
example usage:: {% user_display user %} or if you need to use in a {% blocktrans %}:: {% user_display user as user_display} {% blocktrans %}{{ user_display }} has sent you a gift .
train
false
27,285
def intToBin(i): i1 = (i % 256) i2 = int((i / 256)) return (chr(i1) + chr(i2))
[ "def", "intToBin", "(", "i", ")", ":", "i1", "=", "(", "i", "%", "256", ")", "i2", "=", "int", "(", "(", "i", "/", "256", ")", ")", "return", "(", "chr", "(", "i1", ")", "+", "chr", "(", "i2", ")", ")" ]
integer to two bytes .
train
true
27,286
def update_cluster(cluster_ref, cluster_spec): cluster_name = get_managed_object_name(cluster_ref) log.trace("Updating cluster '{0}'".format(cluster_name)) try: task = cluster_ref.ReconfigureComputeResource_Task(cluster_spec, modify=True) except vim.fault.VimFault as exc: raise salt.exceptions.VMwareApiError(exc.msg) except vmodl.RuntimeFault as exc: raise salt.exceptions.VMwareRuntimeError(exc.msg) wait_for_task(task, cluster_name, 'ClusterUpdateTask')
[ "def", "update_cluster", "(", "cluster_ref", ",", "cluster_spec", ")", ":", "cluster_name", "=", "get_managed_object_name", "(", "cluster_ref", ")", "log", ".", "trace", "(", "\"Updating cluster '{0}'\"", ".", "format", "(", "cluster_name", ")", ")", "try", ":", ...
updates a cluster in a datacenter .
train
false
27,287
def test_no_duplicate_modules(): import sys jedipath = os.path.dirname(os.path.abspath(jedi.__file__)) def is_submodule(m): try: filepath = m.__file__ except AttributeError: return False return os.path.abspath(filepath).startswith(jedipath) modules = list(filter(is_submodule, sys.modules.values())) top_modules = [m for m in modules if (not m.__name__.startswith('jedi.'))] for m in modules: if (m is jedi): continue for tm in top_modules: try: imported = getattr(m, tm.__name__) except AttributeError: continue if inspect.ismodule(imported): assert (imported is tm)
[ "def", "test_no_duplicate_modules", "(", ")", ":", "import", "sys", "jedipath", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "jedi", ".", "__file__", ")", ")", "def", "is_submodule", "(", "m", ")", ":", "try", ...
make sure that import hack works as expected .
train
false
27,288
def register_handler(handler): global _handler _handler = handler
[ "def", "register_handler", "(", "handler", ")", ":", "global", "_handler", "_handler", "=", "handler" ]
install application-specific wmf image handler .
train
false
27,289
def words2bytes(data, length=8): try: return data.tostring() except AttributeError: return np.uint64(data).tostring()
[ "def", "words2bytes", "(", "data", ",", "length", "=", "8", ")", ":", "try", ":", "return", "data", ".", "tostring", "(", ")", "except", "AttributeError", ":", "return", "np", ".", "uint64", "(", "data", ")", ".", "tostring", "(", ")" ]
return a length * 8 byte string from data .
train
false
27,290
def _calc_g(cosang, stiffness=4, num_lterms=50): factors = [(((2 * n) + 1) / ((((n ** stiffness) * ((n + 1) ** stiffness)) * 4) * np.pi)) for n in range(1, (num_lterms + 1))] return legval(cosang, ([0] + factors))
[ "def", "_calc_g", "(", "cosang", ",", "stiffness", "=", "4", ",", "num_lterms", "=", "50", ")", ":", "factors", "=", "[", "(", "(", "(", "2", "*", "n", ")", "+", "1", ")", "/", "(", "(", "(", "(", "n", "**", "stiffness", ")", "*", "(", "(",...
calculate spherical spline g function between points on a sphere .
train
false
27,293
def offset(image, xoffset, yoffset=None): if (yoffset is None): yoffset = xoffset image.load() return image._new(image.im.offset(xoffset, yoffset))
[ "def", "offset", "(", "image", ",", "xoffset", ",", "yoffset", "=", "None", ")", ":", "if", "(", "yoffset", "is", "None", ")", ":", "yoffset", "=", "xoffset", "image", ".", "load", "(", ")", "return", "image", ".", "_new", "(", "image", ".", "im", ...
returns a copy of the image where data has been offset by the given distances .
train
false
27,294
def closest_ref_length(references, hyp_len): ref_lens = (len(reference) for reference in references) closest_ref_len = min(ref_lens, key=(lambda ref_len: (abs((ref_len - hyp_len)), ref_len))) return closest_ref_len
[ "def", "closest_ref_length", "(", "references", ",", "hyp_len", ")", ":", "ref_lens", "=", "(", "len", "(", "reference", ")", "for", "reference", "in", "references", ")", "closest_ref_len", "=", "min", "(", "ref_lens", ",", "key", "=", "(", "lambda", "ref_...
this function finds the reference that is the closest length to the hypothesis .
train
false
27,295
def swapoff(name): on_ = swaps() if (name in on_): if (__grains__['kernel'] == 'SunOS'): if (__grains__['virtual'] != 'zone'): __salt__['cmd.run']('swap -a {0}'.format(name), python_shell=False) else: return False elif (__grains__['os'] != 'OpenBSD'): __salt__['cmd.run']('swapoff {0}'.format(name), python_shell=False) else: __salt__['cmd.run']('swapctl -d {0}'.format(name), python_shell=False) on_ = swaps() if (name in on_): return False return True return None
[ "def", "swapoff", "(", "name", ")", ":", "on_", "=", "swaps", "(", ")", "if", "(", "name", "in", "on_", ")", ":", "if", "(", "__grains__", "[", "'kernel'", "]", "==", "'SunOS'", ")", ":", "if", "(", "__grains__", "[", "'virtual'", "]", "!=", "'zo...
deactivate a named swap mount .
train
true
27,296
def test_fault_midstream(): pool = make_pool(1, 1) tpart = FakeTarPartition(1, explosive=Explosion('Boom')) pool.put(tpart) tpart = FakeTarPartition(1) with pytest.raises(Explosion): pool.put(tpart)
[ "def", "test_fault_midstream", "(", ")", ":", "pool", "=", "make_pool", "(", "1", ",", "1", ")", "tpart", "=", "FakeTarPartition", "(", "1", ",", "explosive", "=", "Explosion", "(", "'Boom'", ")", ")", "pool", ".", "put", "(", "tpart", ")", "tpart", ...
test if a previous upload fault is detected in calling .
train
false