id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
21,170
def countZipFileChunks(filename, chunksize):
    """Predict how many chunks extracting the entire zipfile will produce.

    filename: path to the zip archive.
    chunksize: chunk size used by the extraction, forwarded to countFileChunks.
    Returns the total chunk count summed over every member of the archive.
    """
    archive = ChunkingZipFile(filename)
    return sum(countFileChunks(info, chunksize) for info in archive.infolist())
[ "def", "countZipFileChunks", "(", "filename", ",", "chunksize", ")", ":", "totalchunks", "=", "0", "zf", "=", "ChunkingZipFile", "(", "filename", ")", "for", "info", "in", "zf", ".", "infolist", "(", ")", ":", "totalchunks", "+=", "countFileChunks", "(", "info", ",", "chunksize", ")", "return", "totalchunks" ]
predict the number of chunks that will be extracted from the entire zipfile .
train
false
21,171
@contextmanager
def mock_get_score(earned=0, possible=1):
    """Context manager that patches ``get_score`` to return a valid grade.

    earned/possible: raw score values; they are reused for the weighted
    values with weight=1, graded=True, attempted=True.
    Yields the mock so callers can inspect/adjust it.
    """
    with patch('lms.djangoapps.grades.new.subsection_grade.get_score') as mock_score:
        mock_score.return_value = ProblemScore(raw_earned=earned, raw_possible=possible, weighted_earned=earned, weighted_possible=possible, weight=1, graded=True, attempted=True)
        (yield mock_score)
[ "@", "contextmanager", "def", "mock_get_score", "(", "earned", "=", "0", ",", "possible", "=", "1", ")", ":", "with", "patch", "(", "'lms.djangoapps.grades.new.subsection_grade.get_score'", ")", "as", "mock_score", ":", "mock_score", ".", "return_value", "=", "ProblemScore", "(", "raw_earned", "=", "earned", ",", "raw_possible", "=", "possible", ",", "weighted_earned", "=", "earned", ",", "weighted_possible", "=", "possible", ",", "weight", "=", "1", ",", "graded", "=", "True", ",", "attempted", "=", "True", ")", "(", "yield", "mock_score", ")" ]
mocks the get_score function to return a valid grade .
train
false
21,172
def exhaust_stream(f):
    """Decorator for stream-handling methods: always exhaust the stream.

    The wrapped method's return value (or exception) is propagated
    unchanged; ``stream.exhaust()`` runs no matter how the call exits.
    """
    def exhausting(self, stream, *args, **kwargs):
        try:
            return f(self, stream, *args, **kwargs)
        finally:
            # Drain the stream even when the wrapped method raises.
            stream.exhaust()
    return update_wrapper(exhausting, f)
[ "def", "exhaust_stream", "(", "f", ")", ":", "def", "wrapper", "(", "self", ",", "stream", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "f", "(", "self", ",", "stream", ",", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "stream", ".", "exhaust", "(", ")", "return", "update_wrapper", "(", "wrapper", ",", "f", ")" ]
helper decorator for methods that exhausts the stream on return .
train
false
21,173
def annotate_axes(index, labels, points, offset_points, size, ax, **kwargs):
    """Annotate ``ax`` with one label per entry of ``index``.

    index: indices selecting entries from labels/points/offset_points.
    labels/points/offset_points: parallel sequences of text, anchor point
        and offset (in points) for each annotation.
    size: font size passed to ``ax.annotate``.
    kwargs: forwarded verbatim to every ``ax.annotate`` call.
    Returns ax for chaining.
    """
    for idx in index:
        ax.annotate(
            labels[idx],
            points[idx],
            xytext=offset_points[idx],
            textcoords='offset points',
            size=size,
            **kwargs
        )
    return ax
[ "def", "annotate_axes", "(", "index", ",", "labels", ",", "points", ",", "offset_points", ",", "size", ",", "ax", ",", "**", "kwargs", ")", ":", "for", "i", "in", "index", ":", "label", "=", "labels", "[", "i", "]", "point", "=", "points", "[", "i", "]", "offset", "=", "offset_points", "[", "i", "]", "ax", ".", "annotate", "(", "label", ",", "point", ",", "xytext", "=", "offset", ",", "textcoords", "=", "'offset points'", ",", "size", "=", "size", ",", "**", "kwargs", ")", "return", "ax" ]
annotate axes with labels .
train
false
21,174
def allocate_controller_key_and_unit_number(client_factory, devices, adapter_type):
    """Return (controller_key, unit_number, controller_spec) for a new disk.

    Inspects the current hardware device list and either reuses a free slot
    on an existing controller of the requested adapter_type (spec is None),
    or allocates a new controller spec with a temporary negative key.
    """
    # Unwrap the suds 'ArrayOfVirtualDevice' wrapper into a plain list.
    if (devices.__class__.__name__ == 'ArrayOfVirtualDevice'):
        devices = devices.VirtualDevice
    taken = _find_allocated_slots(devices)
    ret = None
    if (adapter_type == constants.ADAPTER_TYPE_IDE):
        ide_keys = [dev.key for dev in devices if _is_ide_controller(dev)]
        # IDE controllers expose 2 unit slots.
        ret = _find_controller_slot(ide_keys, taken, 2)
    elif (adapter_type in constants.SCSI_ADAPTER_TYPES):
        scsi_keys = [dev.key for dev in devices if _is_scsi_controller(dev)]
        # SCSI controllers expose 16 unit slots.
        ret = _find_controller_slot(scsi_keys, taken, 16)
    if ret:
        # Free slot found on an existing controller: no new spec needed.
        return (ret[0], ret[1], None)
    # No free slot: create a new controller, using a temporary negative key.
    controller_key = (-101)
    bus_number = 0
    if (adapter_type in constants.SCSI_ADAPTER_TYPES):
        bus_number = _get_bus_number_for_scsi_controller(devices)
    controller_spec = create_controller_spec(client_factory, controller_key, adapter_type, bus_number)
    return (controller_key, 0, controller_spec)
[ "def", "allocate_controller_key_and_unit_number", "(", "client_factory", ",", "devices", ",", "adapter_type", ")", ":", "if", "(", "devices", ".", "__class__", ".", "__name__", "==", "'ArrayOfVirtualDevice'", ")", ":", "devices", "=", "devices", ".", "VirtualDevice", "taken", "=", "_find_allocated_slots", "(", "devices", ")", "ret", "=", "None", "if", "(", "adapter_type", "==", "constants", ".", "ADAPTER_TYPE_IDE", ")", ":", "ide_keys", "=", "[", "dev", ".", "key", "for", "dev", "in", "devices", "if", "_is_ide_controller", "(", "dev", ")", "]", "ret", "=", "_find_controller_slot", "(", "ide_keys", ",", "taken", ",", "2", ")", "elif", "(", "adapter_type", "in", "constants", ".", "SCSI_ADAPTER_TYPES", ")", ":", "scsi_keys", "=", "[", "dev", ".", "key", "for", "dev", "in", "devices", "if", "_is_scsi_controller", "(", "dev", ")", "]", "ret", "=", "_find_controller_slot", "(", "scsi_keys", ",", "taken", ",", "16", ")", "if", "ret", ":", "return", "(", "ret", "[", "0", "]", ",", "ret", "[", "1", "]", ",", "None", ")", "controller_key", "=", "(", "-", "101", ")", "bus_number", "=", "0", "if", "(", "adapter_type", "in", "constants", ".", "SCSI_ADAPTER_TYPES", ")", ":", "bus_number", "=", "_get_bus_number_for_scsi_controller", "(", "devices", ")", "controller_spec", "=", "create_controller_spec", "(", "client_factory", ",", "controller_key", ",", "adapter_type", ",", "bus_number", ")", "return", "(", "controller_key", ",", "0", ",", "controller_spec", ")" ]
this function inspects the current set of hardware devices and returns controller_key and unit_number that can be used for attaching a new virtual disk to adapter with the given adapter_type .
train
false
21,175
def notify_new_translation(unit, oldunit, user):
    """Notify subscribed users about a new translation by e-mail.

    unit/oldunit: new and previous translation units.
    user: the acting user (excluded/filtered by the subscription query).
    """
    subscriptions = Profile.objects.subscribed_any_translation(
        unit.translation.subproject.project,
        unit.translation.language,
        user,
    )
    mails = [
        subscription.notify_any_translation(unit, oldunit)
        for subscription in subscriptions
    ]
    send_mails(mails)
[ "def", "notify_new_translation", "(", "unit", ",", "oldunit", ",", "user", ")", ":", "mails", "=", "[", "]", "subscriptions", "=", "Profile", ".", "objects", ".", "subscribed_any_translation", "(", "unit", ".", "translation", ".", "subproject", ".", "project", ",", "unit", ".", "translation", ".", "language", ",", "user", ")", "for", "subscription", "in", "subscriptions", ":", "mails", ".", "append", "(", "subscription", ".", "notify_any_translation", "(", "unit", ",", "oldunit", ")", ")", "send_mails", "(", "mails", ")" ]
notify subscribed users about new translation .
train
false
21,176
def dictfetchone(cursor):
    """Fetch the next row from ``cursor`` as a dict, or None when exhausted."""
    row = cursor.fetchone()
    return _dict_helper(cursor.description, row) if row else None
[ "def", "dictfetchone", "(", "cursor", ")", ":", "row", "=", "cursor", ".", "fetchone", "(", ")", "if", "(", "not", "row", ")", ":", "return", "None", "return", "_dict_helper", "(", "cursor", ".", "description", ",", "row", ")" ]
returns a row from the cursor as a dict .
train
false
21,177
def _fallback_get_mx_domains(domain):
    """Resolve MX records for ``domain`` by querying Google's DNS directly.

    Fallback path used when the normal resolver fails; best-effort by
    design, so any resolution problem yields an empty list.

    Fixes over the original:
    - the bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
      narrowed to ``except Exception``.
    - an empty answer section is handled explicitly instead of relying on
      an IndexError from ``answer[0]``.
    """
    try:
        query = dns.message.make_query(domain, dns.rdatatype.MX)
        answer_sections = dns.query.udp(query, GOOGLE_DNS_IP).answer
        if not answer_sections:
            # No answer section at all -> no MX records.
            return []
        return [a for a in answer_sections[0] if (a.rdtype == dns.rdatatype.MX)]
    except Exception:
        # Best-effort fallback: treat any resolver failure as "no records".
        return []
[ "def", "_fallback_get_mx_domains", "(", "domain", ")", ":", "try", ":", "query", "=", "dns", ".", "message", ".", "make_query", "(", "domain", ",", "dns", ".", "rdatatype", ".", "MX", ")", "answers", "=", "dns", ".", "query", ".", "udp", "(", "query", ",", "GOOGLE_DNS_IP", ")", ".", "answer", "[", "0", "]", "return", "[", "a", "for", "a", "in", "answers", "if", "(", "a", ".", "rdtype", "==", "dns", ".", "rdatatype", ".", "MX", ")", "]", "except", ":", "return", "[", "]" ]
sometimes dns .
train
false
21,179
def create_generic_db(jobs_dir, dataset_id, stage):
    """Create a generic DB for the given dataset job and stage.

    jobs_dir: jobs directory, or the literal string 'none' to use the
        configured default.
    dataset_id: id of the dataset job (its subdirectory under jobs_dir).
    stage: dataset stage to build the DB for.

    Raises IOError when the dataset directory does not exist.
    """
    if (jobs_dir == 'none'):
        # Sentinel 'none' means: fall back to the configured jobs dir.
        jobs_dir = digits.config.config_value('jobs_dir')
    dataset_dir = os.path.join(jobs_dir, dataset_id)
    if (not os.path.isdir(dataset_dir)):
        raise IOError(('Dataset dir %s does not exist' % dataset_dir))
    dataset = Job.load(dataset_dir)
    extension_id = dataset.extension_id
    extension_class = extensions.data.get_extension(extension_id)
    # Re-instantiate the data extension with the user settings saved on the job.
    extension = extension_class(**dataset.extension_userdata)
    feature_encoding = dataset.feature_encoding
    label_encoding = dataset.label_encoding
    batch_size = dataset.batch_size
    num_threads = dataset.num_threads
    force_same_shape = dataset.force_same_shape
    db_creator = DbCreator()
    db_creator.create_db(extension, stage, dataset_dir, batch_size, num_threads, feature_encoding, label_encoding, force_same_shape)
    logger.info('Generic DB creation Done')
[ "def", "create_generic_db", "(", "jobs_dir", ",", "dataset_id", ",", "stage", ")", ":", "if", "(", "jobs_dir", "==", "'none'", ")", ":", "jobs_dir", "=", "digits", ".", "config", ".", "config_value", "(", "'jobs_dir'", ")", "dataset_dir", "=", "os", ".", "path", ".", "join", "(", "jobs_dir", ",", "dataset_id", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dataset_dir", ")", ")", ":", "raise", "IOError", "(", "(", "'Dataset dir %s does not exist'", "%", "dataset_dir", ")", ")", "dataset", "=", "Job", ".", "load", "(", "dataset_dir", ")", "extension_id", "=", "dataset", ".", "extension_id", "extension_class", "=", "extensions", ".", "data", ".", "get_extension", "(", "extension_id", ")", "extension", "=", "extension_class", "(", "**", "dataset", ".", "extension_userdata", ")", "feature_encoding", "=", "dataset", ".", "feature_encoding", "label_encoding", "=", "dataset", ".", "label_encoding", "batch_size", "=", "dataset", ".", "batch_size", "num_threads", "=", "dataset", ".", "num_threads", "force_same_shape", "=", "dataset", ".", "force_same_shape", "db_creator", "=", "DbCreator", "(", ")", "db_creator", ".", "create_db", "(", "extension", ",", "stage", ",", "dataset_dir", ",", "batch_size", ",", "num_threads", ",", "feature_encoding", ",", "label_encoding", ",", "force_same_shape", ")", "logger", ".", "info", "(", "'Generic DB creation Done'", ")" ]
create a generic db .
train
false
21,180
def test_nested(a, b, c):
    """Generator fixture yielding nested closures over a, b and c.

    Yields, in order: closures returning a, b and c, then a closure
    computing (a + c) built through a closure factory — exercising
    serialization of nested/closed-over bundle structure.
    """
    def one():
        return a
    def two():
        return b
    def three():
        return c
    def new_closure(a, b):
        def sum():
            return (a + b)
        return sum
    (yield one)
    (yield two)
    (yield three)
    (yield new_closure(a, c))
[ "def", "test_nested", "(", "a", ",", "b", ",", "c", ")", ":", "def", "one", "(", ")", ":", "return", "a", "def", "two", "(", ")", ":", "return", "b", "def", "three", "(", ")", ":", "return", "c", "def", "new_closure", "(", "a", ",", "b", ")", ":", "def", "sum", "(", ")", ":", "return", "(", "a", "+", "b", ")", "return", "sum", "(", "yield", "one", ")", "(", "yield", "two", ")", "(", "yield", "three", ")", "(", "yield", "new_closure", "(", "a", ",", "c", ")", ")" ]
if the bundle structure is nested .
train
false
21,181
def _get_system_volume(vm_):
    """Construct the VM system Volume from the cloud profile config.

    Disk size comes from the profile's size preset, overridden by an
    explicit 'disk_size' entry; an optional 'disk_availability_zone'
    is applied to the volume when present.
    """
    ssh_keys = get_public_keys(vm_)
    disk_size = get_size(vm_)['disk']
    if ('disk_size' in vm_):
        # Explicit per-profile disk size overrides the size preset.
        disk_size = vm_['disk_size']
    volume = Volume(name='{0} Storage'.format(vm_['name']), size=disk_size, image=get_image(vm_)['id'], disk_type=get_disk_type(vm_), ssh_keys=ssh_keys)
    if ('disk_availability_zone' in vm_):
        volume.availability_zone = vm_['disk_availability_zone']
    return volume
[ "def", "_get_system_volume", "(", "vm_", ")", ":", "ssh_keys", "=", "get_public_keys", "(", "vm_", ")", "disk_size", "=", "get_size", "(", "vm_", ")", "[", "'disk'", "]", "if", "(", "'disk_size'", "in", "vm_", ")", ":", "disk_size", "=", "vm_", "[", "'disk_size'", "]", "volume", "=", "Volume", "(", "name", "=", "'{0} Storage'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "size", "=", "disk_size", ",", "image", "=", "get_image", "(", "vm_", ")", "[", "'id'", "]", ",", "disk_type", "=", "get_disk_type", "(", "vm_", ")", ",", "ssh_keys", "=", "ssh_keys", ")", "if", "(", "'disk_availability_zone'", "in", "vm_", ")", ":", "volume", ".", "availability_zone", "=", "vm_", "[", "'disk_availability_zone'", "]", "return", "volume" ]
construct vm system volume list from cloud profile config .
train
true
21,182
def chain(*tasks):
    """Link tasks sequentially: each task becomes upstream of the next.

    Given tasks (t1, t2, ..., tn), sets t1 -> t2 -> ... -> tn via
    ``set_downstream``. Zero or one task is a no-op.
    """
    for upstream, downstream in zip(tasks, tasks[1:]):
        upstream.set_downstream(downstream)
[ "def", "chain", "(", "*", "tasks", ")", ":", "for", "(", "up_task", ",", "down_task", ")", "in", "zip", "(", "tasks", "[", ":", "(", "-", "1", ")", "]", ",", "tasks", "[", "1", ":", "]", ")", ":", "up_task", ".", "set_downstream", "(", "down_task", ")" ]
given a number of tasks .
train
true
21,183
@ensure_csrf_cookie
@login_required
def export_git(request, course_key_string):
    """Serve the 'export to git' page; on ?action=push, push the course.

    Requires course-author access (raises PermissionDenied otherwise) and
    a configured giturl on the course. Renders export_git.html with the
    outcome message and a failure flag. (Python 2 code: uses ``unicode``.)
    """
    course_key = CourseKey.from_string(course_key_string)
    if (not has_course_author_access(request.user, course_key)):
        raise PermissionDenied()
    course_module = modulestore().get_course(course_key)
    failed = False
    log.debug('export_git course_module=%s', course_module)
    msg = ''
    if (('action' in request.GET) and course_module.giturl):
        if (request.GET['action'] == 'push'):
            try:
                git_export_utils.export_to_git(course_module.id, course_module.giturl, request.user)
                msg = _('Course successfully exported to git repository')
            except git_export_utils.GitExportError as ex:
                failed = True
                msg = unicode(ex)
    return render_to_response('export_git.html', {'context_course': course_module, 'msg': msg, 'failed': failed})
[ "@", "ensure_csrf_cookie", "@", "login_required", "def", "export_git", "(", "request", ",", "course_key_string", ")", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_key_string", ")", "if", "(", "not", "has_course_author_access", "(", "request", ".", "user", ",", "course_key", ")", ")", ":", "raise", "PermissionDenied", "(", ")", "course_module", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ")", "failed", "=", "False", "log", ".", "debug", "(", "'export_git course_module=%s'", ",", "course_module", ")", "msg", "=", "''", "if", "(", "(", "'action'", "in", "request", ".", "GET", ")", "and", "course_module", ".", "giturl", ")", ":", "if", "(", "request", ".", "GET", "[", "'action'", "]", "==", "'push'", ")", ":", "try", ":", "git_export_utils", ".", "export_to_git", "(", "course_module", ".", "id", ",", "course_module", ".", "giturl", ",", "request", ".", "user", ")", "msg", "=", "_", "(", "'Course successfully exported to git repository'", ")", "except", "git_export_utils", ".", "GitExportError", "as", "ex", ":", "failed", "=", "True", "msg", "=", "unicode", "(", "ex", ")", "return", "render_to_response", "(", "'export_git.html'", ",", "{", "'context_course'", ":", "course_module", ",", "'msg'", ":", "msg", ",", "'failed'", ":", "failed", "}", ")" ]
this method serves up the export to git page .
train
false
21,184
def sqrt_depth(p):
    """Return the maximum nesting depth of square roots in expression ``p``.

    Atoms have depth 0; for sums/products the maximum over the arguments
    is taken; a sqrt adds 1 to the depth of its base.
    """
    if p.is_Atom:
        return 0
    elif (p.is_Add or p.is_Mul):
        # Depths are ints; default_sort_key gives sympy's canonical ordering.
        return max([sqrt_depth(x) for x in p.args], key=default_sort_key)
    elif is_sqrt(p):
        return (sqrt_depth(p.base) + 1)
    else:
        return 0
[ "def", "sqrt_depth", "(", "p", ")", ":", "if", "p", ".", "is_Atom", ":", "return", "0", "elif", "(", "p", ".", "is_Add", "or", "p", ".", "is_Mul", ")", ":", "return", "max", "(", "[", "sqrt_depth", "(", "x", ")", "for", "x", "in", "p", ".", "args", "]", ",", "key", "=", "default_sort_key", ")", "elif", "is_sqrt", "(", "p", ")", ":", "return", "(", "sqrt_depth", "(", "p", ".", "base", ")", "+", "1", ")", "else", ":", "return", "0" ]
return the maximum depth of any square root argument of p .
train
false
21,185
def test_install_from_wheel_with_extras(script, data):
    """Test installing a wheel whose extra pulls in a second distribution.

    Installing complex-dist[simple] from local find-links must create the
    .dist-info folders for both complex-dist and the 'simple' extra dep.
    """
    result = script.pip('install', 'complex-dist[simple]', '--no-index', ('--find-links=' + data.find_links), expect_error=False)
    dist_info_folder = (script.site_packages / 'complex_dist-0.1.dist-info')
    assert (dist_info_folder in result.files_created), (dist_info_folder, result.files_created, result.stdout)
    dist_info_folder = (script.site_packages / 'simple.dist-0.1.dist-info')
    assert (dist_info_folder in result.files_created), (dist_info_folder, result.files_created, result.stdout)
[ "def", "test_install_from_wheel_with_extras", "(", "script", ",", "data", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'complex-dist[simple]'", ",", "'--no-index'", ",", "(", "'--find-links='", "+", "data", ".", "find_links", ")", ",", "expect_error", "=", "False", ")", "dist_info_folder", "=", "(", "script", ".", "site_packages", "/", "'complex_dist-0.1.dist-info'", ")", "assert", "(", "dist_info_folder", "in", "result", ".", "files_created", ")", ",", "(", "dist_info_folder", ",", "result", ".", "files_created", ",", "result", ".", "stdout", ")", "dist_info_folder", "=", "(", "script", ".", "site_packages", "/", "'simple.dist-0.1.dist-info'", ")", "assert", "(", "dist_info_folder", "in", "result", ".", "files_created", ")", ",", "(", "dist_info_folder", ",", "result", ".", "files_created", ",", "result", ".", "stdout", ")" ]
test installing from a wheel with extras .
train
false
21,186
def user_add_mail(user, kwargs):
    """Send a welcome/notification e-mail to a newly added jumpserver user.

    user: the new user object (name, username, role, email, uuid).
    kwargs: dict supplying 'password' and 'ssh_key_pwd' for the mail body.
    Mail title/body are Chinese text stored as unicode escapes; the body
    includes the web password, ssh key password and a key-download URL.
    """
    # Role code -> Chinese display name (SU=superadmin, GA=group admin, CU=common user).
    user_role = {'SU': u'\u8d85\u7ea7\u7ba1\u7406\u5458', 'GA': u'\u7ec4\u7ba1\u7406\u5458', 'CU': u'\u666e\u901a\u7528\u6237'}
    mail_title = (u'\u606d\u559c\u4f60\u7684\u8df3\u677f\u673a\u7528\u6237 %s \u6dfb\u52a0\u6210\u529f Jumpserver' % user.name)
    mail_msg = (u'\n Hi, %s\n \u60a8\u7684\u7528\u6237\u540d\uff1a %s\n \u60a8\u7684\u6743\u9650\uff1a %s\n \u60a8\u7684web\u767b\u5f55\u5bc6\u7801\uff1a %s\n \u60a8\u7684ssh\u5bc6\u94a5\u6587\u4ef6\u5bc6\u7801\uff1a %s\n \u5bc6\u94a5\u4e0b\u8f7d\u5730\u5740\uff1a %s/juser/key/down/?uuid=%s\n \u8bf4\u660e\uff1a \u8bf7\u767b\u9646\u8df3\u677f\u673a\u540e\u53f0\u4e0b\u8f7d\u5bc6\u94a5, \u7136\u540e\u4f7f\u7528\u5bc6\u94a5\u767b\u9646\u8df3\u677f\u673a\uff01\n ' % (user.name, user.username, user_role.get(user.role, u'\u666e\u901a\u7528\u6237'), kwargs.get('password'), kwargs.get('ssh_key_pwd'), URL, user.uuid))
    send_mail(mail_title, mail_msg, MAIL_FROM, [user.email], fail_silently=False)
[ "def", "user_add_mail", "(", "user", ",", "kwargs", ")", ":", "user_role", "=", "{", "'SU'", ":", "u'\\u8d85\\u7ea7\\u7ba1\\u7406\\u5458'", ",", "'GA'", ":", "u'\\u7ec4\\u7ba1\\u7406\\u5458'", ",", "'CU'", ":", "u'\\u666e\\u901a\\u7528\\u6237'", "}", "mail_title", "=", "(", "u'\\u606d\\u559c\\u4f60\\u7684\\u8df3\\u677f\\u673a\\u7528\\u6237 %s \\u6dfb\\u52a0\\u6210\\u529f Jumpserver'", "%", "user", ".", "name", ")", "mail_msg", "=", "(", "u'\\n Hi, %s\\n \\u60a8\\u7684\\u7528\\u6237\\u540d\\uff1a %s\\n \\u60a8\\u7684\\u6743\\u9650\\uff1a %s\\n \\u60a8\\u7684web\\u767b\\u5f55\\u5bc6\\u7801\\uff1a %s\\n \\u60a8\\u7684ssh\\u5bc6\\u94a5\\u6587\\u4ef6\\u5bc6\\u7801\\uff1a %s\\n \\u5bc6\\u94a5\\u4e0b\\u8f7d\\u5730\\u5740\\uff1a %s/juser/key/down/?uuid=%s\\n \\u8bf4\\u660e\\uff1a \\u8bf7\\u767b\\u9646\\u8df3\\u677f\\u673a\\u540e\\u53f0\\u4e0b\\u8f7d\\u5bc6\\u94a5, \\u7136\\u540e\\u4f7f\\u7528\\u5bc6\\u94a5\\u767b\\u9646\\u8df3\\u677f\\u673a\\uff01\\n '", "%", "(", "user", ".", "name", ",", "user", ".", "username", ",", "user_role", ".", "get", "(", "user", ".", "role", ",", "u'\\u666e\\u901a\\u7528\\u6237'", ")", ",", "kwargs", ".", "get", "(", "'password'", ")", ",", "kwargs", ".", "get", "(", "'ssh_key_pwd'", ")", ",", "URL", ",", "user", ".", "uuid", ")", ")", "send_mail", "(", "mail_title", ",", "mail_msg", ",", "MAIL_FROM", ",", "[", "user", ".", "email", "]", ",", "fail_silently", "=", "False", ")" ]
add user send mail .
train
false
21,187
def client_range_to_segment_range(client_start, client_end, segment_size):
    """Map a client byterange onto the byterange of the covering segments.

    client_start/client_end: client range bounds (either may be None,
        matching HTTP range semantics: a missing start makes client_end a
        suffix length counted from the end of the object).
    segment_size: size of each stored segment in bytes.
    Returns (segment_start, segment_end); each is None when the
    corresponding client bound was None (except the suffix case below).
    """
    if client_start is None:
        segment_start = None
    else:
        # Round down to the start of the segment containing client_start.
        segment_start = int(client_start // segment_size) * segment_size
    if client_end is None:
        segment_end = None
    elif client_start is not None:
        # Bounded range: last byte of the segment containing client_end.
        segment_end = ((int(client_end // segment_size) + 1) * segment_size) - 1
    else:
        # Suffix range: enough whole segments to cover the last
        # client_end bytes, plus one segment of slack.
        segment_end = int(math.ceil((float(client_end) / segment_size) + 1)) * segment_size
    return (segment_start, segment_end)
[ "def", "client_range_to_segment_range", "(", "client_start", ",", "client_end", ",", "segment_size", ")", ":", "segment_start", "=", "(", "(", "int", "(", "(", "client_start", "//", "segment_size", ")", ")", "*", "segment_size", ")", "if", "(", "client_start", "is", "not", "None", ")", "else", "None", ")", "segment_end", "=", "(", "None", "if", "(", "client_end", "is", "None", ")", "else", "(", "(", "(", "(", "int", "(", "(", "client_end", "//", "segment_size", ")", ")", "+", "1", ")", "*", "segment_size", ")", "-", "1", ")", "if", "(", "client_start", "is", "not", "None", ")", "else", "(", "int", "(", "math", ".", "ceil", "(", "(", "(", "float", "(", "client_end", ")", "/", "segment_size", ")", "+", "1", ")", ")", ")", "*", "segment_size", ")", ")", ")", "return", "(", "segment_start", ",", "segment_end", ")" ]
takes a byterange from the client and converts it into a byterange spanning the necessary segments .
train
false
21,188
def circvar(samples, high=(2 * pi), low=0, axis=None):
    """Compute the circular variance for samples assumed to be in a range.

    samples: array_like input values.
    high/low: bounds of the circular range (default full circle [0, 2*pi]).
    axis: axis along which to compute; None flattens the input.
    """
    # Map samples onto angles relative to the (low, high) range.
    (samples, ang) = _circfuncs_common(samples, high, low)
    S = sin(ang).mean(axis=axis)
    C = cos(ang).mean(axis=axis)
    # R: mean resultant length; R -> 1 means tightly clustered samples.
    R = hypot(S, C)
    # Angular variance 2*log(1/R), scaled back to the (low, high) range.
    return ((((((high - low) / 2.0) / pi) ** 2) * 2) * log((1 / R)))
[ "def", "circvar", "(", "samples", ",", "high", "=", "(", "2", "*", "pi", ")", ",", "low", "=", "0", ",", "axis", "=", "None", ")", ":", "(", "samples", ",", "ang", ")", "=", "_circfuncs_common", "(", "samples", ",", "high", ",", "low", ")", "S", "=", "sin", "(", "ang", ")", ".", "mean", "(", "axis", "=", "axis", ")", "C", "=", "cos", "(", "ang", ")", ".", "mean", "(", "axis", "=", "axis", ")", "R", "=", "hypot", "(", "S", ",", "C", ")", "return", "(", "(", "(", "(", "(", "(", "high", "-", "low", ")", "/", "2.0", ")", "/", "pi", ")", "**", "2", ")", "*", "2", ")", "*", "log", "(", "(", "1", "/", "R", ")", ")", ")" ]
compute the circular variance for samples assumed to be in a range parameters samples : array_like input array .
train
false
21,189
@pytest.mark.parametrize('repo_type, repo_url, repo_name', [('git', 'https://github.com/hello/world.git', 'world'), ('hg', 'https://bitbucket.org/foo/bar', 'bar')])
def test_clone_should_invoke_vcs_command(mocker, clone_dir, repo_type, repo_url, repo_name):
    """When clone() is called with a git/hg repo, the proper vcs clone and
    checkout commands should be invoked, and the repo dir returned.

    The vcs binary and subprocess layer are mocked, so only the command
    invocations and the computed repo directory are verified.
    """
    mocker.patch('cookiecutter.vcs.is_vcs_installed', autospec=True, return_value=True)
    mock_subprocess = mocker.patch('cookiecutter.vcs.subprocess.check_output', autospec=True)
    expected_repo_dir = os.path.normpath(os.path.join(clone_dir, repo_name))
    branch = 'foobar'
    repo_dir = vcs.clone(repo_url, checkout=branch, clone_to_dir=clone_dir, no_input=True)
    assert (repo_dir == expected_repo_dir)
    # Clone runs in the target parent dir; checkout runs inside the clone.
    mock_subprocess.assert_any_call([repo_type, 'clone', repo_url], cwd=clone_dir, stderr=subprocess.STDOUT)
    mock_subprocess.assert_any_call([repo_type, 'checkout', branch], cwd=expected_repo_dir, stderr=subprocess.STDOUT)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'repo_type, repo_url, repo_name'", ",", "[", "(", "'git'", ",", "'https://github.com/hello/world.git'", ",", "'world'", ")", ",", "(", "'hg'", ",", "'https://bitbucket.org/foo/bar'", ",", "'bar'", ")", "]", ")", "def", "test_clone_should_invoke_vcs_command", "(", "mocker", ",", "clone_dir", ",", "repo_type", ",", "repo_url", ",", "repo_name", ")", ":", "mocker", ".", "patch", "(", "'cookiecutter.vcs.is_vcs_installed'", ",", "autospec", "=", "True", ",", "return_value", "=", "True", ")", "mock_subprocess", "=", "mocker", ".", "patch", "(", "'cookiecutter.vcs.subprocess.check_output'", ",", "autospec", "=", "True", ")", "expected_repo_dir", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "clone_dir", ",", "repo_name", ")", ")", "branch", "=", "'foobar'", "repo_dir", "=", "vcs", ".", "clone", "(", "repo_url", ",", "checkout", "=", "branch", ",", "clone_to_dir", "=", "clone_dir", ",", "no_input", "=", "True", ")", "assert", "(", "repo_dir", "==", "expected_repo_dir", ")", "mock_subprocess", ".", "assert_any_call", "(", "[", "repo_type", ",", "'clone'", ",", "repo_url", "]", ",", "cwd", "=", "clone_dir", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "mock_subprocess", ".", "assert_any_call", "(", "[", "repo_type", ",", "'checkout'", ",", "branch", "]", ",", "cwd", "=", "expected_repo_dir", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")" ]
when clone() is called with a git/hg repo .
train
false
21,190
def subXMLRefs(s):
    """Return ``s`` with XML entity and character references replaced.

    Thin wrapper: applies the precompiled entity/char-reference substitution
    with the module's ``_replXMLRef`` replacement callback.
    """
    return re_entcharrefssub(_replXMLRef, s)
[ "def", "subXMLRefs", "(", "s", ")", ":", "return", "re_entcharrefssub", "(", "_replXMLRef", ",", "s", ")" ]
return the given html string with entity and char references replaced .
train
false
21,192
def is_modern_windows_install(version):
    """Return True when running on Windows with ``version`` >= 2.1.

    version: anything str() can render into a LooseVersion-comparable form.
    """
    parsed = LooseVersion(str(version))
    return bool(is_win() and (parsed >= LooseVersion('2.1')))
[ "def", "is_modern_windows_install", "(", "version", ")", ":", "version", "=", "LooseVersion", "(", "str", "(", "version", ")", ")", "if", "(", "is_win", "(", ")", "and", "(", "version", ">=", "LooseVersion", "(", "'2.1'", ")", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
the 2 .
train
true
21,193
def body_quopri_len(str):
    """Return the length of ``str`` after body quoted-printable encoding.

    Characters matching the module's ``bqre`` pattern expand to 3 bytes
    (=XX escape); all others contribute 1 byte.
    """
    return sum((3 if bqre.match(ch) else 1) for ch in str)
[ "def", "body_quopri_len", "(", "str", ")", ":", "count", "=", "0", "for", "c", "in", "str", ":", "if", "bqre", ".", "match", "(", "c", ")", ":", "count", "+=", "3", "else", ":", "count", "+=", "1", "return", "count" ]
return the length of str when it is encoded with body quopri .
train
false
21,194
def isValidImage(field_data, all_data):
    """Validator: check that the file-upload field data is a valid image.

    Raises ValidationError when no file content was submitted, or when PIL
    cannot open the content as an image.
    (Python 2 code: uses the ``raise Exc, arg`` syntax and cStringIO.)
    """
    from PIL import Image
    from cStringIO import StringIO
    try:
        content = field_data['content']
    except TypeError:
        # field_data isn't subscriptable -> nothing was uploaded.
        raise ValidationError, gettext('No file was submitted. Check the encoding type on the form.')
    try:
        Image.open(StringIO(content))
    except IOError:
        raise ValidationError, gettext('Upload a valid image. The file you uploaded was either not an image or a corrupted image.')
[ "def", "isValidImage", "(", "field_data", ",", "all_data", ")", ":", "from", "PIL", "import", "Image", "from", "cStringIO", "import", "StringIO", "try", ":", "content", "=", "field_data", "[", "'content'", "]", "except", "TypeError", ":", "raise", "ValidationError", ",", "gettext", "(", "'No file was submitted. Check the encoding type on the form.'", ")", "try", ":", "Image", ".", "open", "(", "StringIO", "(", "content", ")", ")", "except", "IOError", ":", "raise", "ValidationError", ",", "gettext", "(", "'Upload a valid image. The file you uploaded was either not an image or a corrupted image.'", ")" ]
checks that the file-upload field data contains a valid image .
train
false
21,195
def fstime_floor_secs(ns):
    """Return the largest whole-second count not greater than ``ns`` nanoseconds.

    ns: timestamp in nanoseconds (converted via int() first).

    Uses floor division (``//``) so the result is an integer floor on both
    Python 2 and 3; the original ``/`` is true division under Python 3 and
    would return a float (e.g. 1.5), violating the documented contract.
    """
    return int(ns) // (10 ** 9)
[ "def", "fstime_floor_secs", "(", "ns", ")", ":", "return", "(", "int", "(", "ns", ")", "/", "(", "10", "**", "9", ")", ")" ]
return largest integer not greater than ns / 10e8 .
train
false
21,196
def conflicting_single_link(scheme, link):
    """Return the existing link occupying ``link``'s single-capacity sink.

    When link.sink_channel has the ``single`` flag, look up any link in
    ``scheme`` already connected to the same sink node/channel and return
    it; otherwise (or when no such link exists) return None.
    """
    if not link.sink_channel.single:
        return None
    existing = scheme.find_links(sink_node=link.sink_node,
                                 sink_channel=link.sink_channel)
    if not existing:
        return None
    # A 'single' channel can hold at most one link.
    assert (len(existing) == 1)
    return existing[0]
[ "def", "conflicting_single_link", "(", "scheme", ",", "link", ")", ":", "if", "link", ".", "sink_channel", ".", "single", ":", "existing", "=", "scheme", ".", "find_links", "(", "sink_node", "=", "link", ".", "sink_node", ",", "sink_channel", "=", "link", ".", "sink_channel", ")", "if", "existing", ":", "assert", "(", "len", "(", "existing", ")", "==", "1", ")", "return", "existing", "[", "0", "]", "return", "None" ]
find and return an existing link in scheme connected to the same input channel as link if the channel has the single flag .
train
false
21,197
def STOU(val):
    """Identity passthrough: return ``val`` unchanged.

    Placeholder conversion hook (presumably system encoding -> utf-8 on
    platforms where a real conversion is needed — TODO confirm against the
    platform-specific definitions).
    """
    return val
[ "def", "STOU", "(", "val", ")", ":", "return", "val" ]
sys_code -> utf-8 .
train
false
21,198
@pytest.mark.cmd
@pytest.mark.django_db
def test_export_path(capfd, export_dir, cd_export_dir):
    """Export by --path at several granularities and check the output name.

    Covers: a language, a language/project, a single store file, and the
    root path (which produces the catch-all export.zip).
    """
    call_command('export', '--path=/language0')
    (out, err) = capfd.readouterr()
    assert ('language0.zip' in out)
    # Only the requested language must be exported.
    assert ('language1.zip' not in out)
    call_command('export', '--path=/language0/project0')
    (out, err) = capfd.readouterr()
    assert ('language0-project0.zip' in out)
    call_command('export', '--path=/language0/project0/store0.po')
    (out, err) = capfd.readouterr()
    assert ('store0.po' in out)
    call_command('export', '--path=/')
    (out, err) = capfd.readouterr()
    assert ('export.zip' in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "django_db", "def", "test_export_path", "(", "capfd", ",", "export_dir", ",", "cd_export_dir", ")", ":", "call_command", "(", "'export'", ",", "'--path=/language0'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'language0.zip'", "in", "out", ")", "assert", "(", "'language1.zip'", "not", "in", "out", ")", "call_command", "(", "'export'", ",", "'--path=/language0/project0'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'language0-project0.zip'", "in", "out", ")", "call_command", "(", "'export'", ",", "'--path=/language0/project0/store0.po'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'store0.po'", "in", "out", ")", "call_command", "(", "'export'", ",", "'--path=/'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'export.zip'", "in", "out", ")" ]
export a path testing variants of lang .
train
false
21,200
def convert_regex_to_flask_path(url_path):
    """Translate a regex-style URL pattern into a Flask path rule.

    Strips ``$`` anchors, rewrites each named group ``(?P<name>pattern)``
    into a Flask custom-converter segment ``<regex("pattern"):name>``, and
    drops a trailing optional slash (``/?``).
    """
    url_path = url_path.replace(u'$', u'')

    def to_flask_converter(match):
        (name, pattern) = match.groups()
        return u'<regex("{0}"):{1}>'.format(pattern, name)

    url_path = re.sub(u'\\(\\?P<(.*?)>(.*?)\\)', to_flask_converter, url_path)
    if url_path.endswith(u'/?'):
        url_path = url_path.rstrip(u'/?')
    return url_path
[ "def", "convert_regex_to_flask_path", "(", "url_path", ")", ":", "for", "token", "in", "[", "u'$'", "]", ":", "url_path", "=", "url_path", ".", "replace", "(", "token", ",", "u''", ")", "def", "caller", "(", "reg", ")", ":", "(", "match_name", ",", "match_pattern", ")", "=", "reg", ".", "groups", "(", ")", "return", "u'<regex(\"{0}\"):{1}>'", ".", "format", "(", "match_pattern", ",", "match_name", ")", "url_path", "=", "re", ".", "sub", "(", "u'\\\\(\\\\?P<(.*?)>(.*?)\\\\)'", ",", "caller", ",", "url_path", ")", "if", "url_path", ".", "endswith", "(", "u'/?'", ")", ":", "url_path", "=", "url_path", ".", "rstrip", "(", "u'/?'", ")", "return", "url_path" ]
converts a regex matching url to one that can be used with flask .
train
false
21,201
def _get_widget_content(content, response_format='html'):
    """Extract widget content from a rendered response.

    For 'ajax' responses the payload is JSON and the widget HTML lives at
    response.content.module_content; for anything else (and as a fallback
    when the JSON is malformed) the content is scanned for
    <!-- widget_content --> ... <!-- /widget_content --> markers.
    Returns the content wrapped in Markup ('' when nothing is found).

    Fixes over the original: the duplicated marker-scan branch is factored
    into one helper, and the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to the decode/lookup errors
    json access can actually raise.
    """
    regexp = '<!-- widget_content -->(?P<widget_content>.*?)<!-- /widget_content -->'

    def _from_markers(text):
        # Last marker block wins, matching the original loop behaviour.
        widget_content = ''
        for block in re.finditer(regexp, text, re.DOTALL):
            widget_content = block.group('widget_content').strip()
        return widget_content

    if (response_format == 'ajax'):
        try:
            ajax_content = json.loads(content)
            widget_content = ajax_content['response']['content']['module_content']
        except (ValueError, KeyError, TypeError):
            # Malformed or unexpected JSON payload: fall back to marker scan.
            widget_content = _from_markers(content)
    else:
        widget_content = _from_markers(content)
    return Markup(widget_content)
[ "def", "_get_widget_content", "(", "content", ",", "response_format", "=", "'html'", ")", ":", "widget_content", "=", "''", "regexp", "=", "'<!-- widget_content -->(?P<widget_content>.*?)<!-- /widget_content -->'", "if", "(", "response_format", "==", "'ajax'", ")", ":", "try", ":", "ajax_content", "=", "json", ".", "loads", "(", "content", ")", "widget_content", "=", "ajax_content", "[", "'response'", "]", "[", "'content'", "]", "[", "'module_content'", "]", "except", ":", "blocks", "=", "re", ".", "finditer", "(", "regexp", ",", "content", ",", "re", ".", "DOTALL", ")", "for", "block", "in", "blocks", ":", "widget_content", "=", "block", ".", "group", "(", "'widget_content'", ")", ".", "strip", "(", ")", "else", ":", "blocks", "=", "re", ".", "finditer", "(", "regexp", ",", "content", ",", "re", ".", "DOTALL", ")", "for", "block", "in", "blocks", ":", "widget_content", "=", "block", ".", "group", "(", "'widget_content'", ")", ".", "strip", "(", ")", "return", "Markup", "(", "widget_content", ")" ]
extracts widget content from rendred html .
train
false
21,202
@task def rebuild_search(ctx): from website.app import init_app import requests from website import settings init_app(routes=False, set_backends=True) if (not settings.ELASTIC_URI.startswith('http')): protocol = ('http://' if settings.DEBUG_MODE else 'https://') else: protocol = '' url = '{protocol}{uri}/{index}'.format(protocol=protocol, uri=settings.ELASTIC_URI.rstrip('/'), index=settings.ELASTIC_INDEX) print 'Deleting index {}'.format(settings.ELASTIC_INDEX) print '----- DELETE {}*'.format(url) requests.delete((url + '*')) print 'Creating index {}'.format(settings.ELASTIC_INDEX) print '----- PUT {}'.format(url) requests.put(url) migrate_search(ctx)
[ "@", "task", "def", "rebuild_search", "(", "ctx", ")", ":", "from", "website", ".", "app", "import", "init_app", "import", "requests", "from", "website", "import", "settings", "init_app", "(", "routes", "=", "False", ",", "set_backends", "=", "True", ")", "if", "(", "not", "settings", ".", "ELASTIC_URI", ".", "startswith", "(", "'http'", ")", ")", ":", "protocol", "=", "(", "'http://'", "if", "settings", ".", "DEBUG_MODE", "else", "'https://'", ")", "else", ":", "protocol", "=", "''", "url", "=", "'{protocol}{uri}/{index}'", ".", "format", "(", "protocol", "=", "protocol", ",", "uri", "=", "settings", ".", "ELASTIC_URI", ".", "rstrip", "(", "'/'", ")", ",", "index", "=", "settings", ".", "ELASTIC_INDEX", ")", "print", "'Deleting index {}'", ".", "format", "(", "settings", ".", "ELASTIC_INDEX", ")", "print", "'----- DELETE {}*'", ".", "format", "(", "url", ")", "requests", ".", "delete", "(", "(", "url", "+", "'*'", ")", ")", "print", "'Creating index {}'", ".", "format", "(", "settings", ".", "ELASTIC_INDEX", ")", "print", "'----- PUT {}'", ".", "format", "(", "url", ")", "requests", ".", "put", "(", "url", ")", "migrate_search", "(", "ctx", ")" ]
delete and recreate the index for elasticsearch .
train
false
21,203
def test_set_cookies_accept(config_stub, qtbot, monkeypatch): config_stub.data = CONFIG_ALL_COOKIES ram_jar = cookies.RAMCookieJar() cookie = QNetworkCookie('foo', 'bar') url = QUrl('http://example.com/') with qtbot.waitSignal(ram_jar.changed): assert ram_jar.setCookiesFromUrl([cookie], url) all_cookies = ram_jar.cookiesForUrl(url) assert (len(all_cookies) == 1) saved_cookie = all_cookies[0] expected = (cookie.name(), cookie.value()) assert saved_cookie.name(), (saved_cookie.value() == expected)
[ "def", "test_set_cookies_accept", "(", "config_stub", ",", "qtbot", ",", "monkeypatch", ")", ":", "config_stub", ".", "data", "=", "CONFIG_ALL_COOKIES", "ram_jar", "=", "cookies", ".", "RAMCookieJar", "(", ")", "cookie", "=", "QNetworkCookie", "(", "'foo'", ",", "'bar'", ")", "url", "=", "QUrl", "(", "'http://example.com/'", ")", "with", "qtbot", ".", "waitSignal", "(", "ram_jar", ".", "changed", ")", ":", "assert", "ram_jar", ".", "setCookiesFromUrl", "(", "[", "cookie", "]", ",", "url", ")", "all_cookies", "=", "ram_jar", ".", "cookiesForUrl", "(", "url", ")", "assert", "(", "len", "(", "all_cookies", ")", "==", "1", ")", "saved_cookie", "=", "all_cookies", "[", "0", "]", "expected", "=", "(", "cookie", ".", "name", "(", ")", ",", "cookie", ".", "value", "(", ")", ")", "assert", "saved_cookie", ".", "name", "(", ")", ",", "(", "saved_cookie", ".", "value", "(", ")", "==", "expected", ")" ]
test setcookiesfromurl with cookies enabled .
train
false
21,204
def rand_int(start=1, end=10): return random.randint(start, end)
[ "def", "rand_int", "(", "start", "=", "1", ",", "end", "=", "10", ")", ":", "return", "random", ".", "randint", "(", "start", ",", "end", ")" ]
returns a random integer number between the start and end number .
train
false
21,207
def test_inheritance_cycle(): class CycleA: pass class CycleB: pass try: CycleA.__bases__ = (CycleA,) AssertUnreachable() except TypeError: pass try: CycleA.__bases__ = (CycleB,) CycleB.__bases__ = (CycleA,) AssertUnreachable() except TypeError: pass
[ "def", "test_inheritance_cycle", "(", ")", ":", "class", "CycleA", ":", "pass", "class", "CycleB", ":", "pass", "try", ":", "CycleA", ".", "__bases__", "=", "(", "CycleA", ",", ")", "AssertUnreachable", "(", ")", "except", "TypeError", ":", "pass", "try", ":", "CycleA", ".", "__bases__", "=", "(", "CycleB", ",", ")", "CycleB", ".", "__bases__", "=", "(", "CycleA", ",", ")", "AssertUnreachable", "(", ")", "except", "TypeError", ":", "pass" ]
test for inheritance cycle .
train
false
21,208
def p_struct(p): val = _fill_in_struct(p[1], p[3]) _add_thrift_meta('structs', val)
[ "def", "p_struct", "(", "p", ")", ":", "val", "=", "_fill_in_struct", "(", "p", "[", "1", "]", ",", "p", "[", "3", "]", ")", "_add_thrift_meta", "(", "'structs'", ",", "val", ")" ]
struct : seen_struct { field_seq } .
train
false
21,209
def getAreaLoop(loop): areaLoopDouble = 0.0 for (pointIndex, point) in enumerate(loop): pointEnd = loop[((pointIndex + 1) % len(loop))] areaLoopDouble += ((point.real * pointEnd.imag) - (pointEnd.real * point.imag)) return (0.5 * areaLoopDouble)
[ "def", "getAreaLoop", "(", "loop", ")", ":", "areaLoopDouble", "=", "0.0", "for", "(", "pointIndex", ",", "point", ")", "in", "enumerate", "(", "loop", ")", ":", "pointEnd", "=", "loop", "[", "(", "(", "pointIndex", "+", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "areaLoopDouble", "+=", "(", "(", "point", ".", "real", "*", "pointEnd", ".", "imag", ")", "-", "(", "pointEnd", ".", "real", "*", "point", ".", "imag", ")", ")", "return", "(", "0.5", "*", "areaLoopDouble", ")" ]
get the area of a complex polygon .
train
false
21,210
def _get_unused_lun_ids(mappings): used_luns = _get_used_lun_ids_for_mappings(mappings) unused_luns = (set(range(utils.MAX_LUNS_PER_HOST)) - set(used_luns)) return unused_luns
[ "def", "_get_unused_lun_ids", "(", "mappings", ")", ":", "used_luns", "=", "_get_used_lun_ids_for_mappings", "(", "mappings", ")", "unused_luns", "=", "(", "set", "(", "range", "(", "utils", ".", "MAX_LUNS_PER_HOST", ")", ")", "-", "set", "(", "used_luns", ")", ")", "return", "unused_luns" ]
returns unused lun ids given mappings .
train
false
21,211
def CDL3WHITESOLDIERS(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDL3WHITESOLDIERS)
[ "def", "CDL3WHITESOLDIERS", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDL3WHITESOLDIERS", ")" ]
three advancing white soldiers .
train
false
21,212
def get_unique_name(check, prefix='', suffix='', length=None, skip=None): if length: _name = '_'.join([_ for _ in (prefix, '%s', suffix) if _]) for _ in xrange(1000): name = (_name % generate_random_string(length)) if check(name): return name else: _name = '_'.join([_ for _ in (prefix, '%s', suffix) if _]) for i in xrange(skip, (skip + 1000)): name = (_name % i) if check(name): return name raise StopIteration(('Fail to get unique name in 1000 iterations. (%s)' % _name))
[ "def", "get_unique_name", "(", "check", ",", "prefix", "=", "''", ",", "suffix", "=", "''", ",", "length", "=", "None", ",", "skip", "=", "None", ")", ":", "if", "length", ":", "_name", "=", "'_'", ".", "join", "(", "[", "_", "for", "_", "in", "(", "prefix", ",", "'%s'", ",", "suffix", ")", "if", "_", "]", ")", "for", "_", "in", "xrange", "(", "1000", ")", ":", "name", "=", "(", "_name", "%", "generate_random_string", "(", "length", ")", ")", "if", "check", "(", "name", ")", ":", "return", "name", "else", ":", "_name", "=", "'_'", ".", "join", "(", "[", "_", "for", "_", "in", "(", "prefix", ",", "'%s'", ",", "suffix", ")", "if", "_", "]", ")", "for", "i", "in", "xrange", "(", "skip", ",", "(", "skip", "+", "1000", ")", ")", ":", "name", "=", "(", "_name", "%", "i", ")", "if", "check", "(", "name", ")", ":", "return", "name", "raise", "StopIteration", "(", "(", "'Fail to get unique name in 1000 iterations. (%s)'", "%", "_name", ")", ")" ]
get unique name according to check function .
train
false
21,213
def _heappushpop_max(heap, item): if (heap and cmp_lt(item, heap[0])): (item, heap[0]) = (heap[0], item) _siftup_max(heap, 0) return item
[ "def", "_heappushpop_max", "(", "heap", ",", "item", ")", ":", "if", "(", "heap", "and", "cmp_lt", "(", "item", ",", "heap", "[", "0", "]", ")", ")", ":", "(", "item", ",", "heap", "[", "0", "]", ")", "=", "(", "heap", "[", "0", "]", ",", "item", ")", "_siftup_max", "(", "heap", ",", "0", ")", "return", "item" ]
maxheap version of a heappush followed by a heappop .
train
false
21,214
def compare_medians_ms(group_1, group_2, axis=None): (med_1, med_2) = (ma.median(group_1, axis=axis), ma.median(group_2, axis=axis)) (std_1, std_2) = (mstats.stde_median(group_1, axis=axis), mstats.stde_median(group_2, axis=axis)) W = (np.abs((med_1 - med_2)) / ma.sqrt(((std_1 ** 2) + (std_2 ** 2)))) return (1 - norm.cdf(W))
[ "def", "compare_medians_ms", "(", "group_1", ",", "group_2", ",", "axis", "=", "None", ")", ":", "(", "med_1", ",", "med_2", ")", "=", "(", "ma", ".", "median", "(", "group_1", ",", "axis", "=", "axis", ")", ",", "ma", ".", "median", "(", "group_2", ",", "axis", "=", "axis", ")", ")", "(", "std_1", ",", "std_2", ")", "=", "(", "mstats", ".", "stde_median", "(", "group_1", ",", "axis", "=", "axis", ")", ",", "mstats", ".", "stde_median", "(", "group_2", ",", "axis", "=", "axis", ")", ")", "W", "=", "(", "np", ".", "abs", "(", "(", "med_1", "-", "med_2", ")", ")", "/", "ma", ".", "sqrt", "(", "(", "(", "std_1", "**", "2", ")", "+", "(", "std_2", "**", "2", ")", ")", ")", ")", "return", "(", "1", "-", "norm", ".", "cdf", "(", "W", ")", ")" ]
compares the medians from two independent groups along the given axis .
train
false
21,217
def master_event(type, master=None): event_map = {'connected': '__master_connected', 'disconnected': '__master_disconnected', 'failback': '__master_failback', 'alive': '__master_alive'} if ((type == 'alive') and (master is not None)): return '{0}_{1}'.format(event_map.get(type), master) return event_map.get(type, None)
[ "def", "master_event", "(", "type", ",", "master", "=", "None", ")", ":", "event_map", "=", "{", "'connected'", ":", "'__master_connected'", ",", "'disconnected'", ":", "'__master_disconnected'", ",", "'failback'", ":", "'__master_failback'", ",", "'alive'", ":", "'__master_alive'", "}", "if", "(", "(", "type", "==", "'alive'", ")", "and", "(", "master", "is", "not", "None", ")", ")", ":", "return", "'{0}_{1}'", ".", "format", "(", "event_map", ".", "get", "(", "type", ")", ",", "master", ")", "return", "event_map", ".", "get", "(", "type", ",", "None", ")" ]
centralized master event function which will return event type based on event_map .
train
true
21,219
def make_next_param(login_url, current_url): l = urlparse(login_url) c = urlparse(current_url) if (((not l.scheme) or (l.scheme == c.scheme)) and ((not l.netloc) or (l.netloc == c.netloc))): return urlunparse(('', '', c.path, c.params, c.query, '')) return current_url
[ "def", "make_next_param", "(", "login_url", ",", "current_url", ")", ":", "l", "=", "urlparse", "(", "login_url", ")", "c", "=", "urlparse", "(", "current_url", ")", "if", "(", "(", "(", "not", "l", ".", "scheme", ")", "or", "(", "l", ".", "scheme", "==", "c", ".", "scheme", ")", ")", "and", "(", "(", "not", "l", ".", "netloc", ")", "or", "(", "l", ".", "netloc", "==", "c", ".", "netloc", ")", ")", ")", ":", "return", "urlunparse", "(", "(", "''", ",", "''", ",", "c", ".", "path", ",", "c", ".", "params", ",", "c", ".", "query", ",", "''", ")", ")", "return", "current_url" ]
reduces the scheme and host from a given url so it can be passed to the given login url more efficiently .
train
true
21,220
def ip_address(address): try: return IPv4Address(address) except (AddressValueError, NetmaskValueError): pass try: return IPv6Address(address) except (AddressValueError, NetmaskValueError): pass raise ValueError(('%r does not appear to be an IPv4 or IPv6 address' % address))
[ "def", "ip_address", "(", "address", ")", ":", "try", ":", "return", "IPv4Address", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "try", ":", "return", "IPv6Address", "(", "address", ")", "except", "(", "AddressValueError", ",", "NetmaskValueError", ")", ":", "pass", "raise", "ValueError", "(", "(", "'%r does not appear to be an IPv4 or IPv6 address'", "%", "address", ")", ")" ]
take an ip string/int and return an object of the correct type .
train
true
21,221
def _getitem_array_generic(context, builder, return_type, aryty, ary, index_types, indices): (dataptr, view_shapes, view_strides) = basic_indexing(context, builder, aryty, ary, index_types, indices) if isinstance(return_type, types.Buffer): retary = make_view(context, builder, aryty, ary, return_type, dataptr, view_shapes, view_strides) return retary._getvalue() else: assert (not view_shapes) return load_item(context, builder, aryty, dataptr)
[ "def", "_getitem_array_generic", "(", "context", ",", "builder", ",", "return_type", ",", "aryty", ",", "ary", ",", "index_types", ",", "indices", ")", ":", "(", "dataptr", ",", "view_shapes", ",", "view_strides", ")", "=", "basic_indexing", "(", "context", ",", "builder", ",", "aryty", ",", "ary", ",", "index_types", ",", "indices", ")", "if", "isinstance", "(", "return_type", ",", "types", ".", "Buffer", ")", ":", "retary", "=", "make_view", "(", "context", ",", "builder", ",", "aryty", ",", "ary", ",", "return_type", ",", "dataptr", ",", "view_shapes", ",", "view_strides", ")", "return", "retary", ".", "_getvalue", "(", ")", "else", ":", "assert", "(", "not", "view_shapes", ")", "return", "load_item", "(", "context", ",", "builder", ",", "aryty", ",", "dataptr", ")" ]
return the result of indexing *ary* with the given *indices* .
train
false
21,222
def get_account_for_tenant(test_auth, tenant_id): return ('%s%s' % (test_auth.reseller_prefixes[0], tenant_id))
[ "def", "get_account_for_tenant", "(", "test_auth", ",", "tenant_id", ")", ":", "return", "(", "'%s%s'", "%", "(", "test_auth", ".", "reseller_prefixes", "[", "0", "]", ",", "tenant_id", ")", ")" ]
convenience function reduces unit test churn .
train
false
21,223
def isInThreadPool(): threadpool = reactor.getThreadPool() return ((threadpool is not None) and (current_thread() in threadpool.threads))
[ "def", "isInThreadPool", "(", ")", ":", "threadpool", "=", "reactor", ".", "getThreadPool", "(", ")", "return", "(", "(", "threadpool", "is", "not", "None", ")", "and", "(", "current_thread", "(", ")", "in", "threadpool", ".", "threads", ")", ")" ]
check if we are currently on one of twisted threadpool threads .
train
false
21,225
def _quantile(data, quantile): index = (quantile * (len(data) - 1)) bottom_index = int(floor(index)) top_index = int(ceil(index)) difference = (index - bottom_index) output = (((1 - difference) * data[bottom_index]) + (difference * data[top_index])) return output
[ "def", "_quantile", "(", "data", ",", "quantile", ")", ":", "index", "=", "(", "quantile", "*", "(", "len", "(", "data", ")", "-", "1", ")", ")", "bottom_index", "=", "int", "(", "floor", "(", "index", ")", ")", "top_index", "=", "int", "(", "ceil", "(", "index", ")", ")", "difference", "=", "(", "index", "-", "bottom_index", ")", "output", "=", "(", "(", "(", "1", "-", "difference", ")", "*", "data", "[", "bottom_index", "]", ")", "+", "(", "difference", "*", "data", "[", "top_index", "]", ")", ")", "return", "output" ]
gets a single quantile value for a dataset using r .
train
false
21,226
def compute_alpha(n): coeffs = mp.taylor(eta, 0, (n - 1)) return lagrange_inversion(coeffs)
[ "def", "compute_alpha", "(", "n", ")", ":", "coeffs", "=", "mp", ".", "taylor", "(", "eta", ",", "0", ",", "(", "n", "-", "1", ")", ")", "return", "lagrange_inversion", "(", "coeffs", ")" ]
alpha_n from dlmf 8 .
train
false
21,227
def _abbreviate(text, threshold): if ((text is not None) and (len(text) > threshold)): text = (text[:threshold] + '...') return text
[ "def", "_abbreviate", "(", "text", ",", "threshold", ")", ":", "if", "(", "(", "text", "is", "not", "None", ")", "and", "(", "len", "(", "text", ")", ">", "threshold", ")", ")", ":", "text", "=", "(", "text", "[", ":", "threshold", "]", "+", "'...'", ")", "return", "text" ]
abbreviate the given text to threshold chars and append an ellipsis if its length exceeds threshold; used for logging; note: the resulting text could be longer than threshold due to the ellipsis .
train
true
21,228
def core(): from fabtools.require.deb import package as require_deb_package from fabtools.require.rpm import package as require_rpm_package family = distrib_family() if (not files.exists('/usr/bin/sudo')): raise Exception(('Please install the sudo package and execute adduser %s sudo' % env.user)) if (not files.exists('/usr/bin/docker')): if (family == 'debian'): require_deb_package('curl') elif (family == 'redhat'): require_rpm_package('curl') else: raise UnsupportedFamily(supported=['debian', 'redhat']) run_as_root('curl -sSL https://get.docker.com/ | sh')
[ "def", "core", "(", ")", ":", "from", "fabtools", ".", "require", ".", "deb", "import", "package", "as", "require_deb_package", "from", "fabtools", ".", "require", ".", "rpm", "import", "package", "as", "require_rpm_package", "family", "=", "distrib_family", "(", ")", "if", "(", "not", "files", ".", "exists", "(", "'/usr/bin/sudo'", ")", ")", ":", "raise", "Exception", "(", "(", "'Please install the sudo package and execute adduser %s sudo'", "%", "env", ".", "user", ")", ")", "if", "(", "not", "files", ".", "exists", "(", "'/usr/bin/docker'", ")", ")", ":", "if", "(", "family", "==", "'debian'", ")", ":", "require_deb_package", "(", "'curl'", ")", "elif", "(", "family", "==", "'redhat'", ")", ":", "require_rpm_package", "(", "'curl'", ")", "else", ":", "raise", "UnsupportedFamily", "(", "supported", "=", "[", "'debian'", ",", "'redhat'", "]", ")", "run_as_root", "(", "'curl -sSL https://get.docker.com/ | sh'", ")" ]
require the docker core installation .
train
false
21,229
def display_bitmask(kind, bitmap, value): col1_width = max(map(len, (list(bitmap.keys()) + [kind]))) col2_width = 7 FMT = '{name:>{col1_width}} {value:>{col2_width}} {description}' print(FMT.format(name=kind, value='Value', description='Description', col1_width=col1_width, col2_width=col2_width)) print('{0} {1} {2}'.format(('-' * col1_width), ('-' * col2_width), ('-' * max(map(len, bitmap.values()))))) for (flag_name, description) in bitmap.items(): try: bitmask = getattr(termios, flag_name) bit_val = ('on' if bool((value & bitmask)) else 'off') except AttributeError: bit_val = 'undef' print(FMT.format(name=flag_name, value=bit_val, description=description, col1_width=col1_width, col2_width=col2_width)) print()
[ "def", "display_bitmask", "(", "kind", ",", "bitmap", ",", "value", ")", ":", "col1_width", "=", "max", "(", "map", "(", "len", ",", "(", "list", "(", "bitmap", ".", "keys", "(", ")", ")", "+", "[", "kind", "]", ")", ")", ")", "col2_width", "=", "7", "FMT", "=", "'{name:>{col1_width}} {value:>{col2_width}} {description}'", "print", "(", "FMT", ".", "format", "(", "name", "=", "kind", ",", "value", "=", "'Value'", ",", "description", "=", "'Description'", ",", "col1_width", "=", "col1_width", ",", "col2_width", "=", "col2_width", ")", ")", "print", "(", "'{0} {1} {2}'", ".", "format", "(", "(", "'-'", "*", "col1_width", ")", ",", "(", "'-'", "*", "col2_width", ")", ",", "(", "'-'", "*", "max", "(", "map", "(", "len", ",", "bitmap", ".", "values", "(", ")", ")", ")", ")", ")", ")", "for", "(", "flag_name", ",", "description", ")", "in", "bitmap", ".", "items", "(", ")", ":", "try", ":", "bitmask", "=", "getattr", "(", "termios", ",", "flag_name", ")", "bit_val", "=", "(", "'on'", "if", "bool", "(", "(", "value", "&", "bitmask", ")", ")", "else", "'off'", ")", "except", "AttributeError", ":", "bit_val", "=", "'undef'", "print", "(", "FMT", ".", "format", "(", "name", "=", "flag_name", ",", "value", "=", "bit_val", ",", "description", "=", "description", ",", "col1_width", "=", "col1_width", ",", "col2_width", "=", "col2_width", ")", ")", "print", "(", ")" ]
display all matching bitmask values for value given bitmap .
train
false
21,231
@task_failure.connect def process_failure_signal(exception, traceback, sender, task_id, signal, args, kwargs, einfo, **kw): exc_info = (type(exception), exception, traceback) log.error(u'Celery TASK exception: {0.__name__}: {1}'.format(*exc_info), exc_info=exc_info, extra={'data': {'task_id': task_id, 'sender': sender, 'args': args, 'kwargs': kwargs}})
[ "@", "task_failure", ".", "connect", "def", "process_failure_signal", "(", "exception", ",", "traceback", ",", "sender", ",", "task_id", ",", "signal", ",", "args", ",", "kwargs", ",", "einfo", ",", "**", "kw", ")", ":", "exc_info", "=", "(", "type", "(", "exception", ")", ",", "exception", ",", "traceback", ")", "log", ".", "error", "(", "u'Celery TASK exception: {0.__name__}: {1}'", ".", "format", "(", "*", "exc_info", ")", ",", "exc_info", "=", "exc_info", ",", "extra", "=", "{", "'data'", ":", "{", "'task_id'", ":", "task_id", ",", "'sender'", ":", "sender", ",", "'args'", ":", "args", ",", "'kwargs'", ":", "kwargs", "}", "}", ")" ]
catch any task failure signals from within our worker processes and log them as exceptions .
train
false
21,233
def get_credit_requirement_status(course_key, username, namespace=None, name=None): requirements = CreditRequirement.get_course_requirements(course_key, namespace=namespace, name=name) requirement_statuses = CreditRequirementStatus.get_statuses(requirements, username) requirement_statuses = dict(((o.requirement, o) for o in requirement_statuses)) statuses = [] for requirement in requirements: requirement_status = requirement_statuses.get(requirement) statuses.append({'namespace': requirement.namespace, 'name': requirement.name, 'display_name': requirement.display_name, 'criteria': requirement.criteria, 'reason': (requirement_status.reason if requirement_status else None), 'status': (requirement_status.status if requirement_status else None), 'status_date': (requirement_status.modified if requirement_status else None), 'order': requirement.order}) return statuses
[ "def", "get_credit_requirement_status", "(", "course_key", ",", "username", ",", "namespace", "=", "None", ",", "name", "=", "None", ")", ":", "requirements", "=", "CreditRequirement", ".", "get_course_requirements", "(", "course_key", ",", "namespace", "=", "namespace", ",", "name", "=", "name", ")", "requirement_statuses", "=", "CreditRequirementStatus", ".", "get_statuses", "(", "requirements", ",", "username", ")", "requirement_statuses", "=", "dict", "(", "(", "(", "o", ".", "requirement", ",", "o", ")", "for", "o", "in", "requirement_statuses", ")", ")", "statuses", "=", "[", "]", "for", "requirement", "in", "requirements", ":", "requirement_status", "=", "requirement_statuses", ".", "get", "(", "requirement", ")", "statuses", ".", "append", "(", "{", "'namespace'", ":", "requirement", ".", "namespace", ",", "'name'", ":", "requirement", ".", "name", ",", "'display_name'", ":", "requirement", ".", "display_name", ",", "'criteria'", ":", "requirement", ".", "criteria", ",", "'reason'", ":", "(", "requirement_status", ".", "reason", "if", "requirement_status", "else", "None", ")", ",", "'status'", ":", "(", "requirement_status", ".", "status", "if", "requirement_status", "else", "None", ")", ",", "'status_date'", ":", "(", "requirement_status", ".", "modified", "if", "requirement_status", "else", "None", ")", ",", "'order'", ":", "requirement", ".", "order", "}", ")", "return", "statuses" ]
retrieve the users status for each credit requirement in the course .
train
false
21,234
def removeTrueFromDictionary(dictionary, key): if (key in dictionary): if getBooleanFromValue(dictionary[key]): del dictionary[key]
[ "def", "removeTrueFromDictionary", "(", "dictionary", ",", "key", ")", ":", "if", "(", "key", "in", "dictionary", ")", ":", "if", "getBooleanFromValue", "(", "dictionary", "[", "key", "]", ")", ":", "del", "dictionary", "[", "key", "]" ]
remove key from the dictionary in the value is true .
train
false
21,236
def addToProfileMenu(profileSelection, profileType, repository): pluginFileNames = skeinforge_profile.getPluginFileNames() craftTypeName = skeinforge_profile.getCraftTypeName() pluginModule = skeinforge_profile.getCraftTypePluginModule() profilePluginSettings = settings.getReadRepository(pluginModule.getNewRepository()) for pluginFileName in pluginFileNames: skeinforge_profile.ProfileTypeMenuRadio().getFromMenuButtonDisplay(profileType, pluginFileName, repository, (craftTypeName == pluginFileName)) for profileName in profilePluginSettings.profileList.value: skeinforge_profile.ProfileSelectionMenuRadio().getFromMenuButtonDisplay(profileSelection, profileName, repository, (profileName == profilePluginSettings.profileListbox.value))
[ "def", "addToProfileMenu", "(", "profileSelection", ",", "profileType", ",", "repository", ")", ":", "pluginFileNames", "=", "skeinforge_profile", ".", "getPluginFileNames", "(", ")", "craftTypeName", "=", "skeinforge_profile", ".", "getCraftTypeName", "(", ")", "pluginModule", "=", "skeinforge_profile", ".", "getCraftTypePluginModule", "(", ")", "profilePluginSettings", "=", "settings", ".", "getReadRepository", "(", "pluginModule", ".", "getNewRepository", "(", ")", ")", "for", "pluginFileName", "in", "pluginFileNames", ":", "skeinforge_profile", ".", "ProfileTypeMenuRadio", "(", ")", ".", "getFromMenuButtonDisplay", "(", "profileType", ",", "pluginFileName", ",", "repository", ",", "(", "craftTypeName", "==", "pluginFileName", ")", ")", "for", "profileName", "in", "profilePluginSettings", ".", "profileList", ".", "value", ":", "skeinforge_profile", ".", "ProfileSelectionMenuRadio", "(", ")", ".", "getFromMenuButtonDisplay", "(", "profileSelection", ",", "profileName", ",", "repository", ",", "(", "profileName", "==", "profilePluginSettings", ".", "profileListbox", ".", "value", ")", ")" ]
add a profile menu .
train
false
21,237
def list_languages(): translate_client = translate.Client() results = translate_client.get_languages() for language in results: print u'{name} ({language})'.format(**language)
[ "def", "list_languages", "(", ")", ":", "translate_client", "=", "translate", ".", "Client", "(", ")", "results", "=", "translate_client", ".", "get_languages", "(", ")", "for", "language", "in", "results", ":", "print", "u'{name} ({language})'", ".", "format", "(", "**", "language", ")" ]
lists all available languages .
train
false
21,239
def lowerstrip(s, all=False): return strip_punc(s.lower().strip(), all=all)
[ "def", "lowerstrip", "(", "s", ",", "all", "=", "False", ")", ":", "return", "strip_punc", "(", "s", ".", "lower", "(", ")", ".", "strip", "(", ")", ",", "all", "=", "all", ")" ]
makes text all lowercase and strips punctuation and whitespace .
train
false
21,240
def _has_access_course_key(user, action, course_key): checkers = {'staff': (lambda : _has_staff_access_to_location(user, None, course_key)), 'instructor': (lambda : _has_instructor_access_to_location(user, None, course_key))} return _dispatch(checkers, action, user, course_key)
[ "def", "_has_access_course_key", "(", "user", ",", "action", ",", "course_key", ")", ":", "checkers", "=", "{", "'staff'", ":", "(", "lambda", ":", "_has_staff_access_to_location", "(", "user", ",", "None", ",", "course_key", ")", ")", ",", "'instructor'", ":", "(", "lambda", ":", "_has_instructor_access_to_location", "(", "user", ",", "None", ",", "course_key", ")", ")", "}", "return", "_dispatch", "(", "checkers", ",", "action", ",", "user", ",", "course_key", ")" ]
check if user has access to the course with this course_key valid actions: staff : true if the user has staff access to this location instructor : true if the user has staff access to this location .
train
false
21,241
def get_export_url(component): return get_site_url(reverse('git-export', kwargs={'project': component.project.slug, 'subproject': component.slug, 'path': ''}))
[ "def", "get_export_url", "(", "component", ")", ":", "return", "get_site_url", "(", "reverse", "(", "'git-export'", ",", "kwargs", "=", "{", "'project'", ":", "component", ".", "project", ".", "slug", ",", "'subproject'", ":", "component", ".", "slug", ",", "'path'", ":", "''", "}", ")", ")" ]
returns git export url for component .
train
false
21,242
def apply_and_enforce(func, args, kwargs, meta): df = func(*args, **kwargs) if isinstance(df, (pd.DataFrame, pd.Series, pd.Index)): if (len(df) == 0): return meta c = (meta.columns if isinstance(df, pd.DataFrame) else meta.name) return _rename(c, df) return df
[ "def", "apply_and_enforce", "(", "func", ",", "args", ",", "kwargs", ",", "meta", ")", ":", "df", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "df", ",", "(", "pd", ".", "DataFrame", ",", "pd", ".", "Series", ",", "pd", ".", "Index", ")", ")", ":", "if", "(", "len", "(", "df", ")", "==", "0", ")", ":", "return", "meta", "c", "=", "(", "meta", ".", "columns", "if", "isinstance", "(", "df", ",", "pd", ".", "DataFrame", ")", "else", "meta", ".", "name", ")", "return", "_rename", "(", "c", ",", "df", ")", "return", "df" ]
apply a function .
train
false
21,243
def formatEventAsClassicLogText(event, formatTime=formatTime): eventText = formatEvent(event) if ('log_failure' in event): try: traceback = event['log_failure'].getTraceback() except: traceback = u'(UNABLE TO OBTAIN TRACEBACK FROM EVENT)\n' eventText = u'\n'.join((eventText, traceback)) if (not eventText): return None eventText = eventText.replace(u'\n', u'\n DCTB ') timeStamp = formatTime(event.get('log_time', None)) system = event.get('log_system', None) if (system is None): level = event.get('log_level', None) if (level is None): levelName = u'-' else: levelName = level.name system = u'{namespace}#{level}'.format(namespace=event.get('log_namespace', u'-'), level=levelName) else: try: system = unicode(system) except Exception: system = u'UNFORMATTABLE' return u'{timeStamp} [{system}] {event}\n'.format(timeStamp=timeStamp, system=system, event=eventText)
[ "def", "formatEventAsClassicLogText", "(", "event", ",", "formatTime", "=", "formatTime", ")", ":", "eventText", "=", "formatEvent", "(", "event", ")", "if", "(", "'log_failure'", "in", "event", ")", ":", "try", ":", "traceback", "=", "event", "[", "'log_failure'", "]", ".", "getTraceback", "(", ")", "except", ":", "traceback", "=", "u'(UNABLE TO OBTAIN TRACEBACK FROM EVENT)\\n'", "eventText", "=", "u'\\n'", ".", "join", "(", "(", "eventText", ",", "traceback", ")", ")", "if", "(", "not", "eventText", ")", ":", "return", "None", "eventText", "=", "eventText", ".", "replace", "(", "u'\\n'", ",", "u'\\n DCTB '", ")", "timeStamp", "=", "formatTime", "(", "event", ".", "get", "(", "'log_time'", ",", "None", ")", ")", "system", "=", "event", ".", "get", "(", "'log_system'", ",", "None", ")", "if", "(", "system", "is", "None", ")", ":", "level", "=", "event", ".", "get", "(", "'log_level'", ",", "None", ")", "if", "(", "level", "is", "None", ")", ":", "levelName", "=", "u'-'", "else", ":", "levelName", "=", "level", ".", "name", "system", "=", "u'{namespace}#{level}'", ".", "format", "(", "namespace", "=", "event", ".", "get", "(", "'log_namespace'", ",", "u'-'", ")", ",", "level", "=", "levelName", ")", "else", ":", "try", ":", "system", "=", "unicode", "(", "system", ")", "except", "Exception", ":", "system", "=", "u'UNFORMATTABLE'", "return", "u'{timeStamp} [{system}] {event}\\n'", ".", "format", "(", "timeStamp", "=", "timeStamp", ",", "system", "=", "system", ",", "event", "=", "eventText", ")" ]
format an event as a line of human-readable text for .
train
false
21,244
def _format_callbacks(cb): size = len(cb) if (not size): cb = '' def format_cb(callback): return events._format_callback_source(callback, ()) if (size == 1): cb = format_cb(cb[0]) elif (size == 2): cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1])) elif (size > 2): cb = '{}, <{} more>, {}'.format(format_cb(cb[0]), (size - 2), format_cb(cb[(-1)])) return ('cb=[%s]' % cb)
[ "def", "_format_callbacks", "(", "cb", ")", ":", "size", "=", "len", "(", "cb", ")", "if", "(", "not", "size", ")", ":", "cb", "=", "''", "def", "format_cb", "(", "callback", ")", ":", "return", "events", ".", "_format_callback_source", "(", "callback", ",", "(", ")", ")", "if", "(", "size", "==", "1", ")", ":", "cb", "=", "format_cb", "(", "cb", "[", "0", "]", ")", "elif", "(", "size", "==", "2", ")", ":", "cb", "=", "'{}, {}'", ".", "format", "(", "format_cb", "(", "cb", "[", "0", "]", ")", ",", "format_cb", "(", "cb", "[", "1", "]", ")", ")", "elif", "(", "size", ">", "2", ")", ":", "cb", "=", "'{}, <{} more>, {}'", ".", "format", "(", "format_cb", "(", "cb", "[", "0", "]", ")", ",", "(", "size", "-", "2", ")", ",", "format_cb", "(", "cb", "[", "(", "-", "1", ")", "]", ")", ")", "return", "(", "'cb=[%s]'", "%", "cb", ")" ]
helper function for future .
train
false
21,246
def applied_items(): return _inventory.applied_items
[ "def", "applied_items", "(", ")", ":", "return", "_inventory", ".", "applied_items" ]
access to the cached applied item inventory .
train
false
21,247
def _cx_oracle_req(): return 'Need "cx_Oracle" and Oracle Client installed for this function exist'
[ "def", "_cx_oracle_req", "(", ")", ":", "return", "'Need \"cx_Oracle\" and Oracle Client installed for this function exist'" ]
fallback function stub .
train
false
21,248
def _PropertyKeyToString(key, default_property): key_path = key.to_path() if ((len(key_path) == 2) and (key_path[0] == '__kind__') and isinstance(key_path[1], basestring)): return (key_path[1], default_property) if ((len(key_path) == 4) and (key_path[0] == '__kind__') and isinstance(key_path[1], basestring) and (key_path[2] == '__property__') and isinstance(key_path[3], basestring)): return (key_path[1], key_path[3]) Check(False, 'invalid Key for __property__ table')
[ "def", "_PropertyKeyToString", "(", "key", ",", "default_property", ")", ":", "key_path", "=", "key", ".", "to_path", "(", ")", "if", "(", "(", "len", "(", "key_path", ")", "==", "2", ")", "and", "(", "key_path", "[", "0", "]", "==", "'__kind__'", ")", "and", "isinstance", "(", "key_path", "[", "1", "]", ",", "basestring", ")", ")", ":", "return", "(", "key_path", "[", "1", "]", ",", "default_property", ")", "if", "(", "(", "len", "(", "key_path", ")", "==", "4", ")", "and", "(", "key_path", "[", "0", "]", "==", "'__kind__'", ")", "and", "isinstance", "(", "key_path", "[", "1", "]", ",", "basestring", ")", "and", "(", "key_path", "[", "2", "]", "==", "'__property__'", ")", "and", "isinstance", "(", "key_path", "[", "3", "]", ",", "basestring", ")", ")", ":", "return", "(", "key_path", "[", "1", "]", ",", "key_path", "[", "3", "]", ")", "Check", "(", "False", ",", "'invalid Key for __property__ table'", ")" ]
extract property name from __property__ key .
train
false
21,249
def body_line_iterator(msg, decode=False): for subpart in msg.walk(): payload = subpart.get_payload(decode=decode) if isinstance(payload, basestring): for line in StringIO(payload): (yield line)
[ "def", "body_line_iterator", "(", "msg", ",", "decode", "=", "False", ")", ":", "for", "subpart", "in", "msg", ".", "walk", "(", ")", ":", "payload", "=", "subpart", ".", "get_payload", "(", "decode", "=", "decode", ")", "if", "isinstance", "(", "payload", ",", "basestring", ")", ":", "for", "line", "in", "StringIO", "(", "payload", ")", ":", "(", "yield", "line", ")" ]
iterate over the parts .
train
true
21,252
def all_locale_paths(): from django.conf import settings globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') return ([globalpath] + list(settings.LOCALE_PATHS))
[ "def", "all_locale_paths", "(", ")", ":", "from", "django", ".", "conf", "import", "settings", "globalpath", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "sys", ".", "modules", "[", "settings", ".", "__module__", "]", ".", "__file__", ")", ",", "'locale'", ")", "return", "(", "[", "globalpath", "]", "+", "list", "(", "settings", ".", "LOCALE_PATHS", ")", ")" ]
returns a list of paths to user-provides languages files .
train
false
21,255
@treeio_login_required @handle_response_format def subscription_index(request, response_format='html'): query = Q(status__hidden=False) if request.GET: if (('status' in request.GET) and request.GET['status']): query = _get_filter_query(request.GET) else: query = (query & _get_filter_query(request.GET)) subscriptions = Object.filter_by_request(request, Subscription.objects.filter(query), mode='r') filters = OrderFilterForm(request.user.profile, '', request.GET) ordered_products = subscriptions.orderedproduct_set.all() orders = ordered_products.order_set.all() statuses = Object.filter_by_request(request, SaleStatus.objects, mode='r') return render_to_response('sales/index', {'orders': orders, 'products': ordered_products, 'filters': filters, 'statuses': statuses}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "subscription_index", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "query", "=", "Q", "(", "status__hidden", "=", "False", ")", "if", "request", ".", "GET", ":", "if", "(", "(", "'status'", "in", "request", ".", "GET", ")", "and", "request", ".", "GET", "[", "'status'", "]", ")", ":", "query", "=", "_get_filter_query", "(", "request", ".", "GET", ")", "else", ":", "query", "=", "(", "query", "&", "_get_filter_query", "(", "request", ".", "GET", ")", ")", "subscriptions", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Subscription", ".", "objects", ".", "filter", "(", "query", ")", ",", "mode", "=", "'r'", ")", "filters", "=", "OrderFilterForm", "(", "request", ".", "user", ".", "profile", ",", "''", ",", "request", ".", "GET", ")", "ordered_products", "=", "subscriptions", ".", "orderedproduct_set", ".", "all", "(", ")", "orders", "=", "ordered_products", ".", "order_set", ".", "all", "(", ")", "statuses", "=", "Object", ".", "filter_by_request", "(", "request", ",", "SaleStatus", ".", "objects", ",", "mode", "=", "'r'", ")", "return", "render_to_response", "(", "'sales/index'", ",", "{", "'orders'", ":", "orders", ",", "'products'", ":", "ordered_products", ",", "'filters'", ":", "filters", ",", "'statuses'", ":", "statuses", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
subscription index page .
train
false
21,256
def create_username(caller, string_input): menutree = caller.ndb._menutree string_input = string_input.strip() player = managers.players.get_player_from_name(string_input) if player: text = dedent('\n |rThe account {} already exists.|n\n Enter another username or leave blank to go back.\n '.strip('\n')).format(string_input) options = ({'key': '', 'goto': 'start'}, {'key': '_default', 'goto': 'create_username'}) elif (not RE_VALID_USERNAME.search(string_input)): text = dedent("\n |rThis username isn't valid.|n\n Only letters are accepted, without special characters.\n The username must be at least 3 characters long.\n Enter another username or leave blank to go back.\n ".strip('\n')) options = ({'key': '', 'goto': 'start'}, {'key': '_default', 'goto': 'create_username'}) else: menutree.playername = string_input caller.msg('', options={'echo': False}) text = "Enter this account's new password." options = ({'key': '_default', 'goto': 'create_password'},) return (text, options)
[ "def", "create_username", "(", "caller", ",", "string_input", ")", ":", "menutree", "=", "caller", ".", "ndb", ".", "_menutree", "string_input", "=", "string_input", ".", "strip", "(", ")", "player", "=", "managers", ".", "players", ".", "get_player_from_name", "(", "string_input", ")", "if", "player", ":", "text", "=", "dedent", "(", "'\\n |rThe account {} already exists.|n\\n Enter another username or leave blank to go back.\\n '", ".", "strip", "(", "'\\n'", ")", ")", ".", "format", "(", "string_input", ")", "options", "=", "(", "{", "'key'", ":", "''", ",", "'goto'", ":", "'start'", "}", ",", "{", "'key'", ":", "'_default'", ",", "'goto'", ":", "'create_username'", "}", ")", "elif", "(", "not", "RE_VALID_USERNAME", ".", "search", "(", "string_input", ")", ")", ":", "text", "=", "dedent", "(", "\"\\n |rThis username isn't valid.|n\\n Only letters are accepted, without special characters.\\n The username must be at least 3 characters long.\\n Enter another username or leave blank to go back.\\n \"", ".", "strip", "(", "'\\n'", ")", ")", "options", "=", "(", "{", "'key'", ":", "''", ",", "'goto'", ":", "'start'", "}", ",", "{", "'key'", ":", "'_default'", ",", "'goto'", ":", "'create_username'", "}", ")", "else", ":", "menutree", ".", "playername", "=", "string_input", "caller", ".", "msg", "(", "''", ",", "options", "=", "{", "'echo'", ":", "False", "}", ")", "text", "=", "\"Enter this account's new password.\"", "options", "=", "(", "{", "'key'", ":", "'_default'", ",", "'goto'", ":", "'create_password'", "}", ",", ")", "return", "(", "text", ",", "options", ")" ]
prompt to enter a valid username .
train
false
21,258
def get_usage_multi(prefix_slices): keys = [_make_ratelimit_cache_key(k, t) for (k, t) in prefix_slices] try: values = g.ratelimitcache.get_multi(keys) return [values.get(k, 0) for k in keys] except pylibmc.Error as e: raise RatelimitError(e)
[ "def", "get_usage_multi", "(", "prefix_slices", ")", ":", "keys", "=", "[", "_make_ratelimit_cache_key", "(", "k", ",", "t", ")", "for", "(", "k", ",", "t", ")", "in", "prefix_slices", "]", "try", ":", "values", "=", "g", ".", "ratelimitcache", ".", "get_multi", "(", "keys", ")", "return", "[", "values", ".", "get", "(", "k", ",", "0", ")", "for", "k", "in", "keys", "]", "except", "pylibmc", ".", "Error", "as", "e", ":", "raise", "RatelimitError", "(", "e", ")" ]
return the current usage of several rate limits .
train
false
21,260
def user_timezone_locale_prefs(request): cached_value = request_cache.get_cache(CACHE_NAME) if (not cached_value): user_prefs = {'user_timezone': None, 'user_language': None} if (hasattr(request, 'user') and request.user.is_authenticated()): try: user_preferences = get_user_preferences(request.user) except (UserNotFound, UserAPIInternalError): cached_value.update(user_prefs) else: user_prefs = {key: user_preferences.get(pref_name, None) for (key, pref_name) in RETRIEVABLE_PREFERENCES.iteritems()} cached_value.update(user_prefs) return cached_value
[ "def", "user_timezone_locale_prefs", "(", "request", ")", ":", "cached_value", "=", "request_cache", ".", "get_cache", "(", "CACHE_NAME", ")", "if", "(", "not", "cached_value", ")", ":", "user_prefs", "=", "{", "'user_timezone'", ":", "None", ",", "'user_language'", ":", "None", "}", "if", "(", "hasattr", "(", "request", ",", "'user'", ")", "and", "request", ".", "user", ".", "is_authenticated", "(", ")", ")", ":", "try", ":", "user_preferences", "=", "get_user_preferences", "(", "request", ".", "user", ")", "except", "(", "UserNotFound", ",", "UserAPIInternalError", ")", ":", "cached_value", ".", "update", "(", "user_prefs", ")", "else", ":", "user_prefs", "=", "{", "key", ":", "user_preferences", ".", "get", "(", "pref_name", ",", "None", ")", "for", "(", "key", ",", "pref_name", ")", "in", "RETRIEVABLE_PREFERENCES", ".", "iteritems", "(", ")", "}", "cached_value", ".", "update", "(", "user_prefs", ")", "return", "cached_value" ]
checks if request has an authenticated user .
train
false
21,263
def check_for_unsupported_version(): fnull = open(os.devnull, 'w') process = subprocess.call(['/usr/bin/dpkg-query', '-l', 'rbp-mediacenter-osmc'], stderr=fnull, stdout=fnull) fnull.close() if (process == 0): ok = xbmcgui.Dialog().ok(lang(32017), lang(32018), lang(32019)) return 'alpha' else: return 'proceed'
[ "def", "check_for_unsupported_version", "(", ")", ":", "fnull", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "process", "=", "subprocess", ".", "call", "(", "[", "'/usr/bin/dpkg-query'", ",", "'-l'", ",", "'rbp-mediacenter-osmc'", "]", ",", "stderr", "=", "fnull", ",", "stdout", "=", "fnull", ")", "fnull", ".", "close", "(", ")", "if", "(", "process", "==", "0", ")", ":", "ok", "=", "xbmcgui", ".", "Dialog", "(", ")", ".", "ok", "(", "lang", "(", "32017", ")", ",", "lang", "(", "32018", ")", ",", "lang", "(", "32019", ")", ")", "return", "'alpha'", "else", ":", "return", "'proceed'" ]
checks if this version is an alpha .
train
false
21,270
def run_migrations_online(): connectable = settings.engine with connectable.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata, compare_type=COMPARE_TYPE) with context.begin_transaction(): context.run_migrations()
[ "def", "run_migrations_online", "(", ")", ":", "connectable", "=", "settings", ".", "engine", "with", "connectable", ".", "connect", "(", ")", "as", "connection", ":", "context", ".", "configure", "(", "connection", "=", "connection", ",", "target_metadata", "=", "target_metadata", ",", "compare_type", "=", "COMPARE_TYPE", ")", "with", "context", ".", "begin_transaction", "(", ")", ":", "context", ".", "run_migrations", "(", ")" ]
run migrations in online mode .
train
true
21,272
def get_cache(): return requests.Session().cache
[ "def", "get_cache", "(", ")", ":", "return", "requests", ".", "Session", "(", ")", ".", "cache" ]
used to get cache client .
train
false
21,273
def _filetime_from_timestamp(timestamp): moment = datetime.fromtimestamp(timestamp) delta_from_utc = (moment - datetime.utcfromtimestamp(timestamp)) return dt_to_filetime(moment, delta_from_utc)
[ "def", "_filetime_from_timestamp", "(", "timestamp", ")", ":", "moment", "=", "datetime", ".", "fromtimestamp", "(", "timestamp", ")", "delta_from_utc", "=", "(", "moment", "-", "datetime", ".", "utcfromtimestamp", "(", "timestamp", ")", ")", "return", "dt_to_filetime", "(", "moment", ",", "delta_from_utc", ")" ]
see filetimes .
train
false
21,276
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
21,277
def _get_win_folder_from_registry(csidl_name): import _winreg shell_folder_name = {'CSIDL_APPDATA': 'AppData', 'CSIDL_COMMON_APPDATA': 'Common AppData', 'CSIDL_LOCAL_APPDATA': 'Local AppData'}[csidl_name] key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders') (directory, _type) = _winreg.QueryValueEx(key, shell_folder_name) return directory
[ "def", "_get_win_folder_from_registry", "(", "csidl_name", ")", ":", "import", "_winreg", "shell_folder_name", "=", "{", "'CSIDL_APPDATA'", ":", "'AppData'", ",", "'CSIDL_COMMON_APPDATA'", ":", "'Common AppData'", ",", "'CSIDL_LOCAL_APPDATA'", ":", "'Local AppData'", "}", "[", "csidl_name", "]", "key", "=", "_winreg", ".", "OpenKey", "(", "_winreg", ".", "HKEY_CURRENT_USER", ",", "'Software\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Explorer\\\\Shell Folders'", ")", "(", "directory", ",", "_type", ")", "=", "_winreg", ".", "QueryValueEx", "(", "key", ",", "shell_folder_name", ")", "return", "directory" ]
this is a fallback technique at best .
train
true
21,278
@with_setup(setup, teardown) def test_show_reconstructions(): rows = 5 cols = 10 m = (rows * cols) model = show_reconstruct.load_model('dbm.pkl', m) dataset = show_reconstruct.load_dataset(model.dataset_yaml_src, use_test_set='n') batch = model.visible_layer.space.make_theano_batch() reconstruction = model.reconstruct(batch) recons_func = function([batch], reconstruction) vis_batch = dataset.get_batch_topo(m) patch_viewer = show_reconstruct.init_viewer(dataset, rows, cols, vis_batch) show_reconstruct.update_viewer(dataset, batch, rows, cols, patch_viewer, recons_func, vis_batch)
[ "@", "with_setup", "(", "setup", ",", "teardown", ")", "def", "test_show_reconstructions", "(", ")", ":", "rows", "=", "5", "cols", "=", "10", "m", "=", "(", "rows", "*", "cols", ")", "model", "=", "show_reconstruct", ".", "load_model", "(", "'dbm.pkl'", ",", "m", ")", "dataset", "=", "show_reconstruct", ".", "load_dataset", "(", "model", ".", "dataset_yaml_src", ",", "use_test_set", "=", "'n'", ")", "batch", "=", "model", ".", "visible_layer", ".", "space", ".", "make_theano_batch", "(", ")", "reconstruction", "=", "model", ".", "reconstruct", "(", "batch", ")", "recons_func", "=", "function", "(", "[", "batch", "]", ",", "reconstruction", ")", "vis_batch", "=", "dataset", ".", "get_batch_topo", "(", "m", ")", "patch_viewer", "=", "show_reconstruct", ".", "init_viewer", "(", "dataset", ",", "rows", ",", "cols", ",", "vis_batch", ")", "show_reconstruct", ".", "update_viewer", "(", "dataset", ",", "batch", ",", "rows", ",", "cols", ",", "patch_viewer", ",", "recons_func", ",", "vis_batch", ")" ]
test the reconstruction update_viewer function .
train
false
21,282
def home_page_is_products(doc, method): home_page_is_products = cint(frappe.db.get_single_value(u'Products Settings', u'home_page_is_products')) if home_page_is_products: doc.home_page = u'products'
[ "def", "home_page_is_products", "(", "doc", ",", "method", ")", ":", "home_page_is_products", "=", "cint", "(", "frappe", ".", "db", ".", "get_single_value", "(", "u'Products Settings'", ",", "u'home_page_is_products'", ")", ")", "if", "home_page_is_products", ":", "doc", ".", "home_page", "=", "u'products'" ]
called on saving website settings .
train
false
21,283
def migrate_hosted_facts(facts): if ('master' in facts): if ('router_selector' in facts['master']): if ('hosted' not in facts): facts['hosted'] = {} if ('router' not in facts['hosted']): facts['hosted']['router'] = {} facts['hosted']['router']['selector'] = facts['master'].pop('router_selector') if ('registry_selector' in facts['master']): if ('hosted' not in facts): facts['hosted'] = {} if ('registry' not in facts['hosted']): facts['hosted']['registry'] = {} facts['hosted']['registry']['selector'] = facts['master'].pop('registry_selector') return facts
[ "def", "migrate_hosted_facts", "(", "facts", ")", ":", "if", "(", "'master'", "in", "facts", ")", ":", "if", "(", "'router_selector'", "in", "facts", "[", "'master'", "]", ")", ":", "if", "(", "'hosted'", "not", "in", "facts", ")", ":", "facts", "[", "'hosted'", "]", "=", "{", "}", "if", "(", "'router'", "not", "in", "facts", "[", "'hosted'", "]", ")", ":", "facts", "[", "'hosted'", "]", "[", "'router'", "]", "=", "{", "}", "facts", "[", "'hosted'", "]", "[", "'router'", "]", "[", "'selector'", "]", "=", "facts", "[", "'master'", "]", ".", "pop", "(", "'router_selector'", ")", "if", "(", "'registry_selector'", "in", "facts", "[", "'master'", "]", ")", ":", "if", "(", "'hosted'", "not", "in", "facts", ")", ":", "facts", "[", "'hosted'", "]", "=", "{", "}", "if", "(", "'registry'", "not", "in", "facts", "[", "'hosted'", "]", ")", ":", "facts", "[", "'hosted'", "]", "[", "'registry'", "]", "=", "{", "}", "facts", "[", "'hosted'", "]", "[", "'registry'", "]", "[", "'selector'", "]", "=", "facts", "[", "'master'", "]", ".", "pop", "(", "'registry_selector'", ")", "return", "facts" ]
apply migrations for master facts .
train
false
21,284
def skipIfDBFeature(*features): return _deferredSkip((lambda : any((getattr(connection.features, feature, False) for feature in features))), ('Database has feature(s) %s' % ', '.join(features)))
[ "def", "skipIfDBFeature", "(", "*", "features", ")", ":", "return", "_deferredSkip", "(", "(", "lambda", ":", "any", "(", "(", "getattr", "(", "connection", ".", "features", ",", "feature", ",", "False", ")", "for", "feature", "in", "features", ")", ")", ")", ",", "(", "'Database has feature(s) %s'", "%", "', '", ".", "join", "(", "features", ")", ")", ")" ]
skip a test if a database has the named feature .
train
false
21,285
def regex_findall(value, regex, multiline=False, ignorecase=False): flags = 0 if ignorecase: flags |= re.I if multiline: flags |= re.M return re.findall(regex, value, flags)
[ "def", "regex_findall", "(", "value", ",", "regex", ",", "multiline", "=", "False", ",", "ignorecase", "=", "False", ")", ":", "flags", "=", "0", "if", "ignorecase", ":", "flags", "|=", "re", ".", "I", "if", "multiline", ":", "flags", "|=", "re", ".", "M", "return", "re", ".", "findall", "(", "regex", ",", "value", ",", "flags", ")" ]
perform re .
train
false
21,286
def json_dump(data, filename): with open(filename, u'w') as fh: json.dump(data, fh, cls=JSONEncoder, indent=2)
[ "def", "json_dump", "(", "data", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "u'w'", ")", "as", "fh", ":", "json", ".", "dump", "(", "data", ",", "fh", ",", "cls", "=", "JSONEncoder", ",", "indent", "=", "2", ")" ]
dumps a data structure as json in the named file .
train
false
21,287
@nottest def get_test_ids(nose_selector): _get_tests('id-collection.xml', nose_selector) "\n {'failed': ['2455'],\n 'ids': {1: ('/home/pablo/workspace/w3af/core/controllers/auto_update/tests/test_git_auto_update.py',\n 'core.controllers.auto_update.tests.test_git_auto_update',\n None),\n 2: ('/home/pablo/workspace/w3af/core/controllers/auto_update/tests/test_git_auto_update.py',\n 'core.controllers.auto_update.tests.test_git_auto_update',\n 'TestGitAutoUpdate.test_is_git_repo'),\n " nose_ids = pickle.load(file(ID_FILE)) return nose_ids['ids'].keys()
[ "@", "nottest", "def", "get_test_ids", "(", "nose_selector", ")", ":", "_get_tests", "(", "'id-collection.xml'", ",", "nose_selector", ")", "nose_ids", "=", "pickle", ".", "load", "(", "file", "(", "ID_FILE", ")", ")", "return", "nose_ids", "[", "'ids'", "]", ".", "keys", "(", ")" ]
generate and parse .
train
false
21,289
def dotprint(expr, styles=default_styles, atom=(lambda x: (not isinstance(x, Basic))), maxdepth=None, repeat=True, labelfunc=str, **kwargs): graphstyle.update(kwargs) nodes = [] edges = [] def traverse(e, depth, pos=()): nodes.append(dotnode(e, styles, labelfunc=labelfunc, pos=pos, repeat=repeat)) if (maxdepth and (depth >= maxdepth)): return edges.extend(dotedges(e, atom=atom, pos=pos, repeat=repeat)) [traverse(arg, (depth + 1), (pos + (i,))) for (i, arg) in enumerate(e.args) if (not atom(arg))] traverse(expr, 0) return (template % {'graphstyle': attrprint(graphstyle, delimiter='\n'), 'nodes': '\n'.join(nodes), 'edges': '\n'.join(edges)})
[ "def", "dotprint", "(", "expr", ",", "styles", "=", "default_styles", ",", "atom", "=", "(", "lambda", "x", ":", "(", "not", "isinstance", "(", "x", ",", "Basic", ")", ")", ")", ",", "maxdepth", "=", "None", ",", "repeat", "=", "True", ",", "labelfunc", "=", "str", ",", "**", "kwargs", ")", ":", "graphstyle", ".", "update", "(", "kwargs", ")", "nodes", "=", "[", "]", "edges", "=", "[", "]", "def", "traverse", "(", "e", ",", "depth", ",", "pos", "=", "(", ")", ")", ":", "nodes", ".", "append", "(", "dotnode", "(", "e", ",", "styles", ",", "labelfunc", "=", "labelfunc", ",", "pos", "=", "pos", ",", "repeat", "=", "repeat", ")", ")", "if", "(", "maxdepth", "and", "(", "depth", ">=", "maxdepth", ")", ")", ":", "return", "edges", ".", "extend", "(", "dotedges", "(", "e", ",", "atom", "=", "atom", ",", "pos", "=", "pos", ",", "repeat", "=", "repeat", ")", ")", "[", "traverse", "(", "arg", ",", "(", "depth", "+", "1", ")", ",", "(", "pos", "+", "(", "i", ",", ")", ")", ")", "for", "(", "i", ",", "arg", ")", "in", "enumerate", "(", "e", ".", "args", ")", "if", "(", "not", "atom", "(", "arg", ")", ")", "]", "traverse", "(", "expr", ",", "0", ")", "return", "(", "template", "%", "{", "'graphstyle'", ":", "attrprint", "(", "graphstyle", ",", "delimiter", "=", "'\\n'", ")", ",", "'nodes'", ":", "'\\n'", ".", "join", "(", "nodes", ")", ",", "'edges'", ":", "'\\n'", ".", "join", "(", "edges", ")", "}", ")" ]
dot description of a sympy expression tree options are styles: styles for different classes .
train
false
21,292
def list_history_record(request, record_id): history = History.objects if (not request.user.is_superuser): history.filter(submitter=request.user) history = history.get(id=record_id) return render('editor/list_history_record.mako', request, {'record': history})
[ "def", "list_history_record", "(", "request", ",", "record_id", ")", ":", "history", "=", "History", ".", "objects", "if", "(", "not", "request", ".", "user", ".", "is_superuser", ")", ":", "history", ".", "filter", "(", "submitter", "=", "request", ".", "user", ")", "history", "=", "history", ".", "get", "(", "id", "=", "record_id", ")", "return", "render", "(", "'editor/list_history_record.mako'", ",", "request", ",", "{", "'record'", ":", "history", "}", ")" ]
list a job submission history .
train
false
21,293
def is_loopback(ip_addr): return ipaddress.ip_address(ip_addr).is_loopback
[ "def", "is_loopback", "(", "ip_addr", ")", ":", "return", "ipaddress", ".", "ip_address", "(", "ip_addr", ")", ".", "is_loopback" ]
check if the given ip address is a loopback address .
train
false
21,294
def time_ago(x): delta = timedelta(weeks=1) if ((datetime.utcnow() - x) > delta): return x.strftime('%b %d, %Y') else: date_array = date.distance_of_time_in_words(x, datetime.utcnow()).replace(',', '').split(' ') return ('~%s %s ago' % (date_array[0], date_array[1]))
[ "def", "time_ago", "(", "x", ")", ":", "delta", "=", "timedelta", "(", "weeks", "=", "1", ")", "if", "(", "(", "datetime", ".", "utcnow", "(", ")", "-", "x", ")", ">", "delta", ")", ":", "return", "x", ".", "strftime", "(", "'%b %d, %Y'", ")", "else", ":", "date_array", "=", "date", ".", "distance_of_time_in_words", "(", "x", ",", "datetime", ".", "utcnow", "(", ")", ")", ".", "replace", "(", "','", ",", "''", ")", ".", "split", "(", "' '", ")", "return", "(", "'~%s %s ago'", "%", "(", "date_array", "[", "0", "]", ",", "date_array", "[", "1", "]", ")", ")" ]
convert a datetime to a string .
train
false
21,295
def subgraph(G, nbunch): return G.subgraph(nbunch)
[ "def", "subgraph", "(", "G", ",", "nbunch", ")", ":", "return", "G", ".", "subgraph", "(", "nbunch", ")" ]
return the subgraph induced on nodes in nbunch .
train
false
21,296
def yukaritan(): n = 1 while True: if (not (n % 15)): (yield 'FizzBuzz') elif (not (n % 3)): (yield 'Fizz') elif (not (n % 5)): (yield 'Buzz') else: (yield str(n)) n += 1
[ "def", "yukaritan", "(", ")", ":", "n", "=", "1", "while", "True", ":", "if", "(", "not", "(", "n", "%", "15", ")", ")", ":", "(", "yield", "'FizzBuzz'", ")", "elif", "(", "not", "(", "n", "%", "3", ")", ")", ":", "(", "yield", "'Fizz'", ")", "elif", "(", "not", "(", "n", "%", "5", ")", ")", ":", "(", "yield", "'Buzz'", ")", "else", ":", "(", "yield", "str", "(", "n", ")", ")", "n", "+=", "1" ]
yay fizzbuzz! .
train
false
21,297
def psave(fname, d): f = file(fname, 'w') pickle.dump(d, f) f.close()
[ "def", "psave", "(", "fname", ",", "d", ")", ":", "f", "=", "file", "(", "fname", ",", "'w'", ")", "pickle", ".", "dump", "(", "d", ",", "f", ")", "f", ".", "close", "(", ")" ]
save a pickled object into a file .
train
false
21,298
@with_open_mode('r') @with_sizes('small', 'medium', 'large') def read_whole_file(f): f.seek(0) while f.read(): pass
[ "@", "with_open_mode", "(", "'r'", ")", "@", "with_sizes", "(", "'small'", ",", "'medium'", ",", "'large'", ")", "def", "read_whole_file", "(", "f", ")", ":", "f", ".", "seek", "(", "0", ")", "while", "f", ".", "read", "(", ")", ":", "pass" ]
read whole contents at once .
train
false
21,299
def host_to_ips(host): ips = [] try: for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(host, 0, socket.AF_UNSPEC, socket.SOCK_STREAM): if (family == socket.AF_INET): (ip, port) = sockaddr elif (family == socket.AF_INET6): (ip, port, flow_info, scope_id) = sockaddr ips.append(ip) if (not ips): ips = None except Exception: ips = None return ips
[ "def", "host_to_ips", "(", "host", ")", ":", "ips", "=", "[", "]", "try", ":", "for", "(", "family", ",", "socktype", ",", "proto", ",", "canonname", ",", "sockaddr", ")", "in", "socket", ".", "getaddrinfo", "(", "host", ",", "0", ",", "socket", ".", "AF_UNSPEC", ",", "socket", ".", "SOCK_STREAM", ")", ":", "if", "(", "family", "==", "socket", ".", "AF_INET", ")", ":", "(", "ip", ",", "port", ")", "=", "sockaddr", "elif", "(", "family", "==", "socket", ".", "AF_INET6", ")", ":", "(", "ip", ",", "port", ",", "flow_info", ",", "scope_id", ")", "=", "sockaddr", "ips", ".", "append", "(", "ip", ")", "if", "(", "not", "ips", ")", ":", "ips", "=", "None", "except", "Exception", ":", "ips", "=", "None", "return", "ips" ]
returns a list of ip addresses of a given hostname or none if not found .
train
true
21,300
def undeny(ip): return _access_rule('undeny', ip)
[ "def", "undeny", "(", "ip", ")", ":", "return", "_access_rule", "(", "'undeny'", ",", "ip", ")" ]
remove a rule from the csf denied hosts see :func:_access_rule .
train
false
21,301
def const_number_of_feasible_pop(iterable, key=(lambda x: x), allowequality=True): items = list(iterable) fits = list(map(key, items)) v = list([fits[i][1] for i in range(len(fits))]) n = v.count(True) return n
[ "def", "const_number_of_feasible_pop", "(", "iterable", ",", "key", "=", "(", "lambda", "x", ":", "x", ")", ",", "allowequality", "=", "True", ")", ":", "items", "=", "list", "(", "iterable", ")", "fits", "=", "list", "(", "map", "(", "key", ",", "items", ")", ")", "v", "=", "list", "(", "[", "fits", "[", "i", "]", "[", "1", "]", "for", "i", "in", "range", "(", "len", "(", "fits", ")", ")", "]", ")", "n", "=", "v", ".", "count", "(", "True", ")", "return", "n" ]
return a subset of items from iterable which are not dominated by any other item in iterable .
train
false
21,302
def expander(where): return _expander(tuple(where))
[ "def", "expander", "(", "where", ")", ":", "return", "_expander", "(", "tuple", "(", "where", ")", ")" ]
an optimized version of insert_many() when *where* is known upfront and used many times .
train
false
21,303
def _ma_transparams(params): newparams = ((1 - np.exp((- params))) / (1 + np.exp((- params)))).copy() tmp = ((1 - np.exp((- params))) / (1 + np.exp((- params)))).copy() for j in range(1, len(params)): b = newparams[j] for kiter in range(j): tmp[kiter] += (b * newparams[((j - kiter) - 1)]) newparams[:j] = tmp[:j] return newparams
[ "def", "_ma_transparams", "(", "params", ")", ":", "newparams", "=", "(", "(", "1", "-", "np", ".", "exp", "(", "(", "-", "params", ")", ")", ")", "/", "(", "1", "+", "np", ".", "exp", "(", "(", "-", "params", ")", ")", ")", ")", ".", "copy", "(", ")", "tmp", "=", "(", "(", "1", "-", "np", ".", "exp", "(", "(", "-", "params", ")", ")", ")", "/", "(", "1", "+", "np", ".", "exp", "(", "(", "-", "params", ")", ")", ")", ")", ".", "copy", "(", ")", "for", "j", "in", "range", "(", "1", ",", "len", "(", "params", ")", ")", ":", "b", "=", "newparams", "[", "j", "]", "for", "kiter", "in", "range", "(", "j", ")", ":", "tmp", "[", "kiter", "]", "+=", "(", "b", "*", "newparams", "[", "(", "(", "j", "-", "kiter", ")", "-", "1", ")", "]", ")", "newparams", "[", ":", "j", "]", "=", "tmp", "[", ":", "j", "]", "return", "newparams" ]
transforms params to induce stationarity/invertability .
train
false
21,304
def generate_key(key_length=64): if hasattr(random, 'SystemRandom'): logging.info('Generating a secure random key using SystemRandom.') choice = random.SystemRandom().choice else: msg = 'WARNING: SystemRandom not present. Generating a random key using random.choice (NOT CRYPTOGRAPHICALLY SECURE).' logging.warning(msg) choice = random.choice return ''.join(map((lambda x: choice((string.digits + string.ascii_letters))), range(key_length)))
[ "def", "generate_key", "(", "key_length", "=", "64", ")", ":", "if", "hasattr", "(", "random", ",", "'SystemRandom'", ")", ":", "logging", ".", "info", "(", "'Generating a secure random key using SystemRandom.'", ")", "choice", "=", "random", ".", "SystemRandom", "(", ")", ".", "choice", "else", ":", "msg", "=", "'WARNING: SystemRandom not present. Generating a random key using random.choice (NOT CRYPTOGRAPHICALLY SECURE).'", "logging", ".", "warning", "(", "msg", ")", "choice", "=", "random", ".", "choice", "return", "''", ".", "join", "(", "map", "(", "(", "lambda", "x", ":", "choice", "(", "(", "string", ".", "digits", "+", "string", ".", "ascii_letters", ")", ")", ")", ",", "range", "(", "key_length", ")", ")", ")" ]
generate a random api key see: URL .
train
true
21,306
def _ppoly_eval_2(coeffs, breaks, xnew, fill=np.nan): a = breaks[0] b = breaks[(-1)] K = coeffs.shape[0] saveshape = np.shape(xnew) xnew = np.ravel(xnew) res = np.empty_like(xnew) mask = ((xnew >= a) & (xnew <= b)) res[(~ mask)] = fill xx = xnew.compress(mask) indxs = (np.searchsorted(breaks, xx) - 1) indxs = indxs.clip(0, len(breaks)) pp = coeffs diff = (xx - breaks.take(indxs)) V = np.vander(diff, N=K) values = np.array([np.dot(V[k, :], pp[:, indxs[k]]) for k in xrange(len(xx))]) res[mask] = values res.shape = saveshape return res
[ "def", "_ppoly_eval_2", "(", "coeffs", ",", "breaks", ",", "xnew", ",", "fill", "=", "np", ".", "nan", ")", ":", "a", "=", "breaks", "[", "0", "]", "b", "=", "breaks", "[", "(", "-", "1", ")", "]", "K", "=", "coeffs", ".", "shape", "[", "0", "]", "saveshape", "=", "np", ".", "shape", "(", "xnew", ")", "xnew", "=", "np", ".", "ravel", "(", "xnew", ")", "res", "=", "np", ".", "empty_like", "(", "xnew", ")", "mask", "=", "(", "(", "xnew", ">=", "a", ")", "&", "(", "xnew", "<=", "b", ")", ")", "res", "[", "(", "~", "mask", ")", "]", "=", "fill", "xx", "=", "xnew", ".", "compress", "(", "mask", ")", "indxs", "=", "(", "np", ".", "searchsorted", "(", "breaks", ",", "xx", ")", "-", "1", ")", "indxs", "=", "indxs", ".", "clip", "(", "0", ",", "len", "(", "breaks", ")", ")", "pp", "=", "coeffs", "diff", "=", "(", "xx", "-", "breaks", ".", "take", "(", "indxs", ")", ")", "V", "=", "np", ".", "vander", "(", "diff", ",", "N", "=", "K", ")", "values", "=", "np", ".", "array", "(", "[", "np", ".", "dot", "(", "V", "[", "k", ",", ":", "]", ",", "pp", "[", ":", ",", "indxs", "[", "k", "]", "]", ")", "for", "k", "in", "xrange", "(", "len", "(", "xx", ")", ")", "]", ")", "res", "[", "mask", "]", "=", "values", "res", ".", "shape", "=", "saveshape", "return", "res" ]
evaluate piecewise polynomial manually .
train
false
21,308
def get_main_app(argv=[]): app = QApplication(argv) app.setApplicationName(__appname__) app.setWindowIcon(newIcon('app')) win = MainWindow((argv[1] if (len(argv) == 2) else None)) win.show() return (app, win)
[ "def", "get_main_app", "(", "argv", "=", "[", "]", ")", ":", "app", "=", "QApplication", "(", "argv", ")", "app", ".", "setApplicationName", "(", "__appname__", ")", "app", ".", "setWindowIcon", "(", "newIcon", "(", "'app'", ")", ")", "win", "=", "MainWindow", "(", "(", "argv", "[", "1", "]", "if", "(", "len", "(", "argv", ")", "==", "2", ")", "else", "None", ")", ")", "win", ".", "show", "(", ")", "return", "(", "app", ",", "win", ")" ]
standard boilerplate qt application code .
train
false