id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
41,193
@receiver(PROBLEM_RAW_SCORE_CHANGED) def problem_raw_score_changed_handler(sender, **kwargs): if (kwargs['raw_possible'] is not None): (weighted_earned, weighted_possible) = weighted_score(kwargs['raw_earned'], kwargs['raw_possible'], kwargs['weight']) else: (weighted_earned, weighted_possible) = (kwargs['raw_earned'], kwargs['raw_possible']) PROBLEM_WEIGHTED_SCORE_CHANGED.send(sender=None, weighted_earned=weighted_earned, weighted_possible=weighted_possible, user_id=kwargs['user_id'], course_id=kwargs['course_id'], usage_id=kwargs['usage_id'], only_if_higher=kwargs['only_if_higher'], score_deleted=kwargs.get('score_deleted', False), modified=kwargs['modified'], score_db_table=kwargs['score_db_table'])
[ "@", "receiver", "(", "PROBLEM_RAW_SCORE_CHANGED", ")", "def", "problem_raw_score_changed_handler", "(", "sender", ",", "**", "kwargs", ")", ":", "if", "(", "kwargs", "[", "'raw_possible'", "]", "is", "not", "None", ")", ":", "(", "weighted_earned", ",", "weighted_possible", ")", "=", "weighted_score", "(", "kwargs", "[", "'raw_earned'", "]", ",", "kwargs", "[", "'raw_possible'", "]", ",", "kwargs", "[", "'weight'", "]", ")", "else", ":", "(", "weighted_earned", ",", "weighted_possible", ")", "=", "(", "kwargs", "[", "'raw_earned'", "]", ",", "kwargs", "[", "'raw_possible'", "]", ")", "PROBLEM_WEIGHTED_SCORE_CHANGED", ".", "send", "(", "sender", "=", "None", ",", "weighted_earned", "=", "weighted_earned", ",", "weighted_possible", "=", "weighted_possible", ",", "user_id", "=", "kwargs", "[", "'user_id'", "]", ",", "course_id", "=", "kwargs", "[", "'course_id'", "]", ",", "usage_id", "=", "kwargs", "[", "'usage_id'", "]", ",", "only_if_higher", "=", "kwargs", "[", "'only_if_higher'", "]", ",", "score_deleted", "=", "kwargs", ".", "get", "(", "'score_deleted'", ",", "False", ")", ",", "modified", "=", "kwargs", "[", "'modified'", "]", ",", "score_db_table", "=", "kwargs", "[", "'score_db_table'", "]", ")" ]
handles the raw score changed signal .
train
false
41,196
def quoteattr(data, entities={}): entities = entities.copy() entities.update({'\n': '
', '\r': '
', ' DCTB ': '	'}) data = escape(data, entities) if ('"' in data): if ("'" in data): data = ('"%s"' % data.replace('"', '"')) else: data = ("'%s'" % data) else: data = ('"%s"' % data) return data
[ "def", "quoteattr", "(", "data", ",", "entities", "=", "{", "}", ")", ":", "entities", "=", "entities", ".", "copy", "(", ")", "entities", ".", "update", "(", "{", "'\\n'", ":", "'
'", ",", "'\\r'", ":", "'
'", ",", "' DCTB '", ":", "'	'", "}", ")", "data", "=", "escape", "(", "data", ",", "entities", ")", "if", "(", "'\"'", "in", "data", ")", ":", "if", "(", "\"'\"", "in", "data", ")", ":", "data", "=", "(", "'\"%s\"'", "%", "data", ".", "replace", "(", "'\"'", ",", "'"'", ")", ")", "else", ":", "data", "=", "(", "\"'%s'\"", "%", "data", ")", "else", ":", "data", "=", "(", "'\"%s\"'", "%", "data", ")", "return", "data" ]
escape and quote an attribute value .
train
false
41,197
def show_firewall(firewall, profile=None): conn = _auth(profile) return conn.show_firewall(firewall)
[ "def", "show_firewall", "(", "firewall", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_firewall", "(", "firewall", ")" ]
fetches information of a specific firewall rule cli example: .
train
false
41,198
def localize_attributes(attribs, translations): for (name, atrans) in translations.items(): attrib = attribs[name] localize_common(attrib, atrans)
[ "def", "localize_attributes", "(", "attribs", ",", "translations", ")", ":", "for", "(", "name", ",", "atrans", ")", "in", "translations", ".", "items", "(", ")", ":", "attrib", "=", "attribs", "[", "name", "]", "localize_common", "(", "attrib", ",", "atrans", ")" ]
localize list of attributes .
train
false
41,199
def budget_import_csv(file, table=None): if table: table.import_from_csv_file(file) else: db.import_from_csv_file(file) db.commit()
[ "def", "budget_import_csv", "(", "file", ",", "table", "=", "None", ")", ":", "if", "table", ":", "table", ".", "import_from_csv_file", "(", "file", ")", "else", ":", "db", ".", "import_from_csv_file", "(", "file", ")", "db", ".", "commit", "(", ")" ]
import csv file into database .
train
false
41,200
def extract_instance_type(instance, prefix=''): instance_type = {} sys_meta = utils.metadata_to_dict(instance['system_metadata']) for (key, type_fn) in system_metadata_instance_type_props.items(): type_key = ('%sinstance_type_%s' % (prefix, key)) instance_type[key] = type_fn(sys_meta[type_key]) return instance_type
[ "def", "extract_instance_type", "(", "instance", ",", "prefix", "=", "''", ")", ":", "instance_type", "=", "{", "}", "sys_meta", "=", "utils", ".", "metadata_to_dict", "(", "instance", "[", "'system_metadata'", "]", ")", "for", "(", "key", ",", "type_fn", ")", "in", "system_metadata_instance_type_props", ".", "items", "(", ")", ":", "type_key", "=", "(", "'%sinstance_type_%s'", "%", "(", "prefix", ",", "key", ")", ")", "instance_type", "[", "key", "]", "=", "type_fn", "(", "sys_meta", "[", "type_key", "]", ")", "return", "instance_type" ]
create an instancetype-like object from instances system_metadata information .
train
false
41,203
def _get_pkg_install_time(pkg): iso_time = None if (pkg is not None): location = '/var/lib/dpkg/info/{0}.list'.format(pkg) if os.path.exists(location): iso_time = (datetime.datetime.utcfromtimestamp(int(os.path.getmtime(location))).isoformat() + 'Z') return iso_time
[ "def", "_get_pkg_install_time", "(", "pkg", ")", ":", "iso_time", "=", "None", "if", "(", "pkg", "is", "not", "None", ")", ":", "location", "=", "'/var/lib/dpkg/info/{0}.list'", ".", "format", "(", "pkg", ")", "if", "os", ".", "path", ".", "exists", "(", "location", ")", ":", "iso_time", "=", "(", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "int", "(", "os", ".", "path", ".", "getmtime", "(", "location", ")", ")", ")", ".", "isoformat", "(", ")", "+", "'Z'", ")", "return", "iso_time" ]
return package install time .
train
false
41,204
def is_lib_available(library): try: __import__(library) return True except ImportError: return False
[ "def", "is_lib_available", "(", "library", ")", ":", "try", ":", "__import__", "(", "library", ")", "return", "True", "except", "ImportError", ":", "return", "False" ]
check if a python library is available .
train
false
41,205
def _filterLikelihoods(likelihoods, redThreshold=0.99999, yellowThreshold=0.999): redThreshold = (1.0 - redThreshold) yellowThreshold = (1.0 - yellowThreshold) filteredLikelihoods = [likelihoods[0]] for (i, v) in enumerate(likelihoods[1:]): if (v <= redThreshold): if (likelihoods[i] > redThreshold): filteredLikelihoods.append(v) else: filteredLikelihoods.append(yellowThreshold) else: filteredLikelihoods.append(v) return filteredLikelihoods
[ "def", "_filterLikelihoods", "(", "likelihoods", ",", "redThreshold", "=", "0.99999", ",", "yellowThreshold", "=", "0.999", ")", ":", "redThreshold", "=", "(", "1.0", "-", "redThreshold", ")", "yellowThreshold", "=", "(", "1.0", "-", "yellowThreshold", ")", "filteredLikelihoods", "=", "[", "likelihoods", "[", "0", "]", "]", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "likelihoods", "[", "1", ":", "]", ")", ":", "if", "(", "v", "<=", "redThreshold", ")", ":", "if", "(", "likelihoods", "[", "i", "]", ">", "redThreshold", ")", ":", "filteredLikelihoods", ".", "append", "(", "v", ")", "else", ":", "filteredLikelihoods", ".", "append", "(", "yellowThreshold", ")", "else", ":", "filteredLikelihoods", ".", "append", "(", "v", ")", "return", "filteredLikelihoods" ]
filter the list of raw likelihoods so that we only preserve sharp increases in likelihood .
train
true
41,206
def h1vp(v, z, n=1): if ((not isinstance(n, int)) or (n < 0)): raise ValueError('n must be a non-negative integer.') if (n == 0): return hankel1(v, z) else: return _bessel_diff_formula(v, z, n, hankel1, (-1))
[ "def", "h1vp", "(", "v", ",", "z", ",", "n", "=", "1", ")", ":", "if", "(", "(", "not", "isinstance", "(", "n", ",", "int", ")", ")", "or", "(", "n", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'n must be a non-negative integer.'", ")", "if", "(", "n", "==", "0", ")", ":", "return", "hankel1", "(", "v", ",", "z", ")", "else", ":", "return", "_bessel_diff_formula", "(", "v", ",", "z", ",", "n", ",", "hankel1", ",", "(", "-", "1", ")", ")" ]
compute nth derivative of hankel function h1v(z) with respect to z .
train
false
41,207
def download_signed_reviewer(request, uuid, **kwargs): extension = get_object_or_404(Extension.objects.without_deleted(), uuid=uuid) version = get_object_or_404(extension.versions.without_deleted(), pk=kwargs['version_id']) def is_reviewer(): return action_allowed(request, 'ContentTools', 'AddonReview') if (request.user.is_authenticated() and is_reviewer()): version.reviewer_sign_if_necessary() log.info(('Downloading reviewers signed add-on: %s version %s from %s' % (extension.pk, version.pk, version.reviewer_signed_file_path))) return _download(request, extension, version, version.reviewer_signed_file_path, public=False) else: raise PermissionDenied
[ "def", "download_signed_reviewer", "(", "request", ",", "uuid", ",", "**", "kwargs", ")", ":", "extension", "=", "get_object_or_404", "(", "Extension", ".", "objects", ".", "without_deleted", "(", ")", ",", "uuid", "=", "uuid", ")", "version", "=", "get_object_or_404", "(", "extension", ".", "versions", ".", "without_deleted", "(", ")", ",", "pk", "=", "kwargs", "[", "'version_id'", "]", ")", "def", "is_reviewer", "(", ")", ":", "return", "action_allowed", "(", "request", ",", "'ContentTools'", ",", "'AddonReview'", ")", "if", "(", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "is_reviewer", "(", ")", ")", ":", "version", ".", "reviewer_sign_if_necessary", "(", ")", "log", ".", "info", "(", "(", "'Downloading reviewers signed add-on: %s version %s from %s'", "%", "(", "extension", ".", "pk", ",", "version", ".", "pk", ",", "version", ".", "reviewer_signed_file_path", ")", ")", ")", "return", "_download", "(", "request", ",", "extension", ",", "version", ",", "version", ".", "reviewer_signed_file_path", ",", "public", "=", "False", ")", "else", ":", "raise", "PermissionDenied" ]
download an archive for a given extension/version .
train
false
41,208
def get_raw_statics_path(): r2_path = get_r2_path() return os.path.join(r2_path, 'r2', 'public')
[ "def", "get_raw_statics_path", "(", ")", ":", "r2_path", "=", "get_r2_path", "(", ")", "return", "os", ".", "path", ".", "join", "(", "r2_path", ",", "'r2'", ",", "'public'", ")" ]
return the path for the raw statics .
train
false
41,209
def unsubscribe_from_creator(user_id, creator_id): subscribers_model_creator = user_models.UserSubscribersModel.get(creator_id, strict=False) subscriptions_model_user = user_models.UserSubscriptionsModel.get(user_id, strict=False) if (user_id in subscribers_model_creator.subscriber_ids): subscribers_model_creator.subscriber_ids.remove(user_id) subscriptions_model_user.creator_ids.remove(creator_id) subscribers_model_creator.put() subscriptions_model_user.put()
[ "def", "unsubscribe_from_creator", "(", "user_id", ",", "creator_id", ")", ":", "subscribers_model_creator", "=", "user_models", ".", "UserSubscribersModel", ".", "get", "(", "creator_id", ",", "strict", "=", "False", ")", "subscriptions_model_user", "=", "user_models", ".", "UserSubscriptionsModel", ".", "get", "(", "user_id", ",", "strict", "=", "False", ")", "if", "(", "user_id", "in", "subscribers_model_creator", ".", "subscriber_ids", ")", ":", "subscribers_model_creator", ".", "subscriber_ids", ".", "remove", "(", "user_id", ")", "subscriptions_model_user", ".", "creator_ids", ".", "remove", "(", "creator_id", ")", "subscribers_model_creator", ".", "put", "(", ")", "subscriptions_model_user", ".", "put", "(", ")" ]
unsubscribe a user from a creator .
train
false
41,210
def nslookup(host): ret = [] addresses = [] cmd = ['nslookup', salt.utils.network.sanitize_host(host)] lines = __salt__['cmd.run'](cmd, python_shell=False).splitlines() for line in lines: if addresses: addresses.append(line.strip()) continue if line.startswith('Non-authoritative'): continue if ('Addresses' in line): comps = line.split(':', 1) addresses.append(comps[1].strip()) continue if (':' in line): comps = line.split(':', 1) ret.append({comps[0].strip(): comps[1].strip()}) if addresses: ret.append({'Addresses': addresses}) return ret
[ "def", "nslookup", "(", "host", ")", ":", "ret", "=", "[", "]", "addresses", "=", "[", "]", "cmd", "=", "[", "'nslookup'", ",", "salt", ".", "utils", ".", "network", ".", "sanitize_host", "(", "host", ")", "]", "lines", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ".", "splitlines", "(", ")", "for", "line", "in", "lines", ":", "if", "addresses", ":", "addresses", ".", "append", "(", "line", ".", "strip", "(", ")", ")", "continue", "if", "line", ".", "startswith", "(", "'Non-authoritative'", ")", ":", "continue", "if", "(", "'Addresses'", "in", "line", ")", ":", "comps", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "addresses", ".", "append", "(", "comps", "[", "1", "]", ".", "strip", "(", ")", ")", "continue", "if", "(", "':'", "in", "line", ")", ":", "comps", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "ret", ".", "append", "(", "{", "comps", "[", "0", "]", ".", "strip", "(", ")", ":", "comps", "[", "1", "]", ".", "strip", "(", ")", "}", ")", "if", "addresses", ":", "ret", ".", "append", "(", "{", "'Addresses'", ":", "addresses", "}", ")", "return", "ret" ]
query dns for information about a domain or ip address cli example: .
train
true
41,211
def test_simple_two_model_class_compose_2d(): R = (Rotation2D | Rotation2D) assert issubclass(R, Model) assert (R.n_inputs == 2) assert (R.n_outputs == 2) r1 = R(45, 45) assert_allclose(r1(0, 1), ((-1), 0), atol=1e-10) r2 = R(90, 90) assert_allclose(r2(0, 1), (0, (-1)), atol=1e-10) R2 = (R | R) r3 = R2(45, 45, 45, 45) assert_allclose(r3(0, 1), (0, (-1)), atol=1e-10)
[ "def", "test_simple_two_model_class_compose_2d", "(", ")", ":", "R", "=", "(", "Rotation2D", "|", "Rotation2D", ")", "assert", "issubclass", "(", "R", ",", "Model", ")", "assert", "(", "R", ".", "n_inputs", "==", "2", ")", "assert", "(", "R", ".", "n_outputs", "==", "2", ")", "r1", "=", "R", "(", "45", ",", "45", ")", "assert_allclose", "(", "r1", "(", "0", ",", "1", ")", ",", "(", "(", "-", "1", ")", ",", "0", ")", ",", "atol", "=", "1e-10", ")", "r2", "=", "R", "(", "90", ",", "90", ")", "assert_allclose", "(", "r2", "(", "0", ",", "1", ")", ",", "(", "0", ",", "(", "-", "1", ")", ")", ",", "atol", "=", "1e-10", ")", "R2", "=", "(", "R", "|", "R", ")", "r3", "=", "R2", "(", "45", ",", "45", ",", "45", ",", "45", ")", "assert_allclose", "(", "r3", "(", "0", ",", "1", ")", ",", "(", "0", ",", "(", "-", "1", ")", ")", ",", "atol", "=", "1e-10", ")" ]
a simple example consisting of two rotations .
train
false
41,212
@ensure_csrf_cookie @ensure_valid_course_key def static_tab(request, course_id, tab_slug): course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'load', course_key) tab = CourseTabList.get_tab_by_slug(course.tabs, tab_slug) if (tab is None): raise Http404 contents = get_static_tab_contents(request, course, tab) if (contents is None): raise Http404 return render_to_response('courseware/static_tab.html', {'course': course, 'tab': tab, 'tab_contents': contents})
[ "@", "ensure_csrf_cookie", "@", "ensure_valid_course_key", "def", "static_tab", "(", "request", ",", "course_id", ",", "tab_slug", ")", ":", "course_key", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "course", "=", "get_course_with_access", "(", "request", ".", "user", ",", "'load'", ",", "course_key", ")", "tab", "=", "CourseTabList", ".", "get_tab_by_slug", "(", "course", ".", "tabs", ",", "tab_slug", ")", "if", "(", "tab", "is", "None", ")", ":", "raise", "Http404", "contents", "=", "get_static_tab_contents", "(", "request", ",", "course", ",", "tab", ")", "if", "(", "contents", "is", "None", ")", ":", "raise", "Http404", "return", "render_to_response", "(", "'courseware/static_tab.html'", ",", "{", "'course'", ":", "course", ",", "'tab'", ":", "tab", ",", "'tab_contents'", ":", "contents", "}", ")" ]
display the courses tab with the given name .
train
false
41,213
def load_maps(dirspec): mapd = {} if (dirspec not in sys.path): sys.path.insert(0, dirspec) for fil in os.listdir(dirspec): if fil.endswith('.py'): mod = import_module(fil[:(-3)]) for (key, item) in mod.__dict__.items(): if key.startswith('__'): continue if (isinstance(item, dict) and ('to' in item) and ('fro' in item)): mapd[item['identifier']] = item return mapd
[ "def", "load_maps", "(", "dirspec", ")", ":", "mapd", "=", "{", "}", "if", "(", "dirspec", "not", "in", "sys", ".", "path", ")", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "dirspec", ")", "for", "fil", "in", "os", ".", "listdir", "(", "dirspec", ")", ":", "if", "fil", ".", "endswith", "(", "'.py'", ")", ":", "mod", "=", "import_module", "(", "fil", "[", ":", "(", "-", "3", ")", "]", ")", "for", "(", "key", ",", "item", ")", "in", "mod", ".", "__dict__", ".", "items", "(", ")", ":", "if", "key", ".", "startswith", "(", "'__'", ")", ":", "continue", "if", "(", "isinstance", "(", "item", ",", "dict", ")", "and", "(", "'to'", "in", "item", ")", "and", "(", "'fro'", "in", "item", ")", ")", ":", "mapd", "[", "item", "[", "'identifier'", "]", "]", "=", "item", "return", "mapd" ]
load the attribute maps .
train
true
41,214
def _categorize_result(r): from . import conf if ((u'network_error' in r) and (r[u'network_error'] is not None)): r[u'out_db_name'] = u'nerr' r[u'expected'] = u'broken' elif (((r[u'nexceptions'] == 0) and (r[u'nwarnings'] == 0)) or r[u'warning_types'].issubset(conf.noncritical_warnings)): r[u'out_db_name'] = u'good' r[u'expected'] = u'good' elif (r[u'nexceptions'] > 0): r[u'out_db_name'] = u'excp' r[u'expected'] = u'incorrect' elif (r[u'nwarnings'] > 0): r[u'out_db_name'] = u'warn' r[u'expected'] = u'incorrect' else: raise InvalidValidationAttribute(u'Unhandled validation result attributes: {0}'.format(r._attributes))
[ "def", "_categorize_result", "(", "r", ")", ":", "from", ".", "import", "conf", "if", "(", "(", "u'network_error'", "in", "r", ")", "and", "(", "r", "[", "u'network_error'", "]", "is", "not", "None", ")", ")", ":", "r", "[", "u'out_db_name'", "]", "=", "u'nerr'", "r", "[", "u'expected'", "]", "=", "u'broken'", "elif", "(", "(", "(", "r", "[", "u'nexceptions'", "]", "==", "0", ")", "and", "(", "r", "[", "u'nwarnings'", "]", "==", "0", ")", ")", "or", "r", "[", "u'warning_types'", "]", ".", "issubset", "(", "conf", ".", "noncritical_warnings", ")", ")", ":", "r", "[", "u'out_db_name'", "]", "=", "u'good'", "r", "[", "u'expected'", "]", "=", "u'good'", "elif", "(", "r", "[", "u'nexceptions'", "]", ">", "0", ")", ":", "r", "[", "u'out_db_name'", "]", "=", "u'excp'", "r", "[", "u'expected'", "]", "=", "u'incorrect'", "elif", "(", "r", "[", "u'nwarnings'", "]", ">", "0", ")", ":", "r", "[", "u'out_db_name'", "]", "=", "u'warn'", "r", "[", "u'expected'", "]", "=", "u'incorrect'", "else", ":", "raise", "InvalidValidationAttribute", "(", "u'Unhandled validation result attributes: {0}'", ".", "format", "(", "r", ".", "_attributes", ")", ")" ]
set success codes .
train
false
41,216
def get_all_test_cases(module): results = [] for name in dir(module): if (not name.startswith('Test')): continue item = getattr(module, name) if (isinstance(item, (type, types.ClassType)) and issubclass(item, unittest.TestCase)): results.append(item) return results
[ "def", "get_all_test_cases", "(", "module", ")", ":", "results", "=", "[", "]", "for", "name", "in", "dir", "(", "module", ")", ":", "if", "(", "not", "name", ".", "startswith", "(", "'Test'", ")", ")", ":", "continue", "item", "=", "getattr", "(", "module", ",", "name", ")", "if", "(", "isinstance", "(", "item", ",", "(", "type", ",", "types", ".", "ClassType", ")", ")", "and", "issubclass", "(", "item", ",", "unittest", ".", "TestCase", ")", ")", ":", "results", ".", "append", "(", "item", ")", "return", "results" ]
returns a list of all test case classes defined in a given module .
train
false
41,218
def addPointToPath(path, pixelDictionary, point, value, width): path.append(point) if (len(path) < 2): return begin = path[(-2)] addValueSegmentToPixelTable(begin, point, pixelDictionary, value, width)
[ "def", "addPointToPath", "(", "path", ",", "pixelDictionary", ",", "point", ",", "value", ",", "width", ")", ":", "path", ".", "append", "(", "point", ")", "if", "(", "len", "(", "path", ")", "<", "2", ")", ":", "return", "begin", "=", "path", "[", "(", "-", "2", ")", "]", "addValueSegmentToPixelTable", "(", "begin", ",", "point", ",", "pixelDictionary", ",", "value", ",", "width", ")" ]
add a point to a path and the pixel table .
train
false
41,219
def generate_nonce(): return random.randrange(1000000000, 2000000000)
[ "def", "generate_nonce", "(", ")", ":", "return", "random", ".", "randrange", "(", "1000000000", ",", "2000000000", ")" ]
generate pseudorandom nonce that is unlikely to repeat .
train
false
41,220
@login_required @staff_member_required @permission_required('wiki.purge_document') @check_readonly def purge_view(request): selected = request.GET.get('ids', '').split(',') to_purge = Document.deleted_objects.filter(id__in=selected) if (request.method == 'POST'): if request.POST.get('confirm_purge', False): purged = 0 for doc in to_purge: doc.purge() purged += 1 messages.info(request, ('%s document(s) were purged.' % purged)) return HttpResponseRedirect('/admin/wiki/document/') return TemplateResponse(request, 'admin/wiki/purge_documents.html', {'to_purge': to_purge})
[ "@", "login_required", "@", "staff_member_required", "@", "permission_required", "(", "'wiki.purge_document'", ")", "@", "check_readonly", "def", "purge_view", "(", "request", ")", ":", "selected", "=", "request", ".", "GET", ".", "get", "(", "'ids'", ",", "''", ")", ".", "split", "(", "','", ")", "to_purge", "=", "Document", ".", "deleted_objects", ".", "filter", "(", "id__in", "=", "selected", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "if", "request", ".", "POST", ".", "get", "(", "'confirm_purge'", ",", "False", ")", ":", "purged", "=", "0", "for", "doc", "in", "to_purge", ":", "doc", ".", "purge", "(", ")", "purged", "+=", "1", "messages", ".", "info", "(", "request", ",", "(", "'%s document(s) were purged.'", "%", "purged", ")", ")", "return", "HttpResponseRedirect", "(", "'/admin/wiki/document/'", ")", "return", "TemplateResponse", "(", "request", ",", "'admin/wiki/purge_documents.html'", ",", "{", "'to_purge'", ":", "to_purge", "}", ")" ]
interstitial admin view for purging multiple documents .
train
false
41,221
def _valarray(shape, value=np.nan, typecode=None): out = (np.ones(shape, dtype=bool) * value) if (typecode is not None): out = out.astype(typecode) if (not isinstance(out, np.ndarray)): out = np.asarray(out) return out
[ "def", "_valarray", "(", "shape", ",", "value", "=", "np", ".", "nan", ",", "typecode", "=", "None", ")", ":", "out", "=", "(", "np", ".", "ones", "(", "shape", ",", "dtype", "=", "bool", ")", "*", "value", ")", "if", "(", "typecode", "is", "not", "None", ")", ":", "out", "=", "out", ".", "astype", "(", "typecode", ")", "if", "(", "not", "isinstance", "(", "out", ",", "np", ".", "ndarray", ")", ")", ":", "out", "=", "np", ".", "asarray", "(", "out", ")", "return", "out" ]
return an array of all value .
train
false
41,222
def test_edf_stim_channel(): raw = read_raw_edf(edf_stim_channel_path, preload=True, stim_channel=(-1)) true_data = np.loadtxt(edf_txt_stim_channel_path).T (_, ns) = true_data.shape edf_data = raw._data[:, :ns] assert_array_equal(true_data[(-1)], edf_data[(-1)]) assert_array_almost_equal((true_data[0:(-1)] * 1e-06), edf_data[0:(-1)])
[ "def", "test_edf_stim_channel", "(", ")", ":", "raw", "=", "read_raw_edf", "(", "edf_stim_channel_path", ",", "preload", "=", "True", ",", "stim_channel", "=", "(", "-", "1", ")", ")", "true_data", "=", "np", ".", "loadtxt", "(", "edf_txt_stim_channel_path", ")", ".", "T", "(", "_", ",", "ns", ")", "=", "true_data", ".", "shape", "edf_data", "=", "raw", ".", "_data", "[", ":", ",", ":", "ns", "]", "assert_array_equal", "(", "true_data", "[", "(", "-", "1", ")", "]", ",", "edf_data", "[", "(", "-", "1", ")", "]", ")", "assert_array_almost_equal", "(", "(", "true_data", "[", "0", ":", "(", "-", "1", ")", "]", "*", "1e-06", ")", ",", "edf_data", "[", "0", ":", "(", "-", "1", ")", "]", ")" ]
test stim channel for edf file .
train
false
41,223
def in_travis_pr(): try: int(os.getenv(TRAVIS_PR_ENV, '')) return True except ValueError: return False
[ "def", "in_travis_pr", "(", ")", ":", "try", ":", "int", "(", "os", ".", "getenv", "(", "TRAVIS_PR_ENV", ",", "''", ")", ")", "return", "True", "except", "ValueError", ":", "return", "False" ]
detect if we are running in a pull request on travis .
train
false
41,224
def _get_vispy_caller(): records = inspect.stack() for record in records[5:]: module = record[0].f_globals['__name__'] if module.startswith('vispy'): line = str(record[0].f_lineno) func = record[3] cls = record[0].f_locals.get('self', None) clsname = ('' if (cls is None) else (cls.__class__.__name__ + '.')) caller = '{0}:{1}{2}({3}): '.format(module, clsname, func, line) return caller return 'unknown'
[ "def", "_get_vispy_caller", "(", ")", ":", "records", "=", "inspect", ".", "stack", "(", ")", "for", "record", "in", "records", "[", "5", ":", "]", ":", "module", "=", "record", "[", "0", "]", ".", "f_globals", "[", "'__name__'", "]", "if", "module", ".", "startswith", "(", "'vispy'", ")", ":", "line", "=", "str", "(", "record", "[", "0", "]", ".", "f_lineno", ")", "func", "=", "record", "[", "3", "]", "cls", "=", "record", "[", "0", "]", ".", "f_locals", ".", "get", "(", "'self'", ",", "None", ")", "clsname", "=", "(", "''", "if", "(", "cls", "is", "None", ")", "else", "(", "cls", ".", "__class__", ".", "__name__", "+", "'.'", ")", ")", "caller", "=", "'{0}:{1}{2}({3}): '", ".", "format", "(", "module", ",", "clsname", ",", "func", ",", "line", ")", "return", "caller", "return", "'unknown'" ]
helper to get vispy calling function from the stack .
train
true
41,225
def dump_theming_info(): for (namespace, lookup) in edxmako.LOOKUP.items(): print ('--- %s: %s' % (namespace, lookup.template_args['module_directory'])) for directory in lookup.directories: print (' %s' % (directory,)) print ('=' * 80) for (dirname, __, filenames) in os.walk(settings.MAKO_MODULE_DIR): print ('%s ----------------' % (dir,)) for filename in sorted(filenames): if filename.endswith('.pyc'): continue with open(os.path.join(dirname, filename)) as f: content = len(f.read()) print (' %s: %d' % (filename, content))
[ "def", "dump_theming_info", "(", ")", ":", "for", "(", "namespace", ",", "lookup", ")", "in", "edxmako", ".", "LOOKUP", ".", "items", "(", ")", ":", "print", "(", "'--- %s: %s'", "%", "(", "namespace", ",", "lookup", ".", "template_args", "[", "'module_directory'", "]", ")", ")", "for", "directory", "in", "lookup", ".", "directories", ":", "print", "(", "' %s'", "%", "(", "directory", ",", ")", ")", "print", "(", "'='", "*", "80", ")", "for", "(", "dirname", ",", "__", ",", "filenames", ")", "in", "os", ".", "walk", "(", "settings", ".", "MAKO_MODULE_DIR", ")", ":", "print", "(", "'%s ----------------'", "%", "(", "dir", ",", ")", ")", "for", "filename", "in", "sorted", "(", "filenames", ")", ":", "if", "filename", ".", "endswith", "(", "'.pyc'", ")", ":", "continue", "with", "open", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "filename", ")", ")", "as", "f", ":", "content", "=", "len", "(", "f", ".", "read", "(", ")", ")", "print", "(", "' %s: %d'", "%", "(", "filename", ",", "content", ")", ")" ]
dump a bunch of theming information .
train
false
41,226
def cleanup_ghosts(conn=None): conn = (conn if conn else get_current_connection()) for worker in Worker.all(connection=conn): if (conn._ttl(worker.key) == (-1)): ttl = worker.default_worker_ttl conn.expire(worker.key, ttl) logger.info(u'Marked ghosted worker {0} to expire in {1} seconds.'.format(worker.name, ttl))
[ "def", "cleanup_ghosts", "(", "conn", "=", "None", ")", ":", "conn", "=", "(", "conn", "if", "conn", "else", "get_current_connection", "(", ")", ")", "for", "worker", "in", "Worker", ".", "all", "(", "connection", "=", "conn", ")", ":", "if", "(", "conn", ".", "_ttl", "(", "worker", ".", "key", ")", "==", "(", "-", "1", ")", ")", ":", "ttl", "=", "worker", ".", "default_worker_ttl", "conn", ".", "expire", "(", "worker", ".", "key", ",", "ttl", ")", "logger", ".", "info", "(", "u'Marked ghosted worker {0} to expire in {1} seconds.'", ".", "format", "(", "worker", ".", "name", ",", "ttl", ")", ")" ]
rq versions < 0 .
train
false
41,227
def load_platform_subclass(cls, *args, **kwargs): this_platform = get_platform() distribution = get_distribution() subclass = None if (distribution is not None): for sc in get_all_subclasses(cls): if ((sc.distribution is not None) and (sc.distribution == distribution) and (sc.platform == this_platform)): subclass = sc if (subclass is None): for sc in get_all_subclasses(cls): if ((sc.platform == this_platform) and (sc.distribution is None)): subclass = sc if (subclass is None): subclass = cls return super(cls, subclass).__new__(subclass)
[ "def", "load_platform_subclass", "(", "cls", ",", "*", "args", ",", "**", "kwargs", ")", ":", "this_platform", "=", "get_platform", "(", ")", "distribution", "=", "get_distribution", "(", ")", "subclass", "=", "None", "if", "(", "distribution", "is", "not", "None", ")", ":", "for", "sc", "in", "get_all_subclasses", "(", "cls", ")", ":", "if", "(", "(", "sc", ".", "distribution", "is", "not", "None", ")", "and", "(", "sc", ".", "distribution", "==", "distribution", ")", "and", "(", "sc", ".", "platform", "==", "this_platform", ")", ")", ":", "subclass", "=", "sc", "if", "(", "subclass", "is", "None", ")", ":", "for", "sc", "in", "get_all_subclasses", "(", "cls", ")", ":", "if", "(", "(", "sc", ".", "platform", "==", "this_platform", ")", "and", "(", "sc", ".", "distribution", "is", "None", ")", ")", ":", "subclass", "=", "sc", "if", "(", "subclass", "is", "None", ")", ":", "subclass", "=", "cls", "return", "super", "(", "cls", ",", "subclass", ")", ".", "__new__", "(", "subclass", ")" ]
used by modules like user to have different implementations based on detected platform .
train
false
41,228
@utils.arg('secgroup', metavar='<secgroup>', help=_('ID or name of security group.')) @utils.arg('name', metavar='<name>', help=_('Name of security group.')) @utils.arg('description', metavar='<description>', help=_('Description of security group.')) @deprecated_network def do_secgroup_update(cs, args): sg = _get_secgroup(cs, args.secgroup) secgroup = cs.security_groups.update(sg, args.name, args.description) _print_secgroups([secgroup])
[ "@", "utils", ".", "arg", "(", "'secgroup'", ",", "metavar", "=", "'<secgroup>'", ",", "help", "=", "_", "(", "'ID or name of security group.'", ")", ")", "@", "utils", ".", "arg", "(", "'name'", ",", "metavar", "=", "'<name>'", ",", "help", "=", "_", "(", "'Name of security group.'", ")", ")", "@", "utils", ".", "arg", "(", "'description'", ",", "metavar", "=", "'<description>'", ",", "help", "=", "_", "(", "'Description of security group.'", ")", ")", "@", "deprecated_network", "def", "do_secgroup_update", "(", "cs", ",", "args", ")", ":", "sg", "=", "_get_secgroup", "(", "cs", ",", "args", ".", "secgroup", ")", "secgroup", "=", "cs", ".", "security_groups", ".", "update", "(", "sg", ",", "args", ".", "name", ",", "args", ".", "description", ")", "_print_secgroups", "(", "[", "secgroup", "]", ")" ]
update a security group .
train
false
41,229
@register.tag(u'buttons') def bootstrap_buttons(parser, token): kwargs = parse_token_contents(parser, token) kwargs[u'nodelist'] = parser.parse((u'endbuttons',)) parser.delete_first_token() return ButtonsNode(**kwargs)
[ "@", "register", ".", "tag", "(", "u'buttons'", ")", "def", "bootstrap_buttons", "(", "parser", ",", "token", ")", ":", "kwargs", "=", "parse_token_contents", "(", "parser", ",", "token", ")", "kwargs", "[", "u'nodelist'", "]", "=", "parser", ".", "parse", "(", "(", "u'endbuttons'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "return", "ButtonsNode", "(", "**", "kwargs", ")" ]
render buttons for form **tag name**:: buttons **parameters**: submit text for a submit button reset text for a reset button **usage**:: {% buttons %}{% endbuttons %} **example**:: {% buttons submit=ok reset="cancel" %}{% endbuttons %} .
train
false
41,230
def _get_svc(rcd, service_status): ena = None lines = __salt__['cmd.run']('{0} rcvar'.format(rcd)).splitlines() for rcvar in lines: if (rcvar.startswith('$') and ('={0}'.format(service_status) in rcvar)): ena = 'yes' elif rcvar.startswith('#'): svc = rcvar.split(' ', 1)[1] else: continue if (ena and svc): return svc return None
[ "def", "_get_svc", "(", "rcd", ",", "service_status", ")", ":", "ena", "=", "None", "lines", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'{0} rcvar'", ".", "format", "(", "rcd", ")", ")", ".", "splitlines", "(", ")", "for", "rcvar", "in", "lines", ":", "if", "(", "rcvar", ".", "startswith", "(", "'$'", ")", "and", "(", "'={0}'", ".", "format", "(", "service_status", ")", "in", "rcvar", ")", ")", ":", "ena", "=", "'yes'", "elif", "rcvar", ".", "startswith", "(", "'#'", ")", ":", "svc", "=", "rcvar", ".", "split", "(", "' '", ",", "1", ")", "[", "1", "]", "else", ":", "continue", "if", "(", "ena", "and", "svc", ")", ":", "return", "svc", "return", "None" ]
returns a unique service status .
train
true
41,232
def setDebugging(on): Deferred.debug = bool(on)
[ "def", "setDebugging", "(", "on", ")", ":", "Deferred", ".", "debug", "=", "bool", "(", "on", ")" ]
enable or disable l{deferred} debugging .
train
false
41,234
def rotate_swap(lst, dist): n = len(lst) if ((dist == 0) or (dist == n)): return i = p = dist j = (n - p) while (i != j): if (i > j): sublist_swap(lst, (p - i), p, j) i -= j else: sublist_swap(lst, (p - i), ((p + j) - i), i) j -= i sublist_swap(lst, (p - i), p, i)
[ "def", "rotate_swap", "(", "lst", ",", "dist", ")", ":", "n", "=", "len", "(", "lst", ")", "if", "(", "(", "dist", "==", "0", ")", "or", "(", "dist", "==", "n", ")", ")", ":", "return", "i", "=", "p", "=", "dist", "j", "=", "(", "n", "-", "p", ")", "while", "(", "i", "!=", "j", ")", ":", "if", "(", "i", ">", "j", ")", ":", "sublist_swap", "(", "lst", ",", "(", "p", "-", "i", ")", ",", "p", ",", "j", ")", "i", "-=", "j", "else", ":", "sublist_swap", "(", "lst", ",", "(", "p", "-", "i", ")", ",", "(", "(", "p", "+", "j", ")", "-", "i", ")", ",", "i", ")", "j", "-=", "i", "sublist_swap", "(", "lst", ",", "(", "p", "-", "i", ")", ",", "p", ",", "i", ")" ]
a recursive sub-list swapping method .
train
false
41,236
def arg_to_iter(arg): if (arg is None): return [] elif ((not isinstance(arg, dict)) and hasattr(arg, '__iter__')): return arg else: return [arg]
[ "def", "arg_to_iter", "(", "arg", ")", ":", "if", "(", "arg", "is", "None", ")", ":", "return", "[", "]", "elif", "(", "(", "not", "isinstance", "(", "arg", ",", "dict", ")", ")", "and", "hasattr", "(", "arg", ",", "'__iter__'", ")", ")", ":", "return", "arg", "else", ":", "return", "[", "arg", "]" ]
convert an argument to an iterable .
train
false
41,237
def construct_grids(batch): xmin = (batch.x_left_lower_corner + batch.grid_size) xmax = (xmin + (batch.Nx * batch.grid_size)) ymin = (batch.y_left_lower_corner + batch.grid_size) ymax = (ymin + (batch.Ny * batch.grid_size)) xgrid = np.arange(xmin, xmax, batch.grid_size) ygrid = np.arange(ymin, ymax, batch.grid_size) return (xgrid, ygrid)
[ "def", "construct_grids", "(", "batch", ")", ":", "xmin", "=", "(", "batch", ".", "x_left_lower_corner", "+", "batch", ".", "grid_size", ")", "xmax", "=", "(", "xmin", "+", "(", "batch", ".", "Nx", "*", "batch", ".", "grid_size", ")", ")", "ymin", "=", "(", "batch", ".", "y_left_lower_corner", "+", "batch", ".", "grid_size", ")", "ymax", "=", "(", "ymin", "+", "(", "batch", ".", "Ny", "*", "batch", ".", "grid_size", ")", ")", "xgrid", "=", "np", ".", "arange", "(", "xmin", ",", "xmax", ",", "batch", ".", "grid_size", ")", "ygrid", "=", "np", ".", "arange", "(", "ymin", ",", "ymax", ",", "batch", ".", "grid_size", ")", "return", "(", "xgrid", ",", "ygrid", ")" ]
construct the map grid from the batch object parameters batch : batch object the object returned by :func:fetch_species_distributions returns : 1-d arrays the grid corresponding to the values in batch .
train
false
41,238
def auth_admin_url(): global url, username, password logging.debug(((('HTTP basic authentication with username=' + username) + ', password=') + password)) if (username and password): try: password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() password_mgr.add_password(None, url, username, password) handler = urllib2.HTTPBasicAuthHandler(password_mgr) opener = urllib2.build_opener(handler) opener.open(url) urllib2.install_opener(opener) except urllib2.HTTPError as e1: logging.error(((('HTTPError: ' + str(e1.reason)) + ' ') + str(e1.code))) except urllib2.URLError as e2: logging.error(('URLError: ' + str(e2.reason))) except Exception as e3: logging.error(('Error: ' + str(e3.message)))
[ "def", "auth_admin_url", "(", ")", ":", "global", "url", ",", "username", ",", "password", "logging", ".", "debug", "(", "(", "(", "(", "'HTTP basic authentication with username='", "+", "username", ")", "+", "', password='", ")", "+", "password", ")", ")", "if", "(", "username", "and", "password", ")", ":", "try", ":", "password_mgr", "=", "urllib2", ".", "HTTPPasswordMgrWithDefaultRealm", "(", ")", "password_mgr", ".", "add_password", "(", "None", ",", "url", ",", "username", ",", "password", ")", "handler", "=", "urllib2", ".", "HTTPBasicAuthHandler", "(", "password_mgr", ")", "opener", "=", "urllib2", ".", "build_opener", "(", "handler", ")", "opener", ".", "open", "(", "url", ")", "urllib2", ".", "install_opener", "(", "opener", ")", "except", "urllib2", ".", "HTTPError", "as", "e1", ":", "logging", ".", "error", "(", "(", "(", "(", "'HTTPError: '", "+", "str", "(", "e1", ".", "reason", ")", ")", "+", "' '", ")", "+", "str", "(", "e1", ".", "code", ")", ")", ")", "except", "urllib2", ".", "URLError", "as", "e2", ":", "logging", ".", "error", "(", "(", "'URLError: '", "+", "str", "(", "e2", ".", "reason", ")", ")", ")", "except", "Exception", "as", "e3", ":", "logging", ".", "error", "(", "(", "'Error: '", "+", "str", "(", "e3", ".", "message", ")", ")", ")" ]
returns fetched url .
train
false
41,239
def init_subsystems(subsystem_types, options=None): for s in subsystem_types: if (not Subsystem.is_subsystem_type(s)): raise TypeError(u'{} is not a subclass of `Subsystem`'.format(s)) optionables = Subsystem.closure(subsystem_types) if options: allowed_scopes = {o.options_scope for o in optionables} for scope in options.keys(): if (scope not in allowed_scopes): raise ValueError(u'`{}` is not the scope of any of these subsystems: {}'.format(scope, optionables)) updated_options = (dict(Subsystem._options.items()) if Subsystem._options else {}) if options: updated_options.update(options) Subsystem.set_options(create_options_for_optionables(optionables, options=updated_options))
[ "def", "init_subsystems", "(", "subsystem_types", ",", "options", "=", "None", ")", ":", "for", "s", "in", "subsystem_types", ":", "if", "(", "not", "Subsystem", ".", "is_subsystem_type", "(", "s", ")", ")", ":", "raise", "TypeError", "(", "u'{} is not a subclass of `Subsystem`'", ".", "format", "(", "s", ")", ")", "optionables", "=", "Subsystem", ".", "closure", "(", "subsystem_types", ")", "if", "options", ":", "allowed_scopes", "=", "{", "o", ".", "options_scope", "for", "o", "in", "optionables", "}", "for", "scope", "in", "options", ".", "keys", "(", ")", ":", "if", "(", "scope", "not", "in", "allowed_scopes", ")", ":", "raise", "ValueError", "(", "u'`{}` is not the scope of any of these subsystems: {}'", ".", "format", "(", "scope", ",", "optionables", ")", ")", "updated_options", "=", "(", "dict", "(", "Subsystem", ".", "_options", ".", "items", "(", ")", ")", "if", "Subsystem", ".", "_options", "else", "{", "}", ")", "if", "options", ":", "updated_options", ".", "update", "(", "options", ")", "Subsystem", ".", "set_options", "(", "create_options_for_optionables", "(", "optionables", ",", "options", "=", "updated_options", ")", ")" ]
initialize subsystems for use in tests .
train
false
41,240
def test_tl_sample_wrong_X(): tl = TomekLinks(random_state=RND_SEED) tl.fit(X, Y) assert_raises(RuntimeError, tl.sample, np.random.random((100, 40)), np.array((([0] * 50) + ([1] * 50))))
[ "def", "test_tl_sample_wrong_X", "(", ")", ":", "tl", "=", "TomekLinks", "(", "random_state", "=", "RND_SEED", ")", "tl", ".", "fit", "(", "X", ",", "Y", ")", "assert_raises", "(", "RuntimeError", ",", "tl", ".", "sample", ",", "np", ".", "random", ".", "random", "(", "(", "100", ",", "40", ")", ")", ",", "np", ".", "array", "(", "(", "(", "[", "0", "]", "*", "50", ")", "+", "(", "[", "1", "]", "*", "50", ")", ")", ")", ")" ]
test either if an error is raised when x is different at fitting and sampling .
train
false
41,242
def _TestRemovePhotos(tester, user_cookie, request_dict): validator = tester.validator (user_id, device_id) = tester.GetIdsFromCookie(user_cookie) request_dict = deepcopy(request_dict) user = validator.GetModelObject(User, user_id) actual_dict = tester.SendRequest('remove_photos', user_cookie, request_dict) op_dict = tester._DeriveNotificationOpDict(user_id, device_id, request_dict) for request_ep in request_dict['episodes']: episode_id = request_ep['episode_id'] episode = validator.GetModelObject(Episode, episode_id) for photo_id in request_ep['photo_ids']: post = validator.GetModelObject(Post, DBKey(episode_id, photo_id)) if (not post.IsRemoved()): validator.ValidateUpdateDBObject(Post, episode_id=episode_id, photo_id=photo_id, labels=post.labels.combine().union([Post.REMOVED])) invalidate = {'episodes': [{'episode_id': request_ep['episode_id'], 'get_photos': True} for request_ep in request_dict['episodes']]} validator.ValidateNotification('remove_photos', user_id, op_dict, invalidate) validator.ValidateViewpointAccounting(user.private_vp_id) tester._CompareResponseDicts('remove_photos', user_id, request_dict, {}, actual_dict) return actual_dict
[ "def", "_TestRemovePhotos", "(", "tester", ",", "user_cookie", ",", "request_dict", ")", ":", "validator", "=", "tester", ".", "validator", "(", "user_id", ",", "device_id", ")", "=", "tester", ".", "GetIdsFromCookie", "(", "user_cookie", ")", "request_dict", "=", "deepcopy", "(", "request_dict", ")", "user", "=", "validator", ".", "GetModelObject", "(", "User", ",", "user_id", ")", "actual_dict", "=", "tester", ".", "SendRequest", "(", "'remove_photos'", ",", "user_cookie", ",", "request_dict", ")", "op_dict", "=", "tester", ".", "_DeriveNotificationOpDict", "(", "user_id", ",", "device_id", ",", "request_dict", ")", "for", "request_ep", "in", "request_dict", "[", "'episodes'", "]", ":", "episode_id", "=", "request_ep", "[", "'episode_id'", "]", "episode", "=", "validator", ".", "GetModelObject", "(", "Episode", ",", "episode_id", ")", "for", "photo_id", "in", "request_ep", "[", "'photo_ids'", "]", ":", "post", "=", "validator", ".", "GetModelObject", "(", "Post", ",", "DBKey", "(", "episode_id", ",", "photo_id", ")", ")", "if", "(", "not", "post", ".", "IsRemoved", "(", ")", ")", ":", "validator", ".", "ValidateUpdateDBObject", "(", "Post", ",", "episode_id", "=", "episode_id", ",", "photo_id", "=", "photo_id", ",", "labels", "=", "post", ".", "labels", ".", "combine", "(", ")", ".", "union", "(", "[", "Post", ".", "REMOVED", "]", ")", ")", "invalidate", "=", "{", "'episodes'", ":", "[", "{", "'episode_id'", ":", "request_ep", "[", "'episode_id'", "]", ",", "'get_photos'", ":", "True", "}", "for", "request_ep", "in", "request_dict", "[", "'episodes'", "]", "]", "}", "validator", ".", "ValidateNotification", "(", "'remove_photos'", ",", "user_id", ",", "op_dict", ",", "invalidate", ")", "validator", ".", "ValidateViewpointAccounting", "(", "user", ".", "private_vp_id", ")", "tester", ".", "_CompareResponseDicts", "(", "'remove_photos'", ",", "user_id", ",", "request_dict", ",", "{", "}", ",", "actual_dict", ")", "return", "actual_dict" ]
called by the servicetester in order to test remove_photos service api call .
train
false
41,243
def date_gen(start, end, trading_calendar, delta=timedelta(minutes=1), repeats=None): daily_delta = (not (delta.total_seconds() % timedelta(days=1).total_seconds())) cur = start if daily_delta: cur = cur.replace(hour=0, minute=0, second=0, microsecond=0) def advance_current(cur): '\n Advances the current dt skipping non market days and minutes.\n ' cur = (cur + delta) currently_executing = ((daily_delta and (cur in trading_calendar.all_sessions)) or trading_calendar.is_open_on_minute(cur)) if currently_executing: return cur elif daily_delta: return trading_calendar.minute_to_session_label(cur) else: return trading_calendar.open_and_close_for_session(trading_calendar.minute_to_session_label(cur))[0] while (cur < end): if repeats: for j in range(repeats): (yield cur) else: (yield cur) cur = advance_current(cur)
[ "def", "date_gen", "(", "start", ",", "end", ",", "trading_calendar", ",", "delta", "=", "timedelta", "(", "minutes", "=", "1", ")", ",", "repeats", "=", "None", ")", ":", "daily_delta", "=", "(", "not", "(", "delta", ".", "total_seconds", "(", ")", "%", "timedelta", "(", "days", "=", "1", ")", ".", "total_seconds", "(", ")", ")", ")", "cur", "=", "start", "if", "daily_delta", ":", "cur", "=", "cur", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ")", "def", "advance_current", "(", "cur", ")", ":", "cur", "=", "(", "cur", "+", "delta", ")", "currently_executing", "=", "(", "(", "daily_delta", "and", "(", "cur", "in", "trading_calendar", ".", "all_sessions", ")", ")", "or", "trading_calendar", ".", "is_open_on_minute", "(", "cur", ")", ")", "if", "currently_executing", ":", "return", "cur", "elif", "daily_delta", ":", "return", "trading_calendar", ".", "minute_to_session_label", "(", "cur", ")", "else", ":", "return", "trading_calendar", ".", "open_and_close_for_session", "(", "trading_calendar", ".", "minute_to_session_label", "(", "cur", ")", ")", "[", "0", "]", "while", "(", "cur", "<", "end", ")", ":", "if", "repeats", ":", "for", "j", "in", "range", "(", "repeats", ")", ":", "(", "yield", "cur", ")", "else", ":", "(", "yield", "cur", ")", "cur", "=", "advance_current", "(", "cur", ")" ]
utility to generate a stream of dates .
train
false
41,244
def hashtags(string): return [b for (a, b) in TWITTER_HASHTAG.findall(string)]
[ "def", "hashtags", "(", "string", ")", ":", "return", "[", "b", "for", "(", "a", ",", "b", ")", "in", "TWITTER_HASHTAG", ".", "findall", "(", "string", ")", "]" ]
returns a list of hashtags from a tweet .
train
false
41,245
def AddMonths(start_date, months): current_date = start_date i = 0 while (i < months): month_days = calendar.monthrange(current_date.year, current_date.month)[1] current_date += timedelta(days=month_days) i += 1 return current_date
[ "def", "AddMonths", "(", "start_date", ",", "months", ")", ":", "current_date", "=", "start_date", "i", "=", "0", "while", "(", "i", "<", "months", ")", ":", "month_days", "=", "calendar", ".", "monthrange", "(", "current_date", ".", "year", ",", "current_date", ".", "month", ")", "[", "1", "]", "current_date", "+=", "timedelta", "(", "days", "=", "month_days", ")", "i", "+=", "1", "return", "current_date" ]
a simple convenience utility for adding months to a given start date .
train
true
41,247
def validate_cluster_configuration(cluster_config): schema = {'$schema': 'http://json-schema.org/draft-04/schema#', 'type': 'object', 'required': ['control_node', 'agent_nodes'], 'properties': {'control_node': {'type': 'string'}, 'agent_nodes': {'type': 'array', 'items': {'type': 'object', 'required': ['public', 'private'], 'properties': {'public': {'type': 'string'}, 'private': {'type': 'string'}}}}}, 'additionalProperties': 'true'} v = Draft4Validator(schema, format_checker=FormatChecker()) v.validate(cluster_config)
[ "def", "validate_cluster_configuration", "(", "cluster_config", ")", ":", "schema", "=", "{", "'$schema'", ":", "'http://json-schema.org/draft-04/schema#'", ",", "'type'", ":", "'object'", ",", "'required'", ":", "[", "'control_node'", ",", "'agent_nodes'", "]", ",", "'properties'", ":", "{", "'control_node'", ":", "{", "'type'", ":", "'string'", "}", ",", "'agent_nodes'", ":", "{", "'type'", ":", "'array'", ",", "'items'", ":", "{", "'type'", ":", "'object'", ",", "'required'", ":", "[", "'public'", ",", "'private'", "]", ",", "'properties'", ":", "{", "'public'", ":", "{", "'type'", ":", "'string'", "}", ",", "'private'", ":", "{", "'type'", ":", "'string'", "}", "}", "}", "}", "}", ",", "'additionalProperties'", ":", "'true'", "}", "v", "=", "Draft4Validator", "(", "schema", ",", "format_checker", "=", "FormatChecker", "(", ")", ")", "v", ".", "validate", "(", "cluster_config", ")" ]
validate a provided cluster configuration .
train
false
41,249
def _datetime_property(header): def getter(self): value = self.headers.get(header, None) if (value is not None): try: parts = parsedate(self.headers[header])[:7] return datetime(*(parts + (UTC,))) except Exception: return None def setter(self, value): if isinstance(value, ((float,) + six.integer_types)): self.headers[header] = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(value)) elif isinstance(value, datetime): self.headers[header] = value.strftime('%a, %d %b %Y %H:%M:%S GMT') else: self.headers[header] = value return property(getter, setter, doc=('Retrieve and set the %s header as a datetime, set it with a datetime, int, or str' % header))
[ "def", "_datetime_property", "(", "header", ")", ":", "def", "getter", "(", "self", ")", ":", "value", "=", "self", ".", "headers", ".", "get", "(", "header", ",", "None", ")", "if", "(", "value", "is", "not", "None", ")", ":", "try", ":", "parts", "=", "parsedate", "(", "self", ".", "headers", "[", "header", "]", ")", "[", ":", "7", "]", "return", "datetime", "(", "*", "(", "parts", "+", "(", "UTC", ",", ")", ")", ")", "except", "Exception", ":", "return", "None", "def", "setter", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "(", "float", ",", ")", "+", "six", ".", "integer_types", ")", ")", ":", "self", ".", "headers", "[", "header", "]", "=", "time", ".", "strftime", "(", "'%a, %d %b %Y %H:%M:%S GMT'", ",", "time", ".", "gmtime", "(", "value", ")", ")", "elif", "isinstance", "(", "value", ",", "datetime", ")", ":", "self", ".", "headers", "[", "header", "]", "=", "value", ".", "strftime", "(", "'%a, %d %b %Y %H:%M:%S GMT'", ")", "else", ":", "self", ".", "headers", "[", "header", "]", "=", "value", "return", "property", "(", "getter", ",", "setter", ",", "doc", "=", "(", "'Retrieve and set the %s header as a datetime, set it with a datetime, int, or str'", "%", "header", ")", ")" ]
set and retrieve the datetime value of self .
train
false
41,250
def handle_rss_api(output, kwargs): name = kwargs.get('keyword') if (not name): name = kwargs.get('name') if (not name): return None feed = config.get_config('rss', name) if feed: feed.set_dict(kwargs) else: config.ConfigRSS(name, kwargs) return name
[ "def", "handle_rss_api", "(", "output", ",", "kwargs", ")", ":", "name", "=", "kwargs", ".", "get", "(", "'keyword'", ")", "if", "(", "not", "name", ")", ":", "name", "=", "kwargs", ".", "get", "(", "'name'", ")", "if", "(", "not", "name", ")", ":", "return", "None", "feed", "=", "config", ".", "get_config", "(", "'rss'", ",", "name", ")", "if", "feed", ":", "feed", ".", "set_dict", "(", "kwargs", ")", "else", ":", "config", ".", "ConfigRSS", "(", "name", ",", "kwargs", ")", "return", "name" ]
special handler for api-call set_config [rss] .
train
false
41,253
def parse_xcu(raw, origin=u'%origin%'): ans = {} root = etree.fromstring(raw) for node in XPath(u'//prop[@oor:name="Format"]/value[text()="DICT_SPELL"]/../..')(root): value = XPath(u'descendant::prop[@oor:name="Locations"]/value')(node) if (len(value[0]) == 0): paths = u''.join(XPath(u'descendant::prop[@oor:name="Locations"]/value/text()')(node)).replace(u'%origin%', origin).split() else: paths = [c.text.replace(u'%origin%', origin) for v in value for c in v.iterchildren(u'*') if c.text] (aff, dic) = (paths if paths[0].endswith(u'.aff') else reversed(paths)) locales = u''.join(XPath(u'descendant::prop[@oor:name="Locales"]/value/text()')(node)).split() ans[(dic, aff)] = locales return ans
[ "def", "parse_xcu", "(", "raw", ",", "origin", "=", "u'%origin%'", ")", ":", "ans", "=", "{", "}", "root", "=", "etree", ".", "fromstring", "(", "raw", ")", "for", "node", "in", "XPath", "(", "u'//prop[@oor:name=\"Format\"]/value[text()=\"DICT_SPELL\"]/../..'", ")", "(", "root", ")", ":", "value", "=", "XPath", "(", "u'descendant::prop[@oor:name=\"Locations\"]/value'", ")", "(", "node", ")", "if", "(", "len", "(", "value", "[", "0", "]", ")", "==", "0", ")", ":", "paths", "=", "u''", ".", "join", "(", "XPath", "(", "u'descendant::prop[@oor:name=\"Locations\"]/value/text()'", ")", "(", "node", ")", ")", ".", "replace", "(", "u'%origin%'", ",", "origin", ")", ".", "split", "(", ")", "else", ":", "paths", "=", "[", "c", ".", "text", ".", "replace", "(", "u'%origin%'", ",", "origin", ")", "for", "v", "in", "value", "for", "c", "in", "v", ".", "iterchildren", "(", "u'*'", ")", "if", "c", ".", "text", "]", "(", "aff", ",", "dic", ")", "=", "(", "paths", "if", "paths", "[", "0", "]", ".", "endswith", "(", "u'.aff'", ")", "else", "reversed", "(", "paths", ")", ")", "locales", "=", "u''", ".", "join", "(", "XPath", "(", "u'descendant::prop[@oor:name=\"Locales\"]/value/text()'", ")", "(", "node", ")", ")", ".", "split", "(", ")", "ans", "[", "(", "dic", ",", "aff", ")", "]", "=", "locales", "return", "ans" ]
get the dictionary and affix file names as well as supported locales for each dictionary .
train
false
41,254
def request_config(hass, name, callback, description=None, description_image=None, submit_caption=None, fields=None, link_name=None, link_url=None, entity_picture=None): instance = _get_instance(hass) request_id = instance.request_config(name, callback, description, description_image, submit_caption, fields, link_name, link_url, entity_picture) _REQUESTS[request_id] = instance return request_id
[ "def", "request_config", "(", "hass", ",", "name", ",", "callback", ",", "description", "=", "None", ",", "description_image", "=", "None", ",", "submit_caption", "=", "None", ",", "fields", "=", "None", ",", "link_name", "=", "None", ",", "link_url", "=", "None", ",", "entity_picture", "=", "None", ")", ":", "instance", "=", "_get_instance", "(", "hass", ")", "request_id", "=", "instance", ".", "request_config", "(", "name", ",", "callback", ",", "description", ",", "description_image", ",", "submit_caption", ",", "fields", ",", "link_name", ",", "link_url", ",", "entity_picture", ")", "_REQUESTS", "[", "request_id", "]", "=", "instance", "return", "request_id" ]
create a new request for configuration .
train
false
41,255
def test_oss_init(): oss = OneSidedSelection(random_state=RND_SEED) assert_equal(oss.n_seeds_S, 1) assert_equal(oss.n_jobs, 1) assert_equal(oss.random_state, RND_SEED)
[ "def", "test_oss_init", "(", ")", ":", "oss", "=", "OneSidedSelection", "(", "random_state", "=", "RND_SEED", ")", "assert_equal", "(", "oss", ".", "n_seeds_S", ",", "1", ")", "assert_equal", "(", "oss", ".", "n_jobs", ",", "1", ")", "assert_equal", "(", "oss", ".", "random_state", ",", "RND_SEED", ")" ]
test the initialisation of the object .
train
false
41,256
def _filter_samples(sample_dirs, changed_files): result = [] for sample_dir in sample_dirs: for changed_file in changed_files: if changed_file.startswith(sample_dir): result.append(sample_dir) return list(set(result))
[ "def", "_filter_samples", "(", "sample_dirs", ",", "changed_files", ")", ":", "result", "=", "[", "]", "for", "sample_dir", "in", "sample_dirs", ":", "for", "changed_file", "in", "changed_files", ":", "if", "changed_file", ".", "startswith", "(", "sample_dir", ")", ":", "result", ".", "append", "(", "sample_dir", ")", "return", "list", "(", "set", "(", "result", ")", ")" ]
filers the list of sample directories to only include directories that contain files in the list of changed files .
train
false
41,257
def dump_publickey(type, pkey): bio = _new_mem_buf() if (type == FILETYPE_PEM): write_bio = _lib.PEM_write_bio_PUBKEY elif (type == FILETYPE_ASN1): write_bio = _lib.i2d_PUBKEY_bio else: raise ValueError('type argument must be FILETYPE_PEM or FILETYPE_ASN1') result_code = write_bio(bio, pkey._pkey) if (result_code != 1): _raise_current_error() return _bio_to_string(bio)
[ "def", "dump_publickey", "(", "type", ",", "pkey", ")", ":", "bio", "=", "_new_mem_buf", "(", ")", "if", "(", "type", "==", "FILETYPE_PEM", ")", ":", "write_bio", "=", "_lib", ".", "PEM_write_bio_PUBKEY", "elif", "(", "type", "==", "FILETYPE_ASN1", ")", ":", "write_bio", "=", "_lib", ".", "i2d_PUBKEY_bio", "else", ":", "raise", "ValueError", "(", "'type argument must be FILETYPE_PEM or FILETYPE_ASN1'", ")", "result_code", "=", "write_bio", "(", "bio", ",", "pkey", ".", "_pkey", ")", "if", "(", "result_code", "!=", "1", ")", ":", "_raise_current_error", "(", ")", "return", "_bio_to_string", "(", "bio", ")" ]
dump a public key to a buffer .
train
true
41,258
def get_version_string(): version = (u'%s.%s' % (VERSION[0], VERSION[1])) if (VERSION[2] or VERSION[3]): version += (u'.%s' % VERSION[2]) if VERSION[3]: version += (u'.%s' % VERSION[3]) if (VERSION[4] != u'final'): if (VERSION[4] == u'rc'): version += (u' RC%s' % VERSION[5]) else: version += (u' %s %s' % (VERSION[4], VERSION[5])) if (not is_release()): version += u' (dev)' return version
[ "def", "get_version_string", "(", ")", ":", "version", "=", "(", "u'%s.%s'", "%", "(", "VERSION", "[", "0", "]", ",", "VERSION", "[", "1", "]", ")", ")", "if", "(", "VERSION", "[", "2", "]", "or", "VERSION", "[", "3", "]", ")", ":", "version", "+=", "(", "u'.%s'", "%", "VERSION", "[", "2", "]", ")", "if", "VERSION", "[", "3", "]", ":", "version", "+=", "(", "u'.%s'", "%", "VERSION", "[", "3", "]", ")", "if", "(", "VERSION", "[", "4", "]", "!=", "u'final'", ")", ":", "if", "(", "VERSION", "[", "4", "]", "==", "u'rc'", ")", ":", "version", "+=", "(", "u' RC%s'", "%", "VERSION", "[", "5", "]", ")", "else", ":", "version", "+=", "(", "u' %s %s'", "%", "(", "VERSION", "[", "4", "]", ",", "VERSION", "[", "5", "]", ")", ")", "if", "(", "not", "is_release", "(", ")", ")", ":", "version", "+=", "u' (dev)'", "return", "version" ]
return the review board version as a human-readable string .
train
false
41,260
def isWiFiEnabled(): try: mContext = autoclass('android.content.Context') pythonActivity = autoclass('org.renpy.android.PythonService') wifiManager = cast('android.net.wifi.WifiManager', pythonActivity.mService.getSystemService(mContext.WIFI_SERVICE)) return wifiManager.isWifiEnabled() except Exception as e: return None
[ "def", "isWiFiEnabled", "(", ")", ":", "try", ":", "mContext", "=", "autoclass", "(", "'android.content.Context'", ")", "pythonActivity", "=", "autoclass", "(", "'org.renpy.android.PythonService'", ")", "wifiManager", "=", "cast", "(", "'android.net.wifi.WifiManager'", ",", "pythonActivity", ".", "mService", ".", "getSystemService", "(", "mContext", ".", "WIFI_SERVICE", ")", ")", "return", "wifiManager", ".", "isWifiEnabled", "(", ")", "except", "Exception", "as", "e", ":", "return", "None" ]
returns none if an error .
train
false
41,261
def symptom_database_connection_is_not_SQLite(): return ((CONF.database.connection is not None) and ('sqlite' in CONF.database.connection))
[ "def", "symptom_database_connection_is_not_SQLite", "(", ")", ":", "return", "(", "(", "CONF", ".", "database", ".", "connection", "is", "not", "None", ")", "and", "(", "'sqlite'", "in", "CONF", ".", "database", ".", "connection", ")", ")" ]
sqlite is not recommended for production deployments .
train
false
41,264
def _nonresident_page_regions(status_bytes, incore_mask, max_region_len=None): assert ((max_region_len is None) or (max_region_len > 0)) start = None for (i, x) in enumerate(status_bytes): in_core = (x & incore_mask) if (start is None): if (not in_core): start = i else: count = (i - start) if in_core: (yield (start, count)) start = None elif (max_region_len and (count >= max_region_len)): (yield (start, count)) start = i if (start is not None): (yield (start, (len(status_bytes) - start)))
[ "def", "_nonresident_page_regions", "(", "status_bytes", ",", "incore_mask", ",", "max_region_len", "=", "None", ")", ":", "assert", "(", "(", "max_region_len", "is", "None", ")", "or", "(", "max_region_len", ">", "0", ")", ")", "start", "=", "None", "for", "(", "i", ",", "x", ")", "in", "enumerate", "(", "status_bytes", ")", ":", "in_core", "=", "(", "x", "&", "incore_mask", ")", "if", "(", "start", "is", "None", ")", ":", "if", "(", "not", "in_core", ")", ":", "start", "=", "i", "else", ":", "count", "=", "(", "i", "-", "start", ")", "if", "in_core", ":", "(", "yield", "(", "start", ",", "count", ")", ")", "start", "=", "None", "elif", "(", "max_region_len", "and", "(", "count", ">=", "max_region_len", ")", ")", ":", "(", "yield", "(", "start", ",", "count", ")", ")", "start", "=", "i", "if", "(", "start", "is", "not", "None", ")", ":", "(", "yield", "(", "start", ",", "(", "len", "(", "status_bytes", ")", "-", "start", ")", ")", ")" ]
return pairs in ascending start_page order for each contiguous region of nonresident pages indicated by the mincore() status_bytes .
train
false
41,265
def read_obj(addr_space, types, member_list, vaddr): if (len(member_list) < 2): raise Exception(('Invalid type/member ' + str(member_list))) (offset, current_type) = get_obj_offset(types, member_list) return read_value(addr_space, current_type, (vaddr + offset))
[ "def", "read_obj", "(", "addr_space", ",", "types", ",", "member_list", ",", "vaddr", ")", ":", "if", "(", "len", "(", "member_list", ")", "<", "2", ")", ":", "raise", "Exception", "(", "(", "'Invalid type/member '", "+", "str", "(", "member_list", ")", ")", ")", "(", "offset", ",", "current_type", ")", "=", "get_obj_offset", "(", "types", ",", "member_list", ")", "return", "read_value", "(", "addr_space", ",", "current_type", ",", "(", "vaddr", "+", "offset", ")", ")" ]
read the low-level value for some complex type's member .
train
false
41,267
def get_socket_address(host, port, ipv4_only=False): try: info = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_DGRAM) except socket.gaierror as e: try: if (not ipv4_only): info = socket.getaddrinfo(host, port, socket.AF_INET6, socket.SOCK_DGRAM) elif (host == 'localhost'): log.warning('Warning localhost seems undefined in your host file, using 127.0.0.1 instead') info = socket.getaddrinfo('127.0.0.1', port, socket.AF_INET, socket.SOCK_DGRAM) else: log.error('Error processing host %s and port %s: %s', host, port, e) return None except socket.gaierror as e: log.error('Error processing host %s and port %s: %s', host, port, e) return None sockaddr = info[0][(-1)] if ((info[0][0] == socket.AF_INET) and (not ipv4_only)): mapped_host = mapto_v6(sockaddr[0]) sockaddr = (mapped_host, sockaddr[1], 0, 0) return sockaddr
[ "def", "get_socket_address", "(", "host", ",", "port", ",", "ipv4_only", "=", "False", ")", ":", "try", ":", "info", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "port", ",", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "except", "socket", ".", "gaierror", "as", "e", ":", "try", ":", "if", "(", "not", "ipv4_only", ")", ":", "info", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "port", ",", "socket", ".", "AF_INET6", ",", "socket", ".", "SOCK_DGRAM", ")", "elif", "(", "host", "==", "'localhost'", ")", ":", "log", ".", "warning", "(", "'Warning localhost seems undefined in your host file, using 127.0.0.1 instead'", ")", "info", "=", "socket", ".", "getaddrinfo", "(", "'127.0.0.1'", ",", "port", ",", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "else", ":", "log", ".", "error", "(", "'Error processing host %s and port %s: %s'", ",", "host", ",", "port", ",", "e", ")", "return", "None", "except", "socket", ".", "gaierror", "as", "e", ":", "log", ".", "error", "(", "'Error processing host %s and port %s: %s'", ",", "host", ",", "port", ",", "e", ")", "return", "None", "sockaddr", "=", "info", "[", "0", "]", "[", "(", "-", "1", ")", "]", "if", "(", "(", "info", "[", "0", "]", "[", "0", "]", "==", "socket", ".", "AF_INET", ")", "and", "(", "not", "ipv4_only", ")", ")", ":", "mapped_host", "=", "mapto_v6", "(", "sockaddr", "[", "0", "]", ")", "sockaddr", "=", "(", "mapped_host", ",", "sockaddr", "[", "1", "]", ",", "0", ",", "0", ")", "return", "sockaddr" ]
gather information to open the server socket .
train
false
41,268
def is_anime_in_show_list(): for show in sickrage.srCore.SHOWLIST: if show.is_anime: return True return False
[ "def", "is_anime_in_show_list", "(", ")", ":", "for", "show", "in", "sickrage", ".", "srCore", ".", "SHOWLIST", ":", "if", "show", ".", "is_anime", ":", "return", "True", "return", "False" ]
check if any shows in the list contain anime . :return: true if the global showlist contains anime .
train
false
41,269
def require_backend(required_backend): def decorator(undecorated_object): @wraps(undecorated_object) def wrapper(*args, **kwargs): config = get_blockdevice_config() configured_backend = config.pop('backend') skipper = skipUnless((configured_backend == required_backend), 'The backend in the supplied configuration is not suitable for this test. Found: {!r}. Required: {!r}.'.format(configured_backend, required_backend)) decorated_object = skipper(undecorated_object) result = decorated_object(*args, **kwargs) return result return wrapper return decorator
[ "def", "require_backend", "(", "required_backend", ")", ":", "def", "decorator", "(", "undecorated_object", ")", ":", "@", "wraps", "(", "undecorated_object", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "config", "=", "get_blockdevice_config", "(", ")", "configured_backend", "=", "config", ".", "pop", "(", "'backend'", ")", "skipper", "=", "skipUnless", "(", "(", "configured_backend", "==", "required_backend", ")", ",", "'The backend in the supplied configuration is not suitable for this test. Found: {!r}. Required: {!r}.'", ".", "format", "(", "configured_backend", ",", "required_backend", ")", ")", "decorated_object", "=", "skipper", "(", "undecorated_object", ")", "result", "=", "decorated_object", "(", "*", "args", ",", "**", "kwargs", ")", "return", "result", "return", "wrapper", "return", "decorator" ]
raise skiptest unless the functional test configuration has required_backend .
train
false
41,270
def obj_python_attrs(msg_): if hasattr(msg_, '_fields'): for k in msg_._fields: (yield (k, getattr(msg_, k))) return base = getattr(msg_, '_base_attributes', []) opt = getattr(msg_, '_opt_attributes', []) for (k, v) in inspect.getmembers(msg_): if (k in opt): pass elif k.startswith('_'): continue elif callable(v): continue elif (k in base): continue elif hasattr(msg_.__class__, k): continue (yield (k, v))
[ "def", "obj_python_attrs", "(", "msg_", ")", ":", "if", "hasattr", "(", "msg_", ",", "'_fields'", ")", ":", "for", "k", "in", "msg_", ".", "_fields", ":", "(", "yield", "(", "k", ",", "getattr", "(", "msg_", ",", "k", ")", ")", ")", "return", "base", "=", "getattr", "(", "msg_", ",", "'_base_attributes'", ",", "[", "]", ")", "opt", "=", "getattr", "(", "msg_", ",", "'_opt_attributes'", ",", "[", "]", ")", "for", "(", "k", ",", "v", ")", "in", "inspect", ".", "getmembers", "(", "msg_", ")", ":", "if", "(", "k", "in", "opt", ")", ":", "pass", "elif", "k", ".", "startswith", "(", "'_'", ")", ":", "continue", "elif", "callable", "(", "v", ")", ":", "continue", "elif", "(", "k", "in", "base", ")", ":", "continue", "elif", "hasattr", "(", "msg_", ".", "__class__", ",", "k", ")", ":", "continue", "(", "yield", "(", "k", ",", "v", ")", ")" ]
iterate object attributes for stringify purposes .
train
true
41,271
def test_show_fixtures_and_execute_test(testdir): p = testdir.makepyfile('\n import pytest\n @pytest.fixture\n def arg():\n assert True\n def test_arg(arg):\n assert False\n ') result = testdir.runpytest('--setup-show', p) assert (result.ret == 1) result.stdout.fnmatch_lines(['*SETUP F arg*', '*test_arg (fixtures used: arg)F', '*TEARDOWN F arg*'])
[ "def", "test_show_fixtures_and_execute_test", "(", "testdir", ")", ":", "p", "=", "testdir", ".", "makepyfile", "(", "'\\n import pytest\\n @pytest.fixture\\n def arg():\\n assert True\\n def test_arg(arg):\\n assert False\\n '", ")", "result", "=", "testdir", ".", "runpytest", "(", "'--setup-show'", ",", "p", ")", "assert", "(", "result", ".", "ret", "==", "1", ")", "result", ".", "stdout", ".", "fnmatch_lines", "(", "[", "'*SETUP F arg*'", ",", "'*test_arg (fixtures used: arg)F'", ",", "'*TEARDOWN F arg*'", "]", ")" ]
verifies that setups are shown and tests are executed .
train
false
41,272
def register_instances(name, instances, region=None, key=None, keyid=None, profile=None): if (isinstance(instances, str) or isinstance(instances, six.text_type)): instances = [instances] conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: registered_instances = conn.register_instances(name, instances) except boto.exception.BotoServerError as error: log.warning(error) return False registered_instance_ids = [instance.id for instance in registered_instances] register_failures = set(instances).difference(set(registered_instance_ids)) if register_failures: log.warning('Instance(s): {0} not registered with ELB {1}.'.format(list(register_failures), name)) register_result = False else: register_result = True return register_result
[ "def", "register_instances", "(", "name", ",", "instances", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "(", "isinstance", "(", "instances", ",", "str", ")", "or", "isinstance", "(", "instances", ",", "six", ".", "text_type", ")", ")", ":", "instances", "=", "[", "instances", "]", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "registered_instances", "=", "conn", ".", "register_instances", "(", "name", ",", "instances", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "error", ":", "log", ".", "warning", "(", "error", ")", "return", "False", "registered_instance_ids", "=", "[", "instance", ".", "id", "for", "instance", "in", "registered_instances", "]", "register_failures", "=", "set", "(", "instances", ")", ".", "difference", "(", "set", "(", "registered_instance_ids", ")", ")", "if", "register_failures", ":", "log", ".", "warning", "(", "'Instance(s): {0} not registered with ELB {1}.'", ".", "format", "(", "list", "(", "register_failures", ")", ",", "name", ")", ")", "register_result", "=", "False", "else", ":", "register_result", "=", "True", "return", "register_result" ]
register instances with an elb .
train
true
41,273
@with_setup(prepare_stdout) def test_output_with_successful_outline_colorless(): runner = Runner(feature_name('success_outline'), verbosity=3, no_color=True) runner.run() assert_stdout_lines(u'\nFeature: Successful Scenario Outline # tests/functional/output_features/success_outline/success_outline.feature:1\n As lettuce author # tests/functional/output_features/success_outline/success_outline.feature:2\n In order to finish the first release # tests/functional/output_features/success_outline/success_outline.feature:3\n I want to make scenario outlines work \u2665 # tests/functional/output_features/success_outline/success_outline.feature:4\n\n Scenario Outline: fill a web form # tests/functional/output_features/success_outline/success_outline.feature:6\n Given I open browser at "http://www.my-website.com/" # tests/functional/output_features/success_outline/success_outline_steps.py:21\n And click on "sign-up" # tests/functional/output_features/success_outline/success_outline_steps.py:25\n When I fill the field "username" with "<username>" # tests/functional/output_features/success_outline/success_outline_steps.py:29\n And I fill the field "password" with "<password>" # tests/functional/output_features/success_outline/success_outline_steps.py:29\n And I fill the field "password-confirm" with "<password>" # tests/functional/output_features/success_outline/success_outline_steps.py:29\n And I fill the field "email" with "<email>" # tests/functional/output_features/success_outline/success_outline_steps.py:29\n And I click "done" # tests/functional/output_features/success_outline/success_outline_steps.py:33\n Then I see the title of the page is "<title>" # tests/functional/output_features/success_outline/success_outline_steps.py:37\n\n Examples:\n | username | password | email | title |\n | john | doe-1234 | john@gmail.org | John \\| My Website |\n | mary | wee-9876 | mary@email.com | Mary \\| My Website |\n | foo | foo-bar | foo@bar.com | Foo \\| My Website |\n\n1 feature 
(1 passed)\n3 scenarios (3 passed)\n24 steps (24 passed)\n')
[ "@", "with_setup", "(", "prepare_stdout", ")", "def", "test_output_with_successful_outline_colorless", "(", ")", ":", "runner", "=", "Runner", "(", "feature_name", "(", "'success_outline'", ")", ",", "verbosity", "=", "3", ",", "no_color", "=", "True", ")", "runner", ".", "run", "(", ")", "assert_stdout_lines", "(", "u'\\nFeature: Successful Scenario Outline # tests/functional/output_features/success_outline/success_outline.feature:1\\n As lettuce author # tests/functional/output_features/success_outline/success_outline.feature:2\\n In order to finish the first release # tests/functional/output_features/success_outline/success_outline.feature:3\\n I want to make scenario outlines work \\u2665 # tests/functional/output_features/success_outline/success_outline.feature:4\\n\\n Scenario Outline: fill a web form # tests/functional/output_features/success_outline/success_outline.feature:6\\n Given I open browser at \"http://www.my-website.com/\" # tests/functional/output_features/success_outline/success_outline_steps.py:21\\n And click on \"sign-up\" # tests/functional/output_features/success_outline/success_outline_steps.py:25\\n When I fill the field \"username\" with \"<username>\" # tests/functional/output_features/success_outline/success_outline_steps.py:29\\n And I fill the field \"password\" with \"<password>\" # tests/functional/output_features/success_outline/success_outline_steps.py:29\\n And I fill the field \"password-confirm\" with \"<password>\" # tests/functional/output_features/success_outline/success_outline_steps.py:29\\n And I fill the field \"email\" with \"<email>\" # tests/functional/output_features/success_outline/success_outline_steps.py:29\\n And I click \"done\" # tests/functional/output_features/success_outline/success_outline_steps.py:33\\n Then I see the title of the page is \"<title>\" # tests/functional/output_features/success_outline/success_outline_steps.py:37\\n\\n Examples:\\n | username | password | email | title |\\n | john | 
doe-1234 | john@gmail.org | John \\\\| My Website |\\n | mary | wee-9876 | mary@email.com | Mary \\\\| My Website |\\n | foo | foo-bar | foo@bar.com | Foo \\\\| My Website |\\n\\n1 feature (1 passed)\\n3 scenarios (3 passed)\\n24 steps (24 passed)\\n'", ")" ]
test output with a successful scenario outline using colorless output .
train
false
41,275
def check_topic_permissions(topic_name): pubsub_client = pubsub.Client() topic = pubsub_client.topic(topic_name) permissions_to_check = ['pubsub.topics.publish', 'pubsub.topics.update'] allowed_permissions = topic.check_iam_permissions(permissions_to_check) print 'Allowed permissions for topic {}: {}'.format(topic.name, allowed_permissions)
[ "def", "check_topic_permissions", "(", "topic_name", ")", ":", "pubsub_client", "=", "pubsub", ".", "Client", "(", ")", "topic", "=", "pubsub_client", ".", "topic", "(", "topic_name", ")", "permissions_to_check", "=", "[", "'pubsub.topics.publish'", ",", "'pubsub.topics.update'", "]", "allowed_permissions", "=", "topic", ".", "check_iam_permissions", "(", "permissions_to_check", ")", "print", "'Allowed permissions for topic {}: {}'", ".", "format", "(", "topic", ".", "name", ",", "allowed_permissions", ")" ]
checks which permissions are available on the given topic .
train
false
41,276
def CreateRandomBytes(bytes, b64encode=False): if b64encode: sys.stdout.write(base64.b64encode(os.urandom(bytes))) else: sys.stdout.write(os.urandom(bytes))
[ "def", "CreateRandomBytes", "(", "bytes", ",", "b64encode", "=", "False", ")", ":", "if", "b64encode", ":", "sys", ".", "stdout", ".", "write", "(", "base64", ".", "b64encode", "(", "os", ".", "urandom", "(", "bytes", ")", ")", ")", "else", ":", "sys", ".", "stdout", ".", "write", "(", "os", ".", "urandom", "(", "bytes", ")", ")" ]
generates a string of random bytes .
train
false
41,277
def _broadcast_to_shape(context, builder, arrtype, arr, target_shape): shapes = cgutils.unpack_tuple(builder, arr.shape) strides = cgutils.unpack_tuple(builder, arr.strides) (shapes, strides) = _bc_adjust_dimension(context, builder, shapes, strides, target_shape) (shapes, strides) = _bc_adjust_shape_strides(context, builder, shapes, strides, target_shape) new_arrtype = arrtype.copy(ndim=len(target_shape), layout='A') new_arr = make_array(new_arrtype)(context, builder) repl = dict(shape=cgutils.pack_array(builder, shapes), strides=cgutils.pack_array(builder, strides)) cgutils.copy_struct(new_arr, arr, repl) return (new_arrtype, new_arr)
[ "def", "_broadcast_to_shape", "(", "context", ",", "builder", ",", "arrtype", ",", "arr", ",", "target_shape", ")", ":", "shapes", "=", "cgutils", ".", "unpack_tuple", "(", "builder", ",", "arr", ".", "shape", ")", "strides", "=", "cgutils", ".", "unpack_tuple", "(", "builder", ",", "arr", ".", "strides", ")", "(", "shapes", ",", "strides", ")", "=", "_bc_adjust_dimension", "(", "context", ",", "builder", ",", "shapes", ",", "strides", ",", "target_shape", ")", "(", "shapes", ",", "strides", ")", "=", "_bc_adjust_shape_strides", "(", "context", ",", "builder", ",", "shapes", ",", "strides", ",", "target_shape", ")", "new_arrtype", "=", "arrtype", ".", "copy", "(", "ndim", "=", "len", "(", "target_shape", ")", ",", "layout", "=", "'A'", ")", "new_arr", "=", "make_array", "(", "new_arrtype", ")", "(", "context", ",", "builder", ")", "repl", "=", "dict", "(", "shape", "=", "cgutils", ".", "pack_array", "(", "builder", ",", "shapes", ")", ",", "strides", "=", "cgutils", ".", "pack_array", "(", "builder", ",", "strides", ")", ")", "cgutils", ".", "copy_struct", "(", "new_arr", ",", "arr", ",", "repl", ")", "return", "(", "new_arrtype", ",", "new_arr", ")" ]
broadcast the given array to the target_shape .
train
false
41,278
def test_rgb_to_hsl_part_14(): assert (rgb_to_hsl(51, 51, 0) == (60, 100, 10)) assert (rgb_to_hsl(102, 102, 0) == (60, 100, 20)) assert (rgb_to_hsl(153, 153, 0) == (60, 100, 30)) assert (rgb_to_hsl(204, 204, 0) == (60, 100, 40)) assert (rgb_to_hsl(255, 255, 0) == (60, 100, 50)) assert (rgb_to_hsl(255, 255, 51) == (60, 100, 60)) assert (rgb_to_hsl(255, 255, 102) == (60, 100, 70)) assert (rgb_to_hsl(255, 255, 153) == (60, 100, 80)) assert (rgb_to_hsl(255, 255, 204) == (60, 100, 90))
[ "def", "test_rgb_to_hsl_part_14", "(", ")", ":", "assert", "(", "rgb_to_hsl", "(", "51", ",", "51", ",", "0", ")", "==", "(", "60", ",", "100", ",", "10", ")", ")", "assert", "(", "rgb_to_hsl", "(", "102", ",", "102", ",", "0", ")", "==", "(", "60", ",", "100", ",", "20", ")", ")", "assert", "(", "rgb_to_hsl", "(", "153", ",", "153", ",", "0", ")", "==", "(", "60", ",", "100", ",", "30", ")", ")", "assert", "(", "rgb_to_hsl", "(", "204", ",", "204", ",", "0", ")", "==", "(", "60", ",", "100", ",", "40", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "255", ",", "0", ")", "==", "(", "60", ",", "100", ",", "50", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "255", ",", "51", ")", "==", "(", "60", ",", "100", ",", "60", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "255", ",", "102", ")", "==", "(", "60", ",", "100", ",", "70", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "255", ",", "153", ")", "==", "(", "60", ",", "100", ",", "80", ")", ")", "assert", "(", "rgb_to_hsl", "(", "255", ",", "255", ",", "204", ")", "==", "(", "60", ",", "100", ",", "90", ")", ")" ]
test rgb to hsl color function .
train
false
41,279
def get_tile_coords_from_tuple(options, t): x = 0 y = 0 z = options.zoom_level n = 1 for i in t: if (i == 1): x += (2 ** (z - n)) elif (i == 2): y += (2 ** (z - n)) elif (i == 3): x += (2 ** (z - n)) y += (2 ** (z - n)) n += 1 return (x, y)
[ "def", "get_tile_coords_from_tuple", "(", "options", ",", "t", ")", ":", "x", "=", "0", "y", "=", "0", "z", "=", "options", ".", "zoom_level", "n", "=", "1", "for", "i", "in", "t", ":", "if", "(", "i", "==", "1", ")", ":", "x", "+=", "(", "2", "**", "(", "z", "-", "n", ")", ")", "elif", "(", "i", "==", "2", ")", ":", "y", "+=", "(", "2", "**", "(", "z", "-", "n", ")", ")", "elif", "(", "i", "==", "3", ")", ":", "x", "+=", "(", "2", "**", "(", "z", "-", "n", ")", ")", "y", "+=", "(", "2", "**", "(", "z", "-", "n", ")", ")", "n", "+=", "1", "return", "(", "x", ",", "y", ")" ]
gets a tuple of coords from get_tuple_coords and returns the number of tiles from the top left corner to this tile .
train
false
41,280
def create_cert_binding(name, site, hostheader='', ipaddress='*', port=443, sslflags=0): ret = {'name': name, 'changes': {}, 'comment': str(), 'result': None} binding_info = _get_binding_info(hostheader, ipaddress, port) current_cert_bindings = __salt__['win_iis.list_cert_bindings'](site) if (binding_info in current_cert_bindings): current_name = current_cert_bindings[binding_info]['certificatehash'] if (name == current_name): ret['comment'] = 'Certificate binding already present: {0}'.format(name) ret['result'] = True return ret ret['comment'] = 'Certificate binding already present with a different thumbprint: {0}'.format(current_name) ret['result'] = False elif __opts__['test']: ret['comment'] = 'Certificate binding will be created: {0}'.format(name) ret['changes'] = {'old': None, 'new': name} else: ret['comment'] = 'Created certificate binding: {0}'.format(name) ret['changes'] = {'old': None, 'new': name} ret['result'] = __salt__['win_iis.create_cert_binding'](name, site, hostheader, ipaddress, port, sslflags) return ret
[ "def", "create_cert_binding", "(", "name", ",", "site", ",", "hostheader", "=", "''", ",", "ipaddress", "=", "'*'", ",", "port", "=", "443", ",", "sslflags", "=", "0", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "str", "(", ")", ",", "'result'", ":", "None", "}", "binding_info", "=", "_get_binding_info", "(", "hostheader", ",", "ipaddress", ",", "port", ")", "current_cert_bindings", "=", "__salt__", "[", "'win_iis.list_cert_bindings'", "]", "(", "site", ")", "if", "(", "binding_info", "in", "current_cert_bindings", ")", ":", "current_name", "=", "current_cert_bindings", "[", "binding_info", "]", "[", "'certificatehash'", "]", "if", "(", "name", "==", "current_name", ")", ":", "ret", "[", "'comment'", "]", "=", "'Certificate binding already present: {0}'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "True", "return", "ret", "ret", "[", "'comment'", "]", "=", "'Certificate binding already present with a different thumbprint: {0}'", ".", "format", "(", "current_name", ")", "ret", "[", "'result'", "]", "=", "False", "elif", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'Certificate binding will be created: {0}'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "None", ",", "'new'", ":", "name", "}", "else", ":", "ret", "[", "'comment'", "]", "=", "'Created certificate binding: {0}'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "None", ",", "'new'", ":", "name", "}", "ret", "[", "'result'", "]", "=", "__salt__", "[", "'win_iis.create_cert_binding'", "]", "(", "name", ",", "site", ",", "hostheader", ",", "ipaddress", ",", "port", ",", "sslflags", ")", "return", "ret" ]
assign a certificate to an iis binding .
train
true
41,281
def is_standard(action): boolean_actions = (_StoreConstAction, _StoreFalseAction, _StoreTrueAction) return ((not action.choices) and (not isinstance(action, _CountAction)) and (not isinstance(action, _HelpAction)) and (type(action) not in boolean_actions))
[ "def", "is_standard", "(", "action", ")", ":", "boolean_actions", "=", "(", "_StoreConstAction", ",", "_StoreFalseAction", ",", "_StoreTrueAction", ")", "return", "(", "(", "not", "action", ".", "choices", ")", "and", "(", "not", "isinstance", "(", "action", ",", "_CountAction", ")", ")", "and", "(", "not", "isinstance", "(", "action", ",", "_HelpAction", ")", ")", "and", "(", "type", "(", "action", ")", "not", "in", "boolean_actions", ")", ")" ]
actions which are general "store" instructions .
train
true
41,282
def recompute_svn_backend(): global Client global has_svn_backend Client = None has_svn_backend = False required_module = None for backend_path in settings.SVNTOOL_BACKENDS: try: mod = __import__(six.binary_type(backend_path), fromlist=[u'Client', u'has_svn_backend']) if ((not hasattr(mod, u'has_svn_backend')) or (not hasattr(mod, u'Client'))): logging.error(u'Attempted to load invalid SVN backend %s', backend_path) continue has_svn_backend = mod.has_svn_backend if (has_svn_backend or (not required_module)): SVNTool.dependencies[u'modules'] = [mod.Client.required_module] if has_svn_backend: logging.info(u'Using %s backend for SVN', backend_path) Client = mod.Client break except ImportError: logging.error(u'Unable to load SVN backend %s', backend_path, exc_info=1)
[ "def", "recompute_svn_backend", "(", ")", ":", "global", "Client", "global", "has_svn_backend", "Client", "=", "None", "has_svn_backend", "=", "False", "required_module", "=", "None", "for", "backend_path", "in", "settings", ".", "SVNTOOL_BACKENDS", ":", "try", ":", "mod", "=", "__import__", "(", "six", ".", "binary_type", "(", "backend_path", ")", ",", "fromlist", "=", "[", "u'Client'", ",", "u'has_svn_backend'", "]", ")", "if", "(", "(", "not", "hasattr", "(", "mod", ",", "u'has_svn_backend'", ")", ")", "or", "(", "not", "hasattr", "(", "mod", ",", "u'Client'", ")", ")", ")", ":", "logging", ".", "error", "(", "u'Attempted to load invalid SVN backend %s'", ",", "backend_path", ")", "continue", "has_svn_backend", "=", "mod", ".", "has_svn_backend", "if", "(", "has_svn_backend", "or", "(", "not", "required_module", ")", ")", ":", "SVNTool", ".", "dependencies", "[", "u'modules'", "]", "=", "[", "mod", ".", "Client", ".", "required_module", "]", "if", "has_svn_backend", ":", "logging", ".", "info", "(", "u'Using %s backend for SVN'", ",", "backend_path", ")", "Client", "=", "mod", ".", "Client", "break", "except", "ImportError", ":", "logging", ".", "error", "(", "u'Unable to load SVN backend %s'", ",", "backend_path", ",", "exc_info", "=", "1", ")" ]
recomputes the svntool client backend to use .
train
false
41,283
def shared_zeros(*shape): return theano.shared(numpy.zeros(shape, dtype=theano.config.floatX))
[ "def", "shared_zeros", "(", "*", "shape", ")", ":", "return", "theano", ".", "shared", "(", "numpy", ".", "zeros", "(", "shape", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", ")" ]
initialize a vector shared variable with zero elements .
train
false
41,284
def test_mro_bases(): class C(object, ): def __getattribute__(self, name): if (name == 'xyz'): return 'C' return super(C, self).__getattribute__(name) class C1(C, ): def __getattribute__(self, name): if (name == 'xyz'): return 'C1' return super(C1, self).__getattribute__(name) class A(object, ): pass class B(object, ): def __getattribute__(self, name): if (name == 'xyz'): return 'B' return super(B, self).__getattribute__(name) a = C1() AreEqual(a.xyz, 'C1') C1.__bases__ = (A, B) AreEqual(a.xyz, 'C1') del C1.__getattribute__ AreEqual(a.xyz, 'B')
[ "def", "test_mro_bases", "(", ")", ":", "class", "C", "(", "object", ",", ")", ":", "def", "__getattribute__", "(", "self", ",", "name", ")", ":", "if", "(", "name", "==", "'xyz'", ")", ":", "return", "'C'", "return", "super", "(", "C", ",", "self", ")", ".", "__getattribute__", "(", "name", ")", "class", "C1", "(", "C", ",", ")", ":", "def", "__getattribute__", "(", "self", ",", "name", ")", ":", "if", "(", "name", "==", "'xyz'", ")", ":", "return", "'C1'", "return", "super", "(", "C1", ",", "self", ")", ".", "__getattribute__", "(", "name", ")", "class", "A", "(", "object", ",", ")", ":", "pass", "class", "B", "(", "object", ",", ")", ":", "def", "__getattribute__", "(", "self", ",", "name", ")", ":", "if", "(", "name", "==", "'xyz'", ")", ":", "return", "'B'", "return", "super", "(", "B", ",", "self", ")", ".", "__getattribute__", "(", "name", ")", "a", "=", "C1", "(", ")", "AreEqual", "(", "a", ".", "xyz", ",", "'C1'", ")", "C1", ".", "__bases__", "=", "(", "A", ",", "B", ")", "AreEqual", "(", "a", ".", "xyz", ",", "'C1'", ")", "del", "C1", ".", "__getattribute__", "AreEqual", "(", "a", ".", "xyz", ",", "'B'", ")" ]
verify replacing base classes also updates mro .
train
false
41,287
@pytest.mark.django_db def test_data_store_bad(store0): with pytest.raises(IntegrityError): StoreData.objects.create()
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_data_store_bad", "(", "store0", ")", ":", "with", "pytest", ".", "raises", "(", "IntegrityError", ")", ":", "StoreData", ".", "objects", ".", "create", "(", ")" ]
test that you can't add a duplicate file extension .
train
false
41,288
def renameUnprocessedFolder(path, tag): for i in itertools.count(): if (i == 0): new_path = ('%s (%s)' % (path, tag)) else: new_path = ('%s (%s[%d])' % (path, tag, i)) if os.path.exists(new_path): i += 1 else: os.rename(path, new_path) return
[ "def", "renameUnprocessedFolder", "(", "path", ",", "tag", ")", ":", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "if", "(", "i", "==", "0", ")", ":", "new_path", "=", "(", "'%s (%s)'", "%", "(", "path", ",", "tag", ")", ")", "else", ":", "new_path", "=", "(", "'%s (%s[%d])'", "%", "(", "path", ",", "tag", ",", "i", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "new_path", ")", ":", "i", "+=", "1", "else", ":", "os", ".", "rename", "(", "path", ",", "new_path", ")", "return" ]
rename an unprocessed folder to a new unique name to indicate a certain status .
train
false
41,291
def BuildCampaignOperations(batch_job_helper, budget_operations, number_of_campaigns=1): budget_id = budget_operations[0]['operand']['budgetId'] campaign_operations = [{'xsi_type': 'CampaignOperation', 'operand': {'name': ('Batch Campaign #%s' % uuid.uuid4()), 'status': 'PAUSED', 'id': batch_job_helper.GetId(), 'advertisingChannelType': 'SEARCH', 'budget': {'budgetId': budget_id}, 'biddingStrategyConfiguration': {'biddingStrategyType': 'MANUAL_CPC'}}, 'operator': 'ADD'} for _ in range(number_of_campaigns)] return campaign_operations
[ "def", "BuildCampaignOperations", "(", "batch_job_helper", ",", "budget_operations", ",", "number_of_campaigns", "=", "1", ")", ":", "budget_id", "=", "budget_operations", "[", "0", "]", "[", "'operand'", "]", "[", "'budgetId'", "]", "campaign_operations", "=", "[", "{", "'xsi_type'", ":", "'CampaignOperation'", ",", "'operand'", ":", "{", "'name'", ":", "(", "'Batch Campaign #%s'", "%", "uuid", ".", "uuid4", "(", ")", ")", ",", "'status'", ":", "'PAUSED'", ",", "'id'", ":", "batch_job_helper", ".", "GetId", "(", ")", ",", "'advertisingChannelType'", ":", "'SEARCH'", ",", "'budget'", ":", "{", "'budgetId'", ":", "budget_id", "}", ",", "'biddingStrategyConfiguration'", ":", "{", "'biddingStrategyType'", ":", "'MANUAL_CPC'", "}", "}", ",", "'operator'", ":", "'ADD'", "}", "for", "_", "in", "range", "(", "number_of_campaigns", ")", "]", "return", "campaign_operations" ]
builds the operations needed to create a new campaign .
train
true
41,292
def find_bezier_t_intersecting_with_closedpath(bezier_point_at_t, inside_closedpath, t0=0.0, t1=1.0, tolerence=0.01): start = bezier_point_at_t(t0) end = bezier_point_at_t(t1) start_inside = inside_closedpath(start) end_inside = inside_closedpath(end) if (not xor(start_inside, end_inside)): raise ValueError('the segment does not seemed to intersect with the path') while 1: if ((((start[0] - end[0]) ** 2) + ((start[1] - end[1]) ** 2)) < (tolerence ** 2)): return (t0, t1) middle_t = (0.5 * (t0 + t1)) middle = bezier_point_at_t(middle_t) middle_inside = inside_closedpath(middle) if xor(start_inside, middle_inside): t1 = middle_t end = middle end_inside = middle_inside else: t0 = middle_t start = middle start_inside = middle_inside
[ "def", "find_bezier_t_intersecting_with_closedpath", "(", "bezier_point_at_t", ",", "inside_closedpath", ",", "t0", "=", "0.0", ",", "t1", "=", "1.0", ",", "tolerence", "=", "0.01", ")", ":", "start", "=", "bezier_point_at_t", "(", "t0", ")", "end", "=", "bezier_point_at_t", "(", "t1", ")", "start_inside", "=", "inside_closedpath", "(", "start", ")", "end_inside", "=", "inside_closedpath", "(", "end", ")", "if", "(", "not", "xor", "(", "start_inside", ",", "end_inside", ")", ")", ":", "raise", "ValueError", "(", "'the segment does not seemed to intersect with the path'", ")", "while", "1", ":", "if", "(", "(", "(", "(", "start", "[", "0", "]", "-", "end", "[", "0", "]", ")", "**", "2", ")", "+", "(", "(", "start", "[", "1", "]", "-", "end", "[", "1", "]", ")", "**", "2", ")", ")", "<", "(", "tolerence", "**", "2", ")", ")", ":", "return", "(", "t0", ",", "t1", ")", "middle_t", "=", "(", "0.5", "*", "(", "t0", "+", "t1", ")", ")", "middle", "=", "bezier_point_at_t", "(", "middle_t", ")", "middle_inside", "=", "inside_closedpath", "(", "middle", ")", "if", "xor", "(", "start_inside", ",", "middle_inside", ")", ":", "t1", "=", "middle_t", "end", "=", "middle", "end_inside", "=", "middle_inside", "else", ":", "t0", "=", "middle_t", "start", "=", "middle", "start_inside", "=", "middle_inside" ]
find a parameter t0 and t1 of the given bezier path which bounds the intersecting points with a provided closed path .
train
false
41,293
def get_event_id(url): match = re.search('{event_id:\\s(?P<event_id>\\d+),.*}', http.get(url).text) try: event_id = int(match.group('event_id')) except: raise PluginError('Failed to get event id from URL.') return event_id
[ "def", "get_event_id", "(", "url", ")", ":", "match", "=", "re", ".", "search", "(", "'{event_id:\\\\s(?P<event_id>\\\\d+),.*}'", ",", "http", ".", "get", "(", "url", ")", ".", "text", ")", "try", ":", "event_id", "=", "int", "(", "match", ".", "group", "(", "'event_id'", ")", ")", "except", ":", "raise", "PluginError", "(", "'Failed to get event id from URL.'", ")", "return", "event_id" ]
extract event id from talk html page .
train
false
41,295
def _get_terminal_size_windows(): try: from ctypes import windll, create_string_buffer h = windll.kernel32.GetStdHandle((-12)) csbi = create_string_buffer(22) res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) if res: (bufx, bufy, curx, cury, wattr, left, top, right, bottom, maxx, maxy) = struct.unpack('hhhhHhhhhhh', csbi.raw) sizex = ((right - left) + 1) sizey = ((bottom - top) + 1) return (sizex, sizey) except: pass
[ "def", "_get_terminal_size_windows", "(", ")", ":", "try", ":", "from", "ctypes", "import", "windll", ",", "create_string_buffer", "h", "=", "windll", ".", "kernel32", ".", "GetStdHandle", "(", "(", "-", "12", ")", ")", "csbi", "=", "create_string_buffer", "(", "22", ")", "res", "=", "windll", ".", "kernel32", ".", "GetConsoleScreenBufferInfo", "(", "h", ",", "csbi", ")", "if", "res", ":", "(", "bufx", ",", "bufy", ",", "curx", ",", "cury", ",", "wattr", ",", "left", ",", "top", ",", "right", ",", "bottom", ",", "maxx", ",", "maxy", ")", "=", "struct", ".", "unpack", "(", "'hhhhHhhhhhh'", ",", "csbi", ".", "raw", ")", "sizex", "=", "(", "(", "right", "-", "left", ")", "+", "1", ")", "sizey", "=", "(", "(", "bottom", "-", "top", ")", "+", "1", ")", "return", "(", "sizex", ",", "sizey", ")", "except", ":", "pass" ]
get terminal size on ms windows .
train
true
41,298
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
41,299
def _nova_to_osvif_routes(routes): return objects.route.RouteList(objects=[_nova_to_osvif_route(route) for route in routes])
[ "def", "_nova_to_osvif_routes", "(", "routes", ")", ":", "return", "objects", ".", "route", ".", "RouteList", "(", "objects", "=", "[", "_nova_to_osvif_route", "(", "route", ")", "for", "route", "in", "routes", "]", ")" ]
convert nova route list into os_vif object .
train
false
41,300
def get_all_vlanids_used(): LOG.debug(_('get_all_vlanids() called')) session = db.get_session() try: vlanids = session.query(l2network_models.VlanID).filter_by(vlan_used=True).all() return vlanids except exc.NoResultFound: return []
[ "def", "get_all_vlanids_used", "(", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'get_all_vlanids() called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "vlanids", "=", "session", ".", "query", "(", "l2network_models", ".", "VlanID", ")", ".", "filter_by", "(", "vlan_used", "=", "True", ")", ".", "all", "(", ")", "return", "vlanids", "except", "exc", ".", "NoResultFound", ":", "return", "[", "]" ]
gets all the vlanids used .
train
false
41,301
def _entity_from_response_type(feature_type, results): detected_objects = [] if (feature_type == _FACE_ANNOTATIONS): detected_objects.extend((Face.from_api_repr(face) for face in results)) elif (feature_type == _IMAGE_PROPERTIES_ANNOTATION): return ImagePropertiesAnnotation.from_api_repr(results) elif (feature_type == _SAFE_SEARCH_ANNOTATION): return SafeSearchAnnotation.from_api_repr(results) else: for result in results: detected_objects.append(EntityAnnotation.from_api_repr(result)) return detected_objects
[ "def", "_entity_from_response_type", "(", "feature_type", ",", "results", ")", ":", "detected_objects", "=", "[", "]", "if", "(", "feature_type", "==", "_FACE_ANNOTATIONS", ")", ":", "detected_objects", ".", "extend", "(", "(", "Face", ".", "from_api_repr", "(", "face", ")", "for", "face", "in", "results", ")", ")", "elif", "(", "feature_type", "==", "_IMAGE_PROPERTIES_ANNOTATION", ")", ":", "return", "ImagePropertiesAnnotation", ".", "from_api_repr", "(", "results", ")", "elif", "(", "feature_type", "==", "_SAFE_SEARCH_ANNOTATION", ")", ":", "return", "SafeSearchAnnotation", ".", "from_api_repr", "(", "results", ")", "else", ":", "for", "result", "in", "results", ":", "detected_objects", ".", "append", "(", "EntityAnnotation", ".", "from_api_repr", "(", "result", ")", ")", "return", "detected_objects" ]
convert a json result to an entity type based on the feature .
train
false
41,302
def get_environ_proxies(url): get_proxy = (lambda k: (os.environ.get(k) or os.environ.get(k.upper()))) no_proxy = get_proxy('no_proxy') netloc = urlparse(url).netloc if no_proxy: no_proxy = no_proxy.split(',') for host in no_proxy: if (netloc.endswith(host) or netloc.split(':')[0].endswith(host)): return {} if proxy_bypass(netloc): return {} return getproxies()
[ "def", "get_environ_proxies", "(", "url", ")", ":", "get_proxy", "=", "(", "lambda", "k", ":", "(", "os", ".", "environ", ".", "get", "(", "k", ")", "or", "os", ".", "environ", ".", "get", "(", "k", ".", "upper", "(", ")", ")", ")", ")", "no_proxy", "=", "get_proxy", "(", "'no_proxy'", ")", "netloc", "=", "urlparse", "(", "url", ")", ".", "netloc", "if", "no_proxy", ":", "no_proxy", "=", "no_proxy", ".", "split", "(", "','", ")", "for", "host", "in", "no_proxy", ":", "if", "(", "netloc", ".", "endswith", "(", "host", ")", "or", "netloc", ".", "split", "(", "':'", ")", "[", "0", "]", ".", "endswith", "(", "host", ")", ")", ":", "return", "{", "}", "if", "proxy_bypass", "(", "netloc", ")", ":", "return", "{", "}", "return", "getproxies", "(", ")" ]
return a dict of environment proxies .
train
false
41,305
def _truncate(words, cutlength): stems = {} for word in words: stem = word[:cutlength] try: stems[stem].update([word]) except KeyError: stems[stem] = set([word]) return stems
[ "def", "_truncate", "(", "words", ",", "cutlength", ")", ":", "stems", "=", "{", "}", "for", "word", "in", "words", ":", "stem", "=", "word", "[", ":", "cutlength", "]", "try", ":", "stems", "[", "stem", "]", ".", "update", "(", "[", "word", "]", ")", "except", "KeyError", ":", "stems", "[", "stem", "]", "=", "set", "(", "[", "word", "]", ")", "return", "stems" ]
group words by stems defined by truncating them at given length .
train
false
41,306
def tooltip_helper(desc): tooltip = [] tooltip.append('<b>{name}</b>'.format(name=escape(desc.name))) if desc.project_name: tooltip[0] += ' (from {0})'.format(desc.project_name) if desc.description: tooltip.append('{0}'.format(escape(desc.description))) inputs_fmt = '<li>{name}</li>' if desc.inputs: inputs = ''.join((inputs_fmt.format(name=inp.name) for inp in desc.inputs)) tooltip.append('Inputs:<ul>{0}</ul>'.format(inputs)) else: tooltip.append('No inputs') if desc.outputs: outputs = ''.join((inputs_fmt.format(name=out.name) for out in desc.outputs)) tooltip.append('Outputs:<ul>{0}</ul>'.format(outputs)) else: tooltip.append('No outputs') return '<hr/>'.join(tooltip)
[ "def", "tooltip_helper", "(", "desc", ")", ":", "tooltip", "=", "[", "]", "tooltip", ".", "append", "(", "'<b>{name}</b>'", ".", "format", "(", "name", "=", "escape", "(", "desc", ".", "name", ")", ")", ")", "if", "desc", ".", "project_name", ":", "tooltip", "[", "0", "]", "+=", "' (from {0})'", ".", "format", "(", "desc", ".", "project_name", ")", "if", "desc", ".", "description", ":", "tooltip", ".", "append", "(", "'{0}'", ".", "format", "(", "escape", "(", "desc", ".", "description", ")", ")", ")", "inputs_fmt", "=", "'<li>{name}</li>'", "if", "desc", ".", "inputs", ":", "inputs", "=", "''", ".", "join", "(", "(", "inputs_fmt", ".", "format", "(", "name", "=", "inp", ".", "name", ")", "for", "inp", "in", "desc", ".", "inputs", ")", ")", "tooltip", ".", "append", "(", "'Inputs:<ul>{0}</ul>'", ".", "format", "(", "inputs", ")", ")", "else", ":", "tooltip", ".", "append", "(", "'No inputs'", ")", "if", "desc", ".", "outputs", ":", "outputs", "=", "''", ".", "join", "(", "(", "inputs_fmt", ".", "format", "(", "name", "=", "out", ".", "name", ")", "for", "out", "in", "desc", ".", "outputs", ")", ")", "tooltip", ".", "append", "(", "'Outputs:<ul>{0}</ul>'", ".", "format", "(", "outputs", ")", ")", "else", ":", "tooltip", ".", "append", "(", "'No outputs'", ")", "return", "'<hr/>'", ".", "join", "(", "tooltip", ")" ]
widget tooltip construction helper .
train
false
41,308
def cgconfig_exists(): return service_cgconfig_control('exists')
[ "def", "cgconfig_exists", "(", ")", ":", "return", "service_cgconfig_control", "(", "'exists'", ")" ]
check if cgconfig is available on the host or perhaps systemd is used .
train
false
41,309
def test_sample_wrong_X_dft_ratio(): cc = ClusterCentroids(random_state=RND_SEED) cc.fit(X, Y) assert_raises(RuntimeError, cc.sample, np.random.random((100, 40)), np.array((([0] * 50) + ([1] * 50))))
[ "def", "test_sample_wrong_X_dft_ratio", "(", ")", ":", "cc", "=", "ClusterCentroids", "(", "random_state", "=", "RND_SEED", ")", "cc", ".", "fit", "(", "X", ",", "Y", ")", "assert_raises", "(", "RuntimeError", ",", "cc", ".", "sample", ",", "np", ".", "random", ".", "random", "(", "(", "100", ",", "40", ")", ")", ",", "np", ".", "array", "(", "(", "(", "[", "0", "]", "*", "50", ")", "+", "(", "[", "1", "]", "*", "50", ")", ")", ")", ")" ]
test either if an error is raised when x is different at fitting and sampling without ratio .
train
false
41,310
def get_default_parser(usage='%prog [options] <start|stop|status>'): parser = OptionParser(usage=usage) parser.add_option('--debug', action='store_true', help='Run in the foreground, log to stdout') parser.add_option('--syslog', action='store_true', help='Write logs to syslog') parser.add_option('--nodaemon', action='store_true', help='Run in the foreground') parser.add_option('--profile', help='Record performance profile data to the given file') parser.add_option('--profiler', help='Specify the profiler to use') parser.add_option('--pidfile', default=None, help='Write pid to the given file') parser.add_option('--umask', default=None, help='Use the given umask when creating files') parser.add_option('--config', default=None, help='Use the given config file') parser.add_option('--whitelist', default=None, help='Use the given whitelist file') parser.add_option('--blacklist', default=None, help='Use the given blacklist file') parser.add_option('--logdir', default=None, help='Write logs in the given directory') parser.add_option('--instance', default='a', help='Manage a specific carbon instance') return parser
[ "def", "get_default_parser", "(", "usage", "=", "'%prog [options] <start|stop|status>'", ")", ":", "parser", "=", "OptionParser", "(", "usage", "=", "usage", ")", "parser", ".", "add_option", "(", "'--debug'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run in the foreground, log to stdout'", ")", "parser", ".", "add_option", "(", "'--syslog'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Write logs to syslog'", ")", "parser", ".", "add_option", "(", "'--nodaemon'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run in the foreground'", ")", "parser", ".", "add_option", "(", "'--profile'", ",", "help", "=", "'Record performance profile data to the given file'", ")", "parser", ".", "add_option", "(", "'--profiler'", ",", "help", "=", "'Specify the profiler to use'", ")", "parser", ".", "add_option", "(", "'--pidfile'", ",", "default", "=", "None", ",", "help", "=", "'Write pid to the given file'", ")", "parser", ".", "add_option", "(", "'--umask'", ",", "default", "=", "None", ",", "help", "=", "'Use the given umask when creating files'", ")", "parser", ".", "add_option", "(", "'--config'", ",", "default", "=", "None", ",", "help", "=", "'Use the given config file'", ")", "parser", ".", "add_option", "(", "'--whitelist'", ",", "default", "=", "None", ",", "help", "=", "'Use the given whitelist file'", ")", "parser", ".", "add_option", "(", "'--blacklist'", ",", "default", "=", "None", ",", "help", "=", "'Use the given blacklist file'", ")", "parser", ".", "add_option", "(", "'--logdir'", ",", "default", "=", "None", ",", "help", "=", "'Write logs in the given directory'", ")", "parser", ".", "add_option", "(", "'--instance'", ",", "default", "=", "'a'", ",", "help", "=", "'Manage a specific carbon instance'", ")", "return", "parser" ]
create a parser for command line options .
train
false
41,312
def query_tags(): return read_url(get_tags_url())
[ "def", "query_tags", "(", ")", ":", "return", "read_url", "(", "get_tags_url", "(", ")", ")" ]
hits the github api for repository tags and returns the data .
train
false
41,313
def set_account_ip(account_id, ip, date=None): if ip_address(ip).is_private: g.stats.simple_event('ip.private_ip_storage_prevented') g.stats.count_string('private_ip_storage_prevented', ip) return if (date is None): date = datetime.datetime.now(g.tz) m = Mutator(CONNECTION_POOL) m.insert(IPsByAccount._cf, str(account_id), {date: ip}, ttl=CF_TTL) m.insert(AccountsByIP._cf, ip, {date: str(account_id)}, ttl=CF_TTL) m.send()
[ "def", "set_account_ip", "(", "account_id", ",", "ip", ",", "date", "=", "None", ")", ":", "if", "ip_address", "(", "ip", ")", ".", "is_private", ":", "g", ".", "stats", ".", "simple_event", "(", "'ip.private_ip_storage_prevented'", ")", "g", ".", "stats", ".", "count_string", "(", "'private_ip_storage_prevented'", ",", "ip", ")", "return", "if", "(", "date", "is", "None", ")", ":", "date", "=", "datetime", ".", "datetime", ".", "now", "(", "g", ".", "tz", ")", "m", "=", "Mutator", "(", "CONNECTION_POOL", ")", "m", ".", "insert", "(", "IPsByAccount", ".", "_cf", ",", "str", "(", "account_id", ")", ",", "{", "date", ":", "ip", "}", ",", "ttl", "=", "CF_TTL", ")", "m", ".", "insert", "(", "AccountsByIP", ".", "_cf", ",", "ip", ",", "{", "date", ":", "str", "(", "account_id", ")", "}", ",", "ttl", "=", "CF_TTL", ")", "m", ".", "send", "(", ")" ]
set an ip address as having accessed an account .
train
false
41,314
def native_path(path): if (_os.name != 'posix'): path = path.replace('/', _os.path.sep) return path
[ "def", "native_path", "(", "path", ")", ":", "if", "(", "_os", ".", "name", "!=", "'posix'", ")", ":", "path", "=", "path", ".", "replace", "(", "'/'", ",", "_os", ".", "path", ".", "sep", ")", "return", "path" ]
converts a path name from universal path notation to the operating system-specific format .
train
false
41,315
def build_dataset(n_samples=50, n_features=200, n_informative_features=10, n_targets=1): random_state = np.random.RandomState(0) if (n_targets > 1): w = random_state.randn(n_features, n_targets) else: w = random_state.randn(n_features) w[n_informative_features:] = 0.0 X = random_state.randn(n_samples, n_features) y = np.dot(X, w) X_test = random_state.randn(n_samples, n_features) y_test = np.dot(X_test, w) return (X, y, X_test, y_test)
[ "def", "build_dataset", "(", "n_samples", "=", "50", ",", "n_features", "=", "200", ",", "n_informative_features", "=", "10", ",", "n_targets", "=", "1", ")", ":", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "if", "(", "n_targets", ">", "1", ")", ":", "w", "=", "random_state", ".", "randn", "(", "n_features", ",", "n_targets", ")", "else", ":", "w", "=", "random_state", ".", "randn", "(", "n_features", ")", "w", "[", "n_informative_features", ":", "]", "=", "0.0", "X", "=", "random_state", ".", "randn", "(", "n_samples", ",", "n_features", ")", "y", "=", "np", ".", "dot", "(", "X", ",", "w", ")", "X_test", "=", "random_state", ".", "randn", "(", "n_samples", ",", "n_features", ")", "y_test", "=", "np", ".", "dot", "(", "X_test", ",", "w", ")", "return", "(", "X", ",", "y", ",", "X_test", ",", "y_test", ")" ]
build an ill-posed linear regression problem with many noisy features and comparatively few samples .
train
false
41,317
def t_KEGG_Enzyme(testfiles): for file in testfiles: fh = open(os.path.join('KEGG', file)) print((('Testing Bio.KEGG.Enzyme on ' + file) + '\n\n')) records = Enzyme.parse(fh) for (i, record) in enumerate(records): print(record) fh.seek(0) if (i == 0): print(Enzyme.read(fh)) else: try: print(Enzyme.read(fh)) assert False except ValueError as e: assert (str(e) == 'More than one record found in handle') print('\n') fh.close()
[ "def", "t_KEGG_Enzyme", "(", "testfiles", ")", ":", "for", "file", "in", "testfiles", ":", "fh", "=", "open", "(", "os", ".", "path", ".", "join", "(", "'KEGG'", ",", "file", ")", ")", "print", "(", "(", "(", "'Testing Bio.KEGG.Enzyme on '", "+", "file", ")", "+", "'\\n\\n'", ")", ")", "records", "=", "Enzyme", ".", "parse", "(", "fh", ")", "for", "(", "i", ",", "record", ")", "in", "enumerate", "(", "records", ")", ":", "print", "(", "record", ")", "fh", ".", "seek", "(", "0", ")", "if", "(", "i", "==", "0", ")", ":", "print", "(", "Enzyme", ".", "read", "(", "fh", ")", ")", "else", ":", "try", ":", "print", "(", "Enzyme", ".", "read", "(", "fh", ")", ")", "assert", "False", "except", "ValueError", "as", "e", ":", "assert", "(", "str", "(", "e", ")", "==", "'More than one record found in handle'", ")", "print", "(", "'\\n'", ")", "fh", ".", "close", "(", ")" ]
tests bio .
train
false
41,319
def allow_rasterization(draw): @contextmanager def with_rasterized(artist, renderer): if artist.get_rasterized(): renderer.start_rasterizing() if (artist.get_agg_filter() is not None): renderer.start_filter() try: (yield) finally: if (artist.get_agg_filter() is not None): renderer.stop_filter(artist.get_agg_filter()) if artist.get_rasterized(): renderer.stop_rasterizing() @wraps(draw) def draw_wrapper(artist, renderer, *args, **kwargs): with with_rasterized(artist, renderer): return draw(artist, renderer, *args, **kwargs) draw_wrapper._supports_rasterization = True return draw_wrapper
[ "def", "allow_rasterization", "(", "draw", ")", ":", "@", "contextmanager", "def", "with_rasterized", "(", "artist", ",", "renderer", ")", ":", "if", "artist", ".", "get_rasterized", "(", ")", ":", "renderer", ".", "start_rasterizing", "(", ")", "if", "(", "artist", ".", "get_agg_filter", "(", ")", "is", "not", "None", ")", ":", "renderer", ".", "start_filter", "(", ")", "try", ":", "(", "yield", ")", "finally", ":", "if", "(", "artist", ".", "get_agg_filter", "(", ")", "is", "not", "None", ")", ":", "renderer", ".", "stop_filter", "(", "artist", ".", "get_agg_filter", "(", ")", ")", "if", "artist", ".", "get_rasterized", "(", ")", ":", "renderer", ".", "stop_rasterizing", "(", ")", "@", "wraps", "(", "draw", ")", "def", "draw_wrapper", "(", "artist", ",", "renderer", ",", "*", "args", ",", "**", "kwargs", ")", ":", "with", "with_rasterized", "(", "artist", ",", "renderer", ")", ":", "return", "draw", "(", "artist", ",", "renderer", ",", "*", "args", ",", "**", "kwargs", ")", "draw_wrapper", ".", "_supports_rasterization", "=", "True", "return", "draw_wrapper" ]
decorator for artist .
train
false
41,320
def _sympify(a): return sympify(a, strict=True)
[ "def", "_sympify", "(", "a", ")", ":", "return", "sympify", "(", "a", ",", "strict", "=", "True", ")" ]
short version of sympify for internal usage for __add__ and __eq__ methods where it is ok to allow some things in the expression .
train
false
41,321
def appstr(app): return u'{0}:{1:#x}'.format((app.main or u'__main__'), id(app))
[ "def", "appstr", "(", "app", ")", ":", "return", "u'{0}:{1:#x}'", ".", "format", "(", "(", "app", ".", "main", "or", "u'__main__'", ")", ",", "id", "(", "app", ")", ")" ]
string used in __repr__ etc .
train
false
41,325
def one_click_unsubscribe_link(user_profile, endpoint): token = unsubscribe_token(user_profile) resource_path = ('accounts/unsubscribe/%s/%s' % (endpoint, token)) return ('%s/%s' % (user_profile.realm.uri.rstrip('/'), resource_path))
[ "def", "one_click_unsubscribe_link", "(", "user_profile", ",", "endpoint", ")", ":", "token", "=", "unsubscribe_token", "(", "user_profile", ")", "resource_path", "=", "(", "'accounts/unsubscribe/%s/%s'", "%", "(", "endpoint", ",", "token", ")", ")", "return", "(", "'%s/%s'", "%", "(", "user_profile", ".", "realm", ".", "uri", ".", "rstrip", "(", "'/'", ")", ",", "resource_path", ")", ")" ]
generate a unique link that a logged-out user can visit to unsubscribe from zulip e-mails without having to first log in .
train
false
41,326
@utils.arg('--zone', metavar='<zone>', default=None, help=_('Filters the list, returning only those hosts in the availability zone <zone>.')) def do_host_list(cs, args): columns = ['host_name', 'service', 'zone'] result = cs.hosts.list(args.zone) utils.print_list(result, columns)
[ "@", "utils", ".", "arg", "(", "'--zone'", ",", "metavar", "=", "'<zone>'", ",", "default", "=", "None", ",", "help", "=", "_", "(", "'Filters the list, returning only those hosts in the availability zone <zone>.'", ")", ")", "def", "do_host_list", "(", "cs", ",", "args", ")", ":", "columns", "=", "[", "'host_name'", ",", "'service'", ",", "'zone'", "]", "result", "=", "cs", ".", "hosts", ".", "list", "(", "args", ".", "zone", ")", "utils", ".", "print_list", "(", "result", ",", "columns", ")" ]
list all hosts by service .
train
false
41,327
def pair(address, key): if (not salt.utils.validate.net.mac(address)): raise CommandExecutionError('Invalid BD address passed to bluetooth.pair') try: int(key) except Exception: raise CommandExecutionError('bluetooth.pair requires a numerical key to be used') addy = address_() cmd = 'echo {0} | bluez-simple-agent {1} {2}'.format(_cmd_quote(addy['device']), _cmd_quote(address), _cmd_quote(key)) out = __salt__['cmd.run'](cmd, python_shell=True).splitlines() return out
[ "def", "pair", "(", "address", ",", "key", ")", ":", "if", "(", "not", "salt", ".", "utils", ".", "validate", ".", "net", ".", "mac", "(", "address", ")", ")", ":", "raise", "CommandExecutionError", "(", "'Invalid BD address passed to bluetooth.pair'", ")", "try", ":", "int", "(", "key", ")", "except", "Exception", ":", "raise", "CommandExecutionError", "(", "'bluetooth.pair requires a numerical key to be used'", ")", "addy", "=", "address_", "(", ")", "cmd", "=", "'echo {0} | bluez-simple-agent {1} {2}'", ".", "format", "(", "_cmd_quote", "(", "addy", "[", "'device'", "]", ")", ",", "_cmd_quote", "(", "address", ")", ",", "_cmd_quote", "(", "key", ")", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "True", ")", ".", "splitlines", "(", ")", "return", "out" ]
pair the bluetooth adapter with a device cli example: .
train
true