id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
21,594
def _DedupingEntityGenerator(cursor):
    """Yield unique entities from a cursor of (key, encoded-entity) rows.

    Rows whose key has already been seen are skipped, so each entity is
    produced at most once; each kept row is decoded into an EntityProto
    and prepared for load before being yielded.
    """
    emitted_keys = set()
    for row in cursor:
        row_key, row_entity = row[:2]
        key_repr = str(row_key)
        if key_repr not in emitted_keys:
            emitted_keys.add(key_repr)
            entity = entity_pb.EntityProto(row_entity)
            datastore_stub_util.PrepareSpecialPropertiesForLoad(entity)
            yield entity
[ "def", "_DedupingEntityGenerator", "(", "cursor", ")", ":", "seen", "=", "set", "(", ")", "for", "row", "in", "cursor", ":", "(", "row_key", ",", "row_entity", ")", "=", "row", "[", ":", "2", "]", "encoded_row_key", "=", "str", "(", "row_key", ")", "if", "(", "encoded_row_key", "in", "seen", ")", ":", "continue", "seen", ".", "add", "(", "encoded_row_key", ")", "entity", "=", "entity_pb", ".", "EntityProto", "(", "row_entity", ")", "datastore_stub_util", ".", "PrepareSpecialPropertiesForLoad", "(", "entity", ")", "(", "yield", "entity", ")" ]
generator that removes duplicate entities from the results .
train
false
21,595
def find_sliced_simpler(track_or_chain):
    """Recursively look for a Simpler device in slicing playback mode.

    Delegates the traversal to find_instrument_meeting_requirement with a
    predicate that checks the device's playback_mode attribute.
    """
    def is_slicing(instrument):
        mode = getattr(instrument, 'playback_mode', None)
        return mode == Live.SimplerDevice.PlaybackMode.slicing

    return find_instrument_meeting_requirement(is_slicing, track_or_chain)
[ "def", "find_sliced_simpler", "(", "track_or_chain", ")", ":", "requirement", "=", "(", "lambda", "i", ":", "(", "getattr", "(", "i", ",", "'playback_mode'", ",", "None", ")", "==", "Live", ".", "SimplerDevice", ".", "PlaybackMode", ".", "slicing", ")", ")", "return", "find_instrument_meeting_requirement", "(", "requirement", ",", "track_or_chain", ")" ]
looks up recursively for a sliced simpler device in the track .
train
false
21,596
def testsource(module, name): module = _normalize_module(module) tests = DocTestFinder().find(module) test = [t for t in tests if (t.name == name)] if (not test): raise ValueError(name, 'not found in tests') test = test[0] testsrc = script_from_examples(test.docstring) return testsrc
[ "def", "testsource", "(", "module", ",", "name", ")", ":", "module", "=", "_normalize_module", "(", "module", ")", "tests", "=", "DocTestFinder", "(", ")", ".", "find", "(", "module", ")", "test", "=", "[", "t", "for", "t", "in", "tests", "if", "(", "t", ".", "name", "==", "name", ")", "]", "if", "(", "not", "test", ")", ":", "raise", "ValueError", "(", "name", ",", "'not found in tests'", ")", "test", "=", "test", "[", "0", "]", "testsrc", "=", "script_from_examples", "(", "test", ".", "docstring", ")", "return", "testsrc" ]
extract the test sources from a doctest docstring as a script .
train
true
21,598
@app.route('/deny')
def view_deny_page():
    """Serve the ANGRY_ASCII art as a plain-text response."""
    response = make_response()
    response.content_type = 'text/plain'
    response.data = ANGRY_ASCII
    return response
[ "@", "app", ".", "route", "(", "'/deny'", ")", "def", "view_deny_page", "(", ")", ":", "response", "=", "make_response", "(", ")", "response", ".", "data", "=", "ANGRY_ASCII", "response", ".", "content_type", "=", "'text/plain'", "return", "response" ]
simple html page .
train
false
21,599
def file_doesnt_endwith(test, endings):
    """Return True if *test* is an existing file whose name ends with none
    of the suffixes in *endings*.

    Args:
        test: path to check.
        endings: iterable of disallowed suffix strings.

    Returns:
        False when *test* is not a file or ends with any suffix; True otherwise.
    """
    # str.endswith accepts a tuple of suffixes, replacing the manual loop.
    return isfile(test) and not test.endswith(tuple(endings))
[ "def", "file_doesnt_endwith", "(", "test", ",", "endings", ")", ":", "if", "(", "not", "isfile", "(", "test", ")", ")", ":", "return", "False", "for", "e", "in", "endings", ":", "if", "test", ".", "endswith", "(", "e", ")", ":", "return", "False", "return", "True" ]
return true if test is a file and its name does not end with any of the strings listed in endings .
train
false
21,600
def delete_child_rows(rows, doctype):
    """Delete all child rows of *doctype* for every distinct parent in *rows*.

    Column index 1 of each row holds the parent name; falsy parents are
    skipped.
    """
    distinct_parents = {r[1] for r in rows}
    for parent in distinct_parents:
        if parent:
            frappe.db.sql(
                u'delete from `tab{0}` where parent=%s'.format(doctype), parent)
[ "def", "delete_child_rows", "(", "rows", ",", "doctype", ")", ":", "for", "p", "in", "list", "(", "set", "(", "[", "r", "[", "1", "]", "for", "r", "in", "rows", "]", ")", ")", ":", "if", "p", ":", "frappe", ".", "db", ".", "sql", "(", "u'delete from `tab{0}` where parent=%s'", ".", "format", "(", "doctype", ")", ",", "p", ")" ]
delete child rows for all parents .
train
false
21,601
@pytest.mark.parametrize('attr, args, expected', [('days', [], 7), ('nanoseconds', [], 0), ('seconds', [], 0), ('total_seconds', [], 604800.0)])
def test_td_namespace(attr, args, expected):
    """Check timedelta accessors exposed through the symbolic .dt namespace."""
    frame = DataFrame({'span': [timedelta(7)]})
    sym = symbol('t', 'var * {span: timedelta}')
    expr = getattr(sym.span.dt, attr)(*args)
    result = compute(expr, frame)
    assert_series_equal(result, Series(expected, name=expr._name))
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'attr, args, expected'", ",", "[", "(", "'days'", ",", "[", "]", ",", "7", ")", ",", "(", "'nanoseconds'", ",", "[", "]", ",", "0", ")", ",", "(", "'seconds'", ",", "[", "]", ",", "0", ")", ",", "(", "'total_seconds'", ",", "[", "]", ",", "604800.0", ")", "]", ")", "def", "test_td_namespace", "(", "attr", ",", "args", ",", "expected", ")", ":", "df", "=", "DataFrame", "(", "{", "'span'", ":", "[", "timedelta", "(", "7", ")", "]", "}", ")", "t", "=", "symbol", "(", "'t'", ",", "'var * {span: timedelta}'", ")", "expr", "=", "getattr", "(", "t", ".", "span", ".", "dt", ",", "attr", ")", "(", "*", "args", ")", "assert_series_equal", "(", "compute", "(", "expr", ",", "df", ")", ",", "Series", "(", "expected", ",", "name", "=", "expr", ".", "_name", ")", ")" ]
timedelta functions .
train
false
21,604
def jgetattr(data, attr, default=None):
    """getattr() that also treats a Py4JJavaError as "attribute missing".

    JVM-backed objects can raise Py4JJavaError from attribute access, so
    fall back to *default* in that case as well.
    """
    try:
        value = getattr(data, attr, default)
    except py4j.protocol.Py4JJavaError:
        value = default
    return value
[ "def", "jgetattr", "(", "data", ",", "attr", ",", "default", "=", "None", ")", ":", "try", ":", "return", "getattr", "(", "data", ",", "attr", ",", "default", ")", "except", "py4j", ".", "protocol", ".", "Py4JJavaError", ":", "return", "default" ]
sparks api doesnt properly implement the getattr interface .
train
false
21,605
def _TestListIdentities(tester, user_cookie, request_dict):
    """Drive the list_identities service call and validate its response.

    Builds the expected identity list from the validator's model objects
    for the cookie's user, compares it against the actual response, and
    returns the actual response dict.
    """
    validator = tester.validator
    user_id, _device_id = tester.GetIdsFromCookie(user_cookie)
    request_dict = deepcopy(request_dict)
    actual_dict = tester.SendRequest('list_identities', user_cookie, request_dict)

    owned_by_user = lambda ident: ident.user_id == user_id
    user_identities = []
    for expected_ident in validator.QueryModelObjects(Identity, predicate=owned_by_user):
        ident_dict = {'identity': expected_ident.key}
        if expected_ident.authority is not None:
            ident_dict['authority'] = expected_ident.authority
        user_identities.append(ident_dict)
    expected_dict = {'user_identities': user_identities}

    tester._CompareResponseDicts('list_identities', user_id, request_dict,
                                 expected_dict, actual_dict)
    return actual_dict
[ "def", "_TestListIdentities", "(", "tester", ",", "user_cookie", ",", "request_dict", ")", ":", "validator", "=", "tester", ".", "validator", "(", "user_id", ",", "device_id", ")", "=", "tester", ".", "GetIdsFromCookie", "(", "user_cookie", ")", "request_dict", "=", "deepcopy", "(", "request_dict", ")", "actual_dict", "=", "tester", ".", "SendRequest", "(", "'list_identities'", ",", "user_cookie", ",", "request_dict", ")", "expected_dict", "=", "{", "'user_identities'", ":", "[", "]", "}", "predicate", "=", "(", "lambda", "ident", ":", "(", "ident", ".", "user_id", "==", "user_id", ")", ")", "for", "expected_ident", "in", "validator", ".", "QueryModelObjects", "(", "Identity", ",", "predicate", "=", "predicate", ")", ":", "ident_dict", "=", "{", "'identity'", ":", "expected_ident", ".", "key", "}", "if", "(", "expected_ident", ".", "authority", "is", "not", "None", ")", ":", "ident_dict", "[", "'authority'", "]", "=", "expected_ident", ".", "authority", "expected_dict", "[", "'user_identities'", "]", ".", "append", "(", "ident_dict", ")", "tester", ".", "_CompareResponseDicts", "(", "'list_identities'", ",", "user_id", ",", "request_dict", ",", "expected_dict", ",", "actual_dict", ")", "return", "actual_dict" ]
called by the servicetester in order to test list_identities service api call .
train
false
21,606
def find_jar_path():
    """Return the first existing candidate path to the py4j jar, or u''."""
    jar_file = u'py4j{0}.jar'.format(__version__)
    here = os.path.dirname(os.path.realpath(__file__))
    candidates = [
        jar_file,
        os.path.join(here, u'../../../py4j-java/' + jar_file),
        os.path.join(here, u'../share/py4j/' + jar_file),
        u'../../../current-release/' + jar_file,
        os.path.join(sys.prefix, u'share/py4j/' + jar_file),
    ]
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return u''
[ "def", "find_jar_path", "(", ")", ":", "paths", "=", "[", "]", "jar_file", "=", "u'py4j{0}.jar'", ".", "format", "(", "__version__", ")", "paths", ".", "append", "(", "jar_file", ")", "paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ",", "(", "u'../../../py4j-java/'", "+", "jar_file", ")", ")", ")", "paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ",", "(", "u'../share/py4j/'", "+", "jar_file", ")", ")", ")", "paths", ".", "append", "(", "(", "u'../../../current-release/'", "+", "jar_file", ")", ")", "paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "sys", ".", "prefix", ",", "(", "u'share/py4j/'", "+", "jar_file", ")", ")", ")", "for", "path", "in", "paths", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "path", "return", "u''" ]
tries to find the path where the py4j jar is located .
train
false
21,607
def take_using_weights(items, weights):
    """Endlessly yield elements of *items* in proportion to *weights*.

    Both arguments must be equal-length, non-empty lists with integer
    weights; heavier items are yielded more often, interleaved round by
    round.
    """
    assert isinstance(items, list)
    assert isinstance(weights, list)
    assert all((isinstance(w, int) for w in weights))
    assert (len(items) == len(weights))
    assert (len(items) > 0)

    yielded_counts = [0] * len(items)
    heaviest = max(weights)
    round_no = 0
    while True:
        progressed = True
        while progressed:
            progressed = False
            for idx, (item, weight) in enumerate(zip(items, weights)):
                # Yield while this item lags its weight-proportional quota.
                if yielded_counts[idx] < (round_no * weight) / float(heaviest):
                    yield item
                    yielded_counts[idx] += 1
                    progressed = True
        round_no += 1
[ "def", "take_using_weights", "(", "items", ",", "weights", ")", ":", "assert", "isinstance", "(", "items", ",", "list", ")", "assert", "isinstance", "(", "weights", ",", "list", ")", "assert", "all", "(", "(", "isinstance", "(", "i", ",", "int", ")", "for", "i", "in", "weights", ")", ")", "assert", "(", "len", "(", "items", ")", "==", "len", "(", "weights", ")", ")", "assert", "(", "len", "(", "items", ")", ">", "0", ")", "already_taken", "=", "[", "0", "for", "i", "in", "items", "]", "item_count", "=", "len", "(", "items", ")", "max_weight", "=", "max", "(", "weights", ")", "i", "=", "0", "while", "True", ":", "adding", "=", "True", "while", "adding", ":", "adding", "=", "False", "for", "(", "item_i", ",", "item", ",", "weight", ")", "in", "zip", "(", "range", "(", "item_count", ")", ",", "items", ",", "weights", ")", ":", "if", "(", "already_taken", "[", "item_i", "]", "<", "(", "(", "i", "*", "weight", ")", "/", "float", "(", "max_weight", ")", ")", ")", ":", "(", "yield", "item", ")", "already_taken", "[", "item_i", "]", "+=", "1", "adding", "=", "True", "i", "+=", "1" ]
generator that keeps yielding items from the items list .
train
true
21,608
def _connected_by_alternating_paths(G, matching, targets):
    """Return the set of nodes of G joined to a target by an alternating path."""
    connected = set()
    for node in G:
        if _is_connected_by_alternating_path(G, node, matching, targets):
            connected.add(node)
    return connected
[ "def", "_connected_by_alternating_paths", "(", "G", ",", "matching", ",", "targets", ")", ":", "return", "{", "v", "for", "v", "in", "G", "if", "_is_connected_by_alternating_path", "(", "G", ",", "v", ",", "matching", ",", "targets", ")", "}" ]
returns the set of vertices that are connected to one of the target vertices by an alternating path in g .
train
false
21,610
def args_to_dict(args):
    """Convert autoserv extra arguments ("key=val" or "key:val") to a dict.

    Keys are lowercased; arguments that do not match the pattern are
    logged as warnings and ignored.

    Args:
        args: iterable of argument strings.

    Returns:
        dict mapping lowercased key to its value string.
    """
    arg_re = re.compile('(\\w+)[:=](.*)$')
    parsed = {}  # renamed from `dict` to avoid shadowing the builtin
    for arg in args:
        match = arg_re.match(arg)
        if match:
            parsed[match.group(1).lower()] = match.group(2)
        else:
            logging.warning(("args_to_dict: argument '%s' doesn't match '%s' pattern. Ignored." % (arg, arg_re.pattern)))
    return parsed
[ "def", "args_to_dict", "(", "args", ")", ":", "arg_re", "=", "re", ".", "compile", "(", "'(\\\\w+)[:=](.*)$'", ")", "dict", "=", "{", "}", "for", "arg", "in", "args", ":", "match", "=", "arg_re", ".", "match", "(", "arg", ")", "if", "match", ":", "dict", "[", "match", ".", "group", "(", "1", ")", ".", "lower", "(", ")", "]", "=", "match", ".", "group", "(", "2", ")", "else", ":", "logging", ".", "warning", "(", "(", "\"args_to_dict: argument '%s' doesn't match '%s' pattern. Ignored.\"", "%", "(", "arg", ",", "arg_re", ".", "pattern", ")", ")", ")", "return", "dict" ]
convert autoserv extra arguments in the form of key=val or key:val to a dictionary .
train
false
21,612
@handle_response_format
@treeio_login_required
def task_set_status(request, task_id, status_id, response_format='html'):
    """Quick-set a Task's status, enforcing per-object permissions.

    Denies access unless the user can execute the task and read the
    status; saves only when the status actually changes.
    """
    task = get_object_or_404(Task, pk=task_id)
    if not request.user.profile.has_permission(task, mode='x'):
        return user_denied(request, message="You don't have access to this Task")

    status = get_object_or_404(TaskStatus, pk=status_id)
    if not request.user.profile.has_permission(status):
        return user_denied(request, message="You don't have access to this Task Status")

    if task.status != status:
        task.status = status
        task.save()
    return task_view(request, task_id, response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "task_set_status", "(", "request", ",", "task_id", ",", "status_id", ",", "response_format", "=", "'html'", ")", ":", "task", "=", "get_object_or_404", "(", "Task", ",", "pk", "=", "task_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "task", ",", "mode", "=", "'x'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Task\"", ")", "status", "=", "get_object_or_404", "(", "TaskStatus", ",", "pk", "=", "status_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "status", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Task Status\"", ")", "if", "(", "not", "(", "task", ".", "status", "==", "status", ")", ")", ":", "task", ".", "status", "=", "status", "task", ".", "save", "(", ")", "return", "task_view", "(", "request", ",", "task_id", ",", "response_format", ")" ]
task quick set: status .
train
false
21,613
def cp(source, destination):
    # Delegate to mv() but with shutil.copy as the action, so the source
    # is left in place instead of being removed.
    """Copy *source* to *destination* (same traversal as mv, non-destructive)."""
    mv(source, destination, _action=shutil.copy)
[ "def", "cp", "(", "source", ",", "destination", ")", ":", "mv", "(", "source", ",", "destination", ",", "_action", "=", "shutil", ".", "copy", ")" ]
copies the file system on the partition <from-minor> to partition <to-minor> .
train
false
21,614
def display_board(genome):
    """Print an ASCII board for an N-queens genome.

    genome[column] gives the row of the queen in that column; queens are
    drawn as 'Q' and empty squares as '.', framed by +- borders.
    """
    border = ('+-' + ('--' * len(genome))) + '+'
    print(border)
    for row in range(len(genome)):
        cells = ['Q' if queen_row == row else '.' for queen_row in genome]
        print(('|' + ''.join(cells)) + '|')
    print(border)
[ "def", "display_board", "(", "genome", ")", ":", "print", "(", "(", "(", "'+-'", "+", "(", "'--'", "*", "len", "(", "genome", ")", ")", ")", "+", "'+'", ")", ")", "for", "row", "in", "range", "(", "len", "(", "genome", ")", ")", ":", "elements", "=", "[", "]", "for", "genome_item", "in", "genome", ":", "if", "(", "genome_item", "==", "row", ")", ":", "elements", ".", "append", "(", "'Q'", ")", "else", ":", "elements", ".", "append", "(", "'.'", ")", "print", "(", "(", "(", "'|'", "+", "''", ".", "join", "(", "elements", ")", ")", "+", "'|'", ")", ")", "print", "(", "(", "(", "'+-'", "+", "(", "'--'", "*", "len", "(", "genome", ")", ")", ")", "+", "'+'", ")", ")" ]
display a genome in the n-queens problem .
train
false
21,615
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def within_inter_cluster(G, ebunch=None, delta=0.001, community='community'):
    """Ratio of within- to inter-cluster common neighbors for node pairs.

    For a pair (u, v) in the same community the score is
    |within| / (|inter| + delta); pairs in different communities score 0.
    Raises NetworkXAlgorithmError when delta is not positive.
    """
    if delta <= 0:
        raise nx.NetworkXAlgorithmError('Delta must be greater than zero')

    def predict(u, v):
        cu = _community(G, u, community)
        cv = _community(G, v, community)
        if cu != cv:
            return 0
        common = set(nx.common_neighbors(G, u, v))
        within = {w for w in common if _community(G, w, community) == cu}
        inter = common - within
        return len(within) / (len(inter) + delta)

    return _apply_prediction(G, predict, ebunch)
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "within_inter_cluster", "(", "G", ",", "ebunch", "=", "None", ",", "delta", "=", "0.001", ",", "community", "=", "'community'", ")", ":", "if", "(", "delta", "<=", "0", ")", ":", "raise", "nx", ".", "NetworkXAlgorithmError", "(", "'Delta must be greater than zero'", ")", "def", "predict", "(", "u", ",", "v", ")", ":", "Cu", "=", "_community", "(", "G", ",", "u", ",", "community", ")", "Cv", "=", "_community", "(", "G", ",", "v", ",", "community", ")", "if", "(", "Cu", "!=", "Cv", ")", ":", "return", "0", "cnbors", "=", "set", "(", "nx", ".", "common_neighbors", "(", "G", ",", "u", ",", "v", ")", ")", "within", "=", "set", "(", "(", "w", "for", "w", "in", "cnbors", "if", "(", "_community", "(", "G", ",", "w", ",", "community", ")", "==", "Cu", ")", ")", ")", "inter", "=", "(", "cnbors", "-", "within", ")", "return", "(", "len", "(", "within", ")", "/", "(", "len", "(", "inter", ")", "+", "delta", ")", ")", "return", "_apply_prediction", "(", "G", ",", "predict", ",", "ebunch", ")" ]
compute the ratio of within- and inter-cluster common neighbors of all node pairs in ebunch .
train
false
21,620
def get_connection(backend=None, fail_silently=False, **kwds):
    """Instantiate and return the configured email backend class.

    *backend* is a dotted path; defaults to settings.EMAIL_BACKEND.
    Raises ImproperlyConfigured when the module or class cannot be loaded.
    """
    backend_path = backend or settings.EMAIL_BACKEND
    try:
        module_name, class_name = backend_path.rsplit('.', 1)
        backend_module = import_module(module_name)
    except ImportError as err:
        raise ImproperlyConfigured('Error importing email backend module %s: "%s"' % (module_name, err))
    try:
        backend_class = getattr(backend_module, class_name)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" class' % (module_name, class_name))
    return backend_class(fail_silently=fail_silently, **kwds)
[ "def", "get_connection", "(", "backend", "=", "None", ",", "fail_silently", "=", "False", ",", "**", "kwds", ")", ":", "path", "=", "(", "backend", "or", "settings", ".", "EMAIL_BACKEND", ")", "try", ":", "(", "mod_name", ",", "klass_name", ")", "=", "path", ".", "rsplit", "(", "'.'", ",", "1", ")", "mod", "=", "import_module", "(", "mod_name", ")", "except", "ImportError", "as", "e", ":", "raise", "ImproperlyConfigured", "(", "(", "'Error importing email backend module %s: \"%s\"'", "%", "(", "mod_name", ",", "e", ")", ")", ")", "try", ":", "klass", "=", "getattr", "(", "mod", ",", "klass_name", ")", "except", "AttributeError", ":", "raise", "ImproperlyConfigured", "(", "(", "'Module \"%s\" does not define a \"%s\" class'", "%", "(", "mod_name", ",", "klass_name", ")", ")", ")", "return", "klass", "(", "fail_silently", "=", "fail_silently", ",", "**", "kwds", ")" ]
return a boto connection for the service .
train
false
21,622
def build_denoiser():
    """Build the denoiser binary ('FlowgramAli_4frame') with GHC, if present.

    Skips the build when GHC is unavailable; always restores the original
    working directory.
    """
    status('Building denoiser...')
    if not app_available('ghc'):
        status("GHC not installed, so cannot build the denoiser binary 'FlowgramAli_4frame'.\n")
        return
    original_dir = getcwd()
    denoiser_dir = join(original_dir, 'qiime/support_files/denoiser/FlowgramAlignment')
    try:
        chdir(denoiser_dir)
        # Short-circuit: stop silently if either make step fails.
        if (system_call('make clean', 'clean denoiser build directory')
                and system_call('make', 'build denoiser')):
            status('Denoiser built.\n')
    finally:
        chdir(original_dir)
[ "def", "build_denoiser", "(", ")", ":", "status", "(", "'Building denoiser...'", ")", "if", "(", "not", "app_available", "(", "'ghc'", ")", ")", ":", "status", "(", "\"GHC not installed, so cannot build the denoiser binary 'FlowgramAli_4frame'.\\n\"", ")", "return", "cwd", "=", "getcwd", "(", ")", "denoiser_dir", "=", "join", "(", "cwd", ",", "'qiime/support_files/denoiser/FlowgramAlignment'", ")", "try", ":", "chdir", "(", "denoiser_dir", ")", "if", "(", "not", "system_call", "(", "'make clean'", ",", "'clean denoiser build directory'", ")", ")", ":", "return", "if", "(", "not", "system_call", "(", "'make'", ",", "'build denoiser'", ")", ")", ":", "return", "status", "(", "'Denoiser built.\\n'", ")", "finally", ":", "chdir", "(", "cwd", ")" ]
build the denoiser code binary .
train
false
21,625
def pop_connection():
    """Pop and return the topmost connection from the module connection stack."""
    return _connection_stack.pop()
[ "def", "pop_connection", "(", ")", ":", "return", "_connection_stack", ".", "pop", "(", ")" ]
pops the topmost connection from the stack .
train
false
21,627
def _other_partitions(verified_partitions, exclude_partitions, course_key):
    """Return deactivated copies of verified partitions not being excluded.

    Each returned UserPartition keeps its original attributes but has
    active=False; a log line records why each was disabled.
    """
    partition_by_id = {p.id: p for p in verified_partitions}
    excluded_ids = set(p.id for p in exclude_partitions)
    results = []
    for pid in set(partition_by_id) - excluded_ids:
        partition = partition_by_id[pid]
        results.append(UserPartition(
            id=partition.id,
            name=partition.name,
            description=partition.description,
            scheme=partition.scheme,
            parameters=partition.parameters,
            groups=partition.groups,
            active=False))
        log.info('Disabled partition %s in course %s because the associated in-course-reverification checkpoint does not exist.', partition.id, course_key)
    return results
[ "def", "_other_partitions", "(", "verified_partitions", ",", "exclude_partitions", ",", "course_key", ")", ":", "results", "=", "[", "]", "partition_by_id", "=", "{", "p", ".", "id", ":", "p", "for", "p", "in", "verified_partitions", "}", "other_partition_ids", "=", "(", "set", "(", "(", "p", ".", "id", "for", "p", "in", "verified_partitions", ")", ")", "-", "set", "(", "(", "p", ".", "id", "for", "p", "in", "exclude_partitions", ")", ")", ")", "for", "pid", "in", "other_partition_ids", ":", "partition", "=", "partition_by_id", "[", "pid", "]", "results", ".", "append", "(", "UserPartition", "(", "id", "=", "partition", ".", "id", ",", "name", "=", "partition", ".", "name", ",", "description", "=", "partition", ".", "description", ",", "scheme", "=", "partition", ".", "scheme", ",", "parameters", "=", "partition", ".", "parameters", ",", "groups", "=", "partition", ".", "groups", ",", "active", "=", "False", ")", ")", "log", ".", "info", "(", "'Disabled partition %s in course %s because the associated in-course-reverification checkpoint does not exist.'", ",", "partition", ".", "id", ",", "course_key", ")", "return", "results" ]
retrieve all partitions not associated with the current set of icrv blocks .
train
false
21,628
def log_cef(name, severity, env, *args, **kwargs):
    """Log a CEF event, filling in the config dict from Django settings.

    *env* may be an HttpRequest (its META is copied, with PATH_INFO made
    absolute), a plain dict, or anything else (treated as empty). Routes
    through Heka when settings.USE_HEKA_FOR_CEF is set.
    """
    config = {
        'cef.product': getattr(settings, 'CEF_PRODUCT', 'AMO'),
        'cef.vendor': getattr(settings, 'CEF_VENDOR', 'Mozilla'),
        'cef.version': getattr(settings, 'CEF_VERSION', '0'),
        'cef.device_version': getattr(settings, 'CEF_DEVICE_VERSION', '0'),
        'cef.file': getattr(settings, 'CEF_FILE', 'syslog'),
    }
    if isinstance(env, HttpRequest):
        environ = env.META.copy()
        if 'PATH_INFO' in environ:
            environ['PATH_INFO'] = env.build_absolute_uri(environ['PATH_INFO'])
    elif isinstance(env, dict):
        environ = env
    else:
        environ = {}
    if settings.USE_HEKA_FOR_CEF:
        return settings.HEKA.cef(name, severity, environ, config=config, *args, **kwargs)
    return _log_cef(name, severity, environ, config=config, *args, **kwargs)
[ "def", "log_cef", "(", "name", ",", "severity", ",", "env", ",", "*", "args", ",", "**", "kwargs", ")", ":", "c", "=", "{", "'cef.product'", ":", "getattr", "(", "settings", ",", "'CEF_PRODUCT'", ",", "'AMO'", ")", ",", "'cef.vendor'", ":", "getattr", "(", "settings", ",", "'CEF_VENDOR'", ",", "'Mozilla'", ")", ",", "'cef.version'", ":", "getattr", "(", "settings", ",", "'CEF_VERSION'", ",", "'0'", ")", ",", "'cef.device_version'", ":", "getattr", "(", "settings", ",", "'CEF_DEVICE_VERSION'", ",", "'0'", ")", ",", "'cef.file'", ":", "getattr", "(", "settings", ",", "'CEF_FILE'", ",", "'syslog'", ")", "}", "if", "isinstance", "(", "env", ",", "HttpRequest", ")", ":", "r", "=", "env", ".", "META", ".", "copy", "(", ")", "if", "(", "'PATH_INFO'", "in", "r", ")", ":", "r", "[", "'PATH_INFO'", "]", "=", "env", ".", "build_absolute_uri", "(", "r", "[", "'PATH_INFO'", "]", ")", "elif", "isinstance", "(", "env", ",", "dict", ")", ":", "r", "=", "env", "else", ":", "r", "=", "{", "}", "if", "settings", ".", "USE_HEKA_FOR_CEF", ":", "return", "settings", ".", "HEKA", ".", "cef", "(", "name", ",", "severity", ",", "r", ",", "config", "=", "c", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "_log_cef", "(", "name", ",", "severity", ",", "r", ",", "config", "=", "c", ",", "*", "args", ",", "**", "kwargs", ")" ]
simply wraps the cef_log function so we dont need to pass in the config dictionary every time .
train
false
21,629
def listObjs(regex='Q', typ=None):
    """List gc-tracked objects, filtered by exact type or by class-name regex.

    When *typ* is given, keep isinstance(obj, typ) matches; otherwise keep
    objects whose type name matches *regex* from the start.
    """
    tracked = gc.get_objects()
    if typ is not None:
        return [obj for obj in tracked if isinstance(obj, typ)]
    return [obj for obj in tracked if re.match(regex, type(obj).__name__)]
[ "def", "listObjs", "(", "regex", "=", "'Q'", ",", "typ", "=", "None", ")", ":", "if", "(", "typ", "is", "not", "None", ")", ":", "return", "[", "x", "for", "x", "in", "gc", ".", "get_objects", "(", ")", "if", "isinstance", "(", "x", ",", "typ", ")", "]", "else", ":", "return", "[", "x", "for", "x", "in", "gc", ".", "get_objects", "(", ")", "if", "re", ".", "match", "(", "regex", ",", "type", "(", "x", ")", ".", "__name__", ")", "]" ]
list all objects managed by python gc with class name matching regex .
train
false
21,631
def getInBetweenLoopsFromLoops(loops, radius):
    """Get the in-between loops from *loops*.

    For each loop, interpolate points at *radius* between every pair of
    consecutive points (wrapping from the last point back to the first).
    """
    inBetweenLoops = []
    for loop in loops:
        inBetweenLoop = []
        # range() instead of the Python-2-only xrange(); semantics unchanged.
        for pointIndex in range(len(loop)):
            pointBegin = loop[pointIndex]
            # Modulo wraps so the last point pairs with the first.
            pointEnd = loop[(pointIndex + 1) % len(loop)]
            intercircle.addPointsFromSegment(pointBegin, pointEnd, inBetweenLoop, radius)
        inBetweenLoops.append(inBetweenLoop)
    return inBetweenLoops
[ "def", "getInBetweenLoopsFromLoops", "(", "loops", ",", "radius", ")", ":", "inBetweenLoops", "=", "[", "]", "for", "loop", "in", "loops", ":", "inBetweenLoop", "=", "[", "]", "for", "pointIndex", "in", "xrange", "(", "len", "(", "loop", ")", ")", ":", "pointBegin", "=", "loop", "[", "pointIndex", "]", "pointEnd", "=", "loop", "[", "(", "(", "pointIndex", "+", "1", ")", "%", "len", "(", "loop", ")", ")", "]", "intercircle", ".", "addPointsFromSegment", "(", "pointBegin", ",", "pointEnd", ",", "inBetweenLoop", ",", "radius", ")", "inBetweenLoops", ".", "append", "(", "inBetweenLoop", ")", "return", "inBetweenLoops" ]
get the in between loops from loops .
train
false
21,632
def func_dump(func):
    """Serialize a user-defined function into (code, defaults, closure).

    The code object is marshalled and decoded with raw_unicode_escape so
    it can be stored as text; closure cell contents are extracted into a
    tuple, or None when the function has no closure.
    """
    code = marshal.dumps(func.__code__).decode('raw_unicode_escape')
    defaults = func.__defaults__
    closure = None
    if func.__closure__:
        closure = tuple(cell.cell_contents for cell in func.__closure__)
    return (code, defaults, closure)
[ "def", "func_dump", "(", "func", ")", ":", "code", "=", "marshal", ".", "dumps", "(", "func", ".", "__code__", ")", ".", "decode", "(", "'raw_unicode_escape'", ")", "defaults", "=", "func", ".", "__defaults__", "if", "func", ".", "__closure__", ":", "closure", "=", "tuple", "(", "(", "c", ".", "cell_contents", "for", "c", "in", "func", ".", "__closure__", ")", ")", "else", ":", "closure", "=", "None", "return", "(", "code", ",", "defaults", ",", "closure", ")" ]
serializes a user defined function .
train
true
21,633
def test_roles_decorator_by_itself():
    """Verify that using @roles alone sets both hosts and effective roles."""
    # r1 maps to hosts ['a', 'b'] in fake_roles.
    @roles('r1')
    def command():
        pass
    eq_hosts(command, ['a', 'b'], env={'roledefs': fake_roles})
    eq_effective_roles(command, ['r1'], env={'roledefs': fake_roles})
[ "def", "test_roles_decorator_by_itself", "(", ")", ":", "@", "roles", "(", "'r1'", ")", "def", "command", "(", ")", ":", "pass", "eq_hosts", "(", "command", ",", "[", "'a'", ",", "'b'", "]", ",", "env", "=", "{", "'roledefs'", ":", "fake_roles", "}", ")", "eq_effective_roles", "(", "command", ",", "[", "'r1'", "]", ",", "env", "=", "{", "'roledefs'", ":", "fake_roles", "}", ")" ]
use of @roles only .
train
false
21,634
@register.inclusion_tag('authority/permission_delete_link.html', takes_context=True)
def permission_delete_link(context, perm):
    """Render a delete link for *perm* when the request user may delete it.

    Deletion is allowed for users with the delete_foreign_permissions
    permission or for the permission's creator; otherwise url is None.
    """
    user = context['request'].user
    if user.is_authenticated():
        may_delete = (user.has_perm('authority.delete_foreign_permissions')
                      or user.pk == perm.creator.pk)
        if may_delete:
            return base_link(context, perm, 'authority-delete-permission')
    return {'url': None}
[ "@", "register", ".", "inclusion_tag", "(", "'authority/permission_delete_link.html'", ",", "takes_context", "=", "True", ")", "def", "permission_delete_link", "(", "context", ",", "perm", ")", ":", "user", "=", "context", "[", "'request'", "]", ".", "user", "if", "user", ".", "is_authenticated", "(", ")", ":", "if", "(", "user", ".", "has_perm", "(", "'authority.delete_foreign_permissions'", ")", "or", "(", "user", ".", "pk", "==", "perm", ".", "creator", ".", "pk", ")", ")", ":", "return", "base_link", "(", "context", ",", "perm", ",", "'authority-delete-permission'", ")", "return", "{", "'url'", ":", "None", "}" ]
renders a html link to the delete view of the given permission .
train
false
21,635
def _get_legen_der(xx, n_coeff=100):
    """Tabulate Legendre polynomials and derivatives expanded about each x.

    For every x in xx, columns 0/1/2 of the output hold P_n(x), P_n'(x)
    and P_n''(x) for n in [0, n_coeff), built with the recurrence in
    _next_legen_der.
    """
    coeffs = np.empty((len(xx), n_coeff, 3))
    for (c, x) in zip(coeffs, xx):
        (p0s, p0ds, p0dds) = (c[:, 0], c[:, 1], c[:, 2])
        # Recurrence seeds: P0=1, P1=x; P0'=0, P1'=1; second derivatives 0.
        p0s[:2] = [1.0, x]
        p0ds[:2] = [0.0, 1.0]
        p0dds[:2] = [0.0, 0.0]
        for n in range(2, n_coeff):
            (p0s[n], p0ds[n], p0dds[n]) = _next_legen_der(n, x, p0s[(n - 1)], p0s[(n - 2)], p0ds[(n - 1)], p0dds[(n - 1)])
    return coeffs
[ "def", "_get_legen_der", "(", "xx", ",", "n_coeff", "=", "100", ")", ":", "coeffs", "=", "np", ".", "empty", "(", "(", "len", "(", "xx", ")", ",", "n_coeff", ",", "3", ")", ")", "for", "(", "c", ",", "x", ")", "in", "zip", "(", "coeffs", ",", "xx", ")", ":", "(", "p0s", ",", "p0ds", ",", "p0dds", ")", "=", "(", "c", "[", ":", ",", "0", "]", ",", "c", "[", ":", ",", "1", "]", ",", "c", "[", ":", ",", "2", "]", ")", "p0s", "[", ":", "2", "]", "=", "[", "1.0", ",", "x", "]", "p0ds", "[", ":", "2", "]", "=", "[", "0.0", ",", "1.0", "]", "p0dds", "[", ":", "2", "]", "=", "[", "0.0", ",", "0.0", "]", "for", "n", "in", "range", "(", "2", ",", "n_coeff", ")", ":", "(", "p0s", "[", "n", "]", ",", "p0ds", "[", "n", "]", ",", "p0dds", "[", "n", "]", ")", "=", "_next_legen_der", "(", "n", ",", "x", ",", "p0s", "[", "(", "n", "-", "1", ")", "]", ",", "p0s", "[", "(", "n", "-", "2", ")", "]", ",", "p0ds", "[", "(", "n", "-", "1", ")", "]", ",", "p0dds", "[", "(", "n", "-", "1", ")", "]", ")", "return", "coeffs" ]
get legendre polynomial derivatives expanded about x .
train
false
21,636
def test_redundant():
    """Exercise redundant ch_names bookkeeping in create_info."""
    info = create_info(ch_names=['a', 'b', 'c'], sfreq=1000.0, ch_types=None)
    for idx, expected in enumerate(['a', 'b', 'c']):
        assert_equal(info['ch_names'][idx], expected)
    assert_equal(info['ch_names'], info['ch_names'])
    assert_equal(info['ch_names'], ['a', 'b', 'c'])
    info = create_info(ch_names=[], sfreq=1000.0, ch_types=None)
    assert_equal(info['ch_names'], [])
    info = create_info(ch_names=['a', 'b', 'c'], sfreq=1000.0, ch_types=None)
[ "def", "test_redundant", "(", ")", ":", "info", "=", "create_info", "(", "ch_names", "=", "[", "'a'", ",", "'b'", ",", "'c'", "]", ",", "sfreq", "=", "1000.0", ",", "ch_types", "=", "None", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", "[", "0", "]", ",", "'a'", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", "[", "1", "]", ",", "'b'", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", "[", "2", "]", ",", "'c'", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", ",", "info", "[", "'ch_names'", "]", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", ",", "[", "'a'", ",", "'b'", ",", "'c'", "]", ")", "info", "=", "create_info", "(", "ch_names", "=", "[", "]", ",", "sfreq", "=", "1000.0", ",", "ch_types", "=", "None", ")", "assert_equal", "(", "info", "[", "'ch_names'", "]", ",", "[", "]", ")", "info", "=", "create_info", "(", "ch_names", "=", "[", "'a'", ",", "'b'", ",", "'c'", "]", ",", "sfreq", "=", "1000.0", ",", "ch_types", "=", "None", ")" ]
test some of the redundant properties of info .
train
false
21,639
def run_eventlet(): import eventlet eventlet.monkey_patch()
[ "def", "run_eventlet", "(", ")", ":", "import", "eventlet", "eventlet", ".", "monkey_patch", "(", ")" ]
prepare to run tests with eventlet .
train
false
21,640
def getFileOrDirectoryTypesUnmodifiedGcode(fileName, fileTypes, wasCancelled): if isEmptyOrCancelled(fileName, wasCancelled): return [] if isDirectorySetting(): return archive.getFilesWithFileTypesWithoutWords(fileTypes, [], fileName) return [fileName]
[ "def", "getFileOrDirectoryTypesUnmodifiedGcode", "(", "fileName", ",", "fileTypes", ",", "wasCancelled", ")", ":", "if", "isEmptyOrCancelled", "(", "fileName", ",", "wasCancelled", ")", ":", "return", "[", "]", "if", "isDirectorySetting", "(", ")", ":", "return", "archive", ".", "getFilesWithFileTypesWithoutWords", "(", "fileTypes", ",", "[", "]", ",", "fileName", ")", "return", "[", "fileName", "]" ]
get the gcode files in the directory the file is in if directory setting is true .
train
false
21,641
def create_desktop(desktop_name, start_explorer=1): sa = pywintypes.SECURITY_ATTRIBUTES() sa.bInheritHandle = 1 try: hdesk = win32service.CreateDesktop(desktop_name, 0, win32con.MAXIMUM_ALLOWED, sa) except win32service.error: traceback.print_exc() errbuf = cStringIO.StringIO() traceback.print_exc(None, errbuf) win32api.MessageBox(0, errbuf.getvalue(), 'Desktop creation failed') return if start_explorer: s = win32process.STARTUPINFO() s.lpDesktop = desktop_name prc_info = win32process.CreateProcess(None, 'Explorer.exe', None, None, True, win32con.CREATE_NEW_CONSOLE, None, 'c:\\', s) th = thread.start_new_thread(new_icon, (hdesk, desktop_name)) hdesk.SwitchDesktop()
[ "def", "create_desktop", "(", "desktop_name", ",", "start_explorer", "=", "1", ")", ":", "sa", "=", "pywintypes", ".", "SECURITY_ATTRIBUTES", "(", ")", "sa", ".", "bInheritHandle", "=", "1", "try", ":", "hdesk", "=", "win32service", ".", "CreateDesktop", "(", "desktop_name", ",", "0", ",", "win32con", ".", "MAXIMUM_ALLOWED", ",", "sa", ")", "except", "win32service", ".", "error", ":", "traceback", ".", "print_exc", "(", ")", "errbuf", "=", "cStringIO", ".", "StringIO", "(", ")", "traceback", ".", "print_exc", "(", "None", ",", "errbuf", ")", "win32api", ".", "MessageBox", "(", "0", ",", "errbuf", ".", "getvalue", "(", ")", ",", "'Desktop creation failed'", ")", "return", "if", "start_explorer", ":", "s", "=", "win32process", ".", "STARTUPINFO", "(", ")", "s", ".", "lpDesktop", "=", "desktop_name", "prc_info", "=", "win32process", ".", "CreateProcess", "(", "None", ",", "'Explorer.exe'", ",", "None", ",", "None", ",", "True", ",", "win32con", ".", "CREATE_NEW_CONSOLE", ",", "None", ",", "'c:\\\\'", ",", "s", ")", "th", "=", "thread", ".", "start_new_thread", "(", "new_icon", ",", "(", "hdesk", ",", "desktop_name", ")", ")", "hdesk", ".", "SwitchDesktop", "(", ")" ]
creates a new desktop and spawns a thread running on it will also start a new icon thread on an existing desktop .
train
false
21,642
def clause2concepts(filename, rel_name, schema, closures=[]): concepts = [] subj = 0 pkey = schema[0] fields = schema[1:] records = _str2records(filename, rel_name) if (not (filename in not_unary)): concepts.append(unary_concept(pkey, subj, records)) for field in fields: obj = schema.index(field) concepts.append(binary_concept(field, closures, subj, obj, records)) return concepts
[ "def", "clause2concepts", "(", "filename", ",", "rel_name", ",", "schema", ",", "closures", "=", "[", "]", ")", ":", "concepts", "=", "[", "]", "subj", "=", "0", "pkey", "=", "schema", "[", "0", "]", "fields", "=", "schema", "[", "1", ":", "]", "records", "=", "_str2records", "(", "filename", ",", "rel_name", ")", "if", "(", "not", "(", "filename", "in", "not_unary", ")", ")", ":", "concepts", ".", "append", "(", "unary_concept", "(", "pkey", ",", "subj", ",", "records", ")", ")", "for", "field", "in", "fields", ":", "obj", "=", "schema", ".", "index", "(", "field", ")", "concepts", ".", "append", "(", "binary_concept", "(", "field", ",", "closures", ",", "subj", ",", "obj", ",", "records", ")", ")", "return", "concepts" ]
convert a file of prolog clauses into a list of concept objects .
train
false
21,643
def mark_bootstrapped(): pc_settings = sublime.load_settings(pc_settings_filename()) if (not pc_settings.get('bootstrapped')): pc_settings.set('bootstrapped', True) sublime.save_settings(pc_settings_filename())
[ "def", "mark_bootstrapped", "(", ")", ":", "pc_settings", "=", "sublime", ".", "load_settings", "(", "pc_settings_filename", "(", ")", ")", "if", "(", "not", "pc_settings", ".", "get", "(", "'bootstrapped'", ")", ")", ":", "pc_settings", ".", "set", "(", "'bootstrapped'", ",", "True", ")", "sublime", ".", "save_settings", "(", "pc_settings_filename", "(", ")", ")" ]
mark package control as successfully bootstrapped .
train
false
21,644
def w2p_pack_plugin(filename, path, plugin_name): filename = abspath(filename) path = abspath(path) if (not filename.endswith(('web2py.plugin.%s.w2p' % plugin_name))): raise Exception('Not a web2py plugin name') plugin_tarball = tarfile.open(filename, 'w:gz') try: app_dir = path while (app_dir[(-1)] == '/'): app_dir = app_dir[:(-1)] files1 = glob.glob(os.path.join(app_dir, ('*/plugin_%s.*' % plugin_name))) files2 = glob.glob(os.path.join(app_dir, ('*/plugin_%s/*' % plugin_name))) for file in (files1 + files2): plugin_tarball.add(file, arcname=file[(len(app_dir) + 1):]) finally: plugin_tarball.close()
[ "def", "w2p_pack_plugin", "(", "filename", ",", "path", ",", "plugin_name", ")", ":", "filename", "=", "abspath", "(", "filename", ")", "path", "=", "abspath", "(", "path", ")", "if", "(", "not", "filename", ".", "endswith", "(", "(", "'web2py.plugin.%s.w2p'", "%", "plugin_name", ")", ")", ")", ":", "raise", "Exception", "(", "'Not a web2py plugin name'", ")", "plugin_tarball", "=", "tarfile", ".", "open", "(", "filename", ",", "'w:gz'", ")", "try", ":", "app_dir", "=", "path", "while", "(", "app_dir", "[", "(", "-", "1", ")", "]", "==", "'/'", ")", ":", "app_dir", "=", "app_dir", "[", ":", "(", "-", "1", ")", "]", "files1", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "(", "'*/plugin_%s.*'", "%", "plugin_name", ")", ")", ")", "files2", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "app_dir", ",", "(", "'*/plugin_%s/*'", "%", "plugin_name", ")", ")", ")", "for", "file", "in", "(", "files1", "+", "files2", ")", ":", "plugin_tarball", ".", "add", "(", "file", ",", "arcname", "=", "file", "[", "(", "len", "(", "app_dir", ")", "+", "1", ")", ":", "]", ")", "finally", ":", "plugin_tarball", ".", "close", "(", ")" ]
packs the given plugin into a w2p file .
train
false
21,646
def get_file_hash(filePath): if (not filePath): return None md4 = hashlib.new('md4').copy def gen(f): while True: x = f.read(9728000) if x: (yield x) else: return def md4_hash(data): m = md4() m.update(data) return m with open(filePath, 'rb') as f: a = gen(f) hashes = [md4_hash(data).digest() for data in a] if (len(hashes) == 1): return hashes[0].encode('hex') else: return md4_hash(reduce((lambda a, d: (a + d)), hashes, '')).hexdigest()
[ "def", "get_file_hash", "(", "filePath", ")", ":", "if", "(", "not", "filePath", ")", ":", "return", "None", "md4", "=", "hashlib", ".", "new", "(", "'md4'", ")", ".", "copy", "def", "gen", "(", "f", ")", ":", "while", "True", ":", "x", "=", "f", ".", "read", "(", "9728000", ")", "if", "x", ":", "(", "yield", "x", ")", "else", ":", "return", "def", "md4_hash", "(", "data", ")", ":", "m", "=", "md4", "(", ")", "m", ".", "update", "(", "data", ")", "return", "m", "with", "open", "(", "filePath", ",", "'rb'", ")", "as", "f", ":", "a", "=", "gen", "(", "f", ")", "hashes", "=", "[", "md4_hash", "(", "data", ")", ".", "digest", "(", ")", "for", "data", "in", "a", "]", "if", "(", "len", "(", "hashes", ")", "==", "1", ")", ":", "return", "hashes", "[", "0", "]", ".", "encode", "(", "'hex'", ")", "else", ":", "return", "md4_hash", "(", "reduce", "(", "(", "lambda", "a", ",", "d", ":", "(", "a", "+", "d", ")", ")", ",", "hashes", ",", "''", ")", ")", ".", "hexdigest", "(", ")" ]
get the md5 hash of a given filename .
train
false
21,647
def selection(): return selection_model().selection()
[ "def", "selection", "(", ")", ":", "return", "selection_model", "(", ")", ".", "selection", "(", ")" ]
selection sort: scan the list to find its smallest element .
train
false
21,648
def _lookup_proxmox_task(upid): log.debug('Getting creation status for upid: {0}'.format(upid)) tasks = query('get', 'cluster/tasks') if tasks: for task in tasks: if (task['upid'] == upid): log.debug('Found upid task: {0}'.format(task)) return task return False
[ "def", "_lookup_proxmox_task", "(", "upid", ")", ":", "log", ".", "debug", "(", "'Getting creation status for upid: {0}'", ".", "format", "(", "upid", ")", ")", "tasks", "=", "query", "(", "'get'", ",", "'cluster/tasks'", ")", "if", "tasks", ":", "for", "task", "in", "tasks", ":", "if", "(", "task", "[", "'upid'", "]", "==", "upid", ")", ":", "log", ".", "debug", "(", "'Found upid task: {0}'", ".", "format", "(", "task", ")", ")", "return", "task", "return", "False" ]
retrieve the logs and retrieve the status for a upid .
train
true
21,649
def ParseConfigCommandLine(): if flags.FLAGS.config: CONFIG.Initialize(filename=flags.FLAGS.config, must_exist=True) else: raise RuntimeError('A config file is not specified.') if flags.FLAGS.secondary_configs: for config_file in flags.FLAGS.secondary_configs: CONFIG.LoadSecondaryConfig(config_file) for statement in flags.FLAGS.parameter: if ('=' not in statement): raise RuntimeError(('statement %s on command line not valid.' % statement)) (name, value) = statement.split('=', 1) CONFIG.global_override[name] = value for context in flags.FLAGS.context: if context: CONFIG.AddContext(context) if CONFIG['Config.writeback']: CONFIG.SetWriteBack(CONFIG['Config.writeback']) if flags.FLAGS.config_help: print 'Configuration overview.' CONFIG.PrintHelp() sys.exit(0)
[ "def", "ParseConfigCommandLine", "(", ")", ":", "if", "flags", ".", "FLAGS", ".", "config", ":", "CONFIG", ".", "Initialize", "(", "filename", "=", "flags", ".", "FLAGS", ".", "config", ",", "must_exist", "=", "True", ")", "else", ":", "raise", "RuntimeError", "(", "'A config file is not specified.'", ")", "if", "flags", ".", "FLAGS", ".", "secondary_configs", ":", "for", "config_file", "in", "flags", ".", "FLAGS", ".", "secondary_configs", ":", "CONFIG", ".", "LoadSecondaryConfig", "(", "config_file", ")", "for", "statement", "in", "flags", ".", "FLAGS", ".", "parameter", ":", "if", "(", "'='", "not", "in", "statement", ")", ":", "raise", "RuntimeError", "(", "(", "'statement %s on command line not valid.'", "%", "statement", ")", ")", "(", "name", ",", "value", ")", "=", "statement", ".", "split", "(", "'='", ",", "1", ")", "CONFIG", ".", "global_override", "[", "name", "]", "=", "value", "for", "context", "in", "flags", ".", "FLAGS", ".", "context", ":", "if", "context", ":", "CONFIG", ".", "AddContext", "(", "context", ")", "if", "CONFIG", "[", "'Config.writeback'", "]", ":", "CONFIG", ".", "SetWriteBack", "(", "CONFIG", "[", "'Config.writeback'", "]", ")", "if", "flags", ".", "FLAGS", ".", "config_help", ":", "print", "'Configuration overview.'", "CONFIG", ".", "PrintHelp", "(", ")", "sys", ".", "exit", "(", "0", ")" ]
parse all the command line options which control the config system .
train
true
21,651
def get_pillar(opts, grains, minion_id, saltenv=None, ext=None, funcs=None, pillar=None, pillarenv=None, rend=None): ptype = {'remote': RemotePillar, 'local': Pillar}.get(opts['file_client'], Pillar) log.debug('Determining pillar cache') if opts['pillar_cache']: log.info('Compiling pillar from cache') log.debug('get_pillar using pillar cache with ext: {0}'.format(ext)) return PillarCache(opts, grains, minion_id, saltenv, ext=ext, functions=funcs, pillar=pillar, pillarenv=pillarenv) return ptype(opts, grains, minion_id, saltenv, ext, functions=funcs, pillar=pillar, pillarenv=pillarenv, rend=rend)
[ "def", "get_pillar", "(", "opts", ",", "grains", ",", "minion_id", ",", "saltenv", "=", "None", ",", "ext", "=", "None", ",", "funcs", "=", "None", ",", "pillar", "=", "None", ",", "pillarenv", "=", "None", ",", "rend", "=", "None", ")", ":", "ptype", "=", "{", "'remote'", ":", "RemotePillar", ",", "'local'", ":", "Pillar", "}", ".", "get", "(", "opts", "[", "'file_client'", "]", ",", "Pillar", ")", "log", ".", "debug", "(", "'Determining pillar cache'", ")", "if", "opts", "[", "'pillar_cache'", "]", ":", "log", ".", "info", "(", "'Compiling pillar from cache'", ")", "log", ".", "debug", "(", "'get_pillar using pillar cache with ext: {0}'", ".", "format", "(", "ext", ")", ")", "return", "PillarCache", "(", "opts", ",", "grains", ",", "minion_id", ",", "saltenv", ",", "ext", "=", "ext", ",", "functions", "=", "funcs", ",", "pillar", "=", "pillar", ",", "pillarenv", "=", "pillarenv", ")", "return", "ptype", "(", "opts", ",", "grains", ",", "minion_id", ",", "saltenv", ",", "ext", ",", "functions", "=", "funcs", ",", "pillar", "=", "pillar", ",", "pillarenv", "=", "pillarenv", ",", "rend", "=", "rend", ")" ]
return the correct pillar driver based on the file_client option .
train
false
21,652
def complete_variable(text): if (text.find('.') != (-1)): var = text.split('.')[0] if (var in rline_mpstate.status.msgs): ret = [] for f in rline_mpstate.status.msgs[var].get_fieldnames(): ret.append(((var + '.') + f)) return ret return [] return rline_mpstate.status.msgs.keys()
[ "def", "complete_variable", "(", "text", ")", ":", "if", "(", "text", ".", "find", "(", "'.'", ")", "!=", "(", "-", "1", ")", ")", ":", "var", "=", "text", ".", "split", "(", "'.'", ")", "[", "0", "]", "if", "(", "var", "in", "rline_mpstate", ".", "status", ".", "msgs", ")", ":", "ret", "=", "[", "]", "for", "f", "in", "rline_mpstate", ".", "status", ".", "msgs", "[", "var", "]", ".", "get_fieldnames", "(", ")", ":", "ret", ".", "append", "(", "(", "(", "var", "+", "'.'", ")", "+", "f", ")", ")", "return", "ret", "return", "[", "]", "return", "rline_mpstate", ".", "status", ".", "msgs", ".", "keys", "(", ")" ]
complete a mavlink variable .
train
true
21,653
def H(s): return md5_hex(s)
[ "def", "H", "(", "s", ")", ":", "return", "md5_hex", "(", "s", ")" ]
unsigned short .
train
false
21,655
def decodeChallengeMessage(ntlm_data): FORMAT = '<8sIHHII8s8sHHI' FORMAT_SIZE = struct.calcsize(FORMAT) (signature, message_type, targetname_len, targetname_maxlen, targetname_offset, flags, challenge, _, targetinfo_len, targetinfo_maxlen, targetinfo_offset) = struct.unpack(FORMAT, ntlm_data[:FORMAT_SIZE]) assert (signature == 'NTLMSSP\x00') assert (message_type == 2) return (challenge, flags, ntlm_data[targetinfo_offset:(targetinfo_offset + targetinfo_len)])
[ "def", "decodeChallengeMessage", "(", "ntlm_data", ")", ":", "FORMAT", "=", "'<8sIHHII8s8sHHI'", "FORMAT_SIZE", "=", "struct", ".", "calcsize", "(", "FORMAT", ")", "(", "signature", ",", "message_type", ",", "targetname_len", ",", "targetname_maxlen", ",", "targetname_offset", ",", "flags", ",", "challenge", ",", "_", ",", "targetinfo_len", ",", "targetinfo_maxlen", ",", "targetinfo_offset", ")", "=", "struct", ".", "unpack", "(", "FORMAT", ",", "ntlm_data", "[", ":", "FORMAT_SIZE", "]", ")", "assert", "(", "signature", "==", "'NTLMSSP\\x00'", ")", "assert", "(", "message_type", "==", "2", ")", "return", "(", "challenge", ",", "flags", ",", "ntlm_data", "[", "targetinfo_offset", ":", "(", "targetinfo_offset", "+", "targetinfo_len", ")", "]", ")" ]
references: - [ms-nlmp]: 2 .
train
false
21,656
def rpow(self, rhs): if isinstance(rhs, variable.Variable): return PowVarVar()(rhs, self) _check_constant_type(rhs) return PowConstVar(rhs)(self)
[ "def", "rpow", "(", "self", ",", "rhs", ")", ":", "if", "isinstance", "(", "rhs", ",", "variable", ".", "Variable", ")", ":", "return", "PowVarVar", "(", ")", "(", "rhs", ",", "self", ")", "_check_constant_type", "(", "rhs", ")", "return", "PowConstVar", "(", "rhs", ")", "(", "self", ")" ]
element-wise power function .
train
false
21,657
@open_file(1, mode='wb') def write_graphml(G, path, encoding='utf-8', prettyprint=True, infer_numeric_types=False): writer = GraphMLWriter(encoding=encoding, prettyprint=prettyprint, infer_numeric_types=infer_numeric_types) writer.add_graph_element(G) writer.dump(path)
[ "@", "open_file", "(", "1", ",", "mode", "=", "'wb'", ")", "def", "write_graphml", "(", "G", ",", "path", ",", "encoding", "=", "'utf-8'", ",", "prettyprint", "=", "True", ",", "infer_numeric_types", "=", "False", ")", ":", "writer", "=", "GraphMLWriter", "(", "encoding", "=", "encoding", ",", "prettyprint", "=", "prettyprint", ",", "infer_numeric_types", "=", "infer_numeric_types", ")", "writer", ".", "add_graph_element", "(", "G", ")", "writer", ".", "dump", "(", "path", ")" ]
write g in graphml xml format to path parameters g : graph a networkx graph infer_numeric_types : boolean determine if numeric types should be generalized despite different python values .
train
false
21,658
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
21,659
def setuid(uid): os.setuid(parse_uid(uid))
[ "def", "setuid", "(", "uid", ")", ":", "os", ".", "setuid", "(", "parse_uid", "(", "uid", ")", ")" ]
version of :func:os .
train
false
21,661
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
21,662
def test_empty_lists_with_nones(Chart): chart = Chart() chart.add('A', [None, None]) chart.add('B', [None, 4, 4]) q = chart.render_pyquery() assert (len(q('.legend')) == 2)
[ "def", "test_empty_lists_with_nones", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", ")", "chart", ".", "add", "(", "'A'", ",", "[", "None", ",", "None", "]", ")", "chart", ".", "add", "(", "'B'", ",", "[", "None", ",", "4", ",", "4", "]", ")", "q", "=", "chart", ".", "render_pyquery", "(", ")", "assert", "(", "len", "(", "q", "(", "'.legend'", ")", ")", "==", "2", ")" ]
test chart rendering with a none filled serie .
train
false
21,663
def _append_container_types(inventory, host_type): for _host in inventory['_meta']['hostvars'].keys(): hdata = inventory['_meta']['hostvars'][_host] if ('container_name' in hdata): if hdata['container_name'].startswith(host_type): if ('physical_host' not in hdata): logger.debug('Set physical host for %s to %s', _host, host_type) hdata['physical_host'] = host_type
[ "def", "_append_container_types", "(", "inventory", ",", "host_type", ")", ":", "for", "_host", "in", "inventory", "[", "'_meta'", "]", "[", "'hostvars'", "]", ".", "keys", "(", ")", ":", "hdata", "=", "inventory", "[", "'_meta'", "]", "[", "'hostvars'", "]", "[", "_host", "]", "if", "(", "'container_name'", "in", "hdata", ")", ":", "if", "hdata", "[", "'container_name'", "]", ".", "startswith", "(", "host_type", ")", ":", "if", "(", "'physical_host'", "not", "in", "hdata", ")", ":", "logger", ".", "debug", "(", "'Set physical host for %s to %s'", ",", "_host", ",", "host_type", ")", "hdata", "[", "'physical_host'", "]", "=", "host_type" ]
append the "physical_host" type to all containers .
train
false
21,664
def RGS_generalized(m): d = zeros((m + 1)) for i in range(0, (m + 1)): d[(0, i)] = 1 for i in range(1, (m + 1)): for j in range(m): if (j <= (m - i)): d[(i, j)] = ((j * d[((i - 1), j)]) + d[((i - 1), (j + 1))]) else: d[(i, j)] = 0 return d
[ "def", "RGS_generalized", "(", "m", ")", ":", "d", "=", "zeros", "(", "(", "m", "+", "1", ")", ")", "for", "i", "in", "range", "(", "0", ",", "(", "m", "+", "1", ")", ")", ":", "d", "[", "(", "0", ",", "i", ")", "]", "=", "1", "for", "i", "in", "range", "(", "1", ",", "(", "m", "+", "1", ")", ")", ":", "for", "j", "in", "range", "(", "m", ")", ":", "if", "(", "j", "<=", "(", "m", "-", "i", ")", ")", ":", "d", "[", "(", "i", ",", "j", ")", "]", "=", "(", "(", "j", "*", "d", "[", "(", "(", "i", "-", "1", ")", ",", "j", ")", "]", ")", "+", "d", "[", "(", "(", "i", "-", "1", ")", ",", "(", "j", "+", "1", ")", ")", "]", ")", "else", ":", "d", "[", "(", "i", ",", "j", ")", "]", "=", "0", "return", "d" ]
computes the m + 1 generalized unrestricted growth strings and returns them as rows in matrix .
train
false
21,665
def iter_period(start, end, period): period_start = start increment = datetime.timedelta(seconds=period) for i in xrange(int(math.ceil((timeutils.delta_seconds(start, end) / float(period))))): next_start = (period_start + increment) (yield (period_start, next_start)) period_start = next_start
[ "def", "iter_period", "(", "start", ",", "end", ",", "period", ")", ":", "period_start", "=", "start", "increment", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "period", ")", "for", "i", "in", "xrange", "(", "int", "(", "math", ".", "ceil", "(", "(", "timeutils", ".", "delta_seconds", "(", "start", ",", "end", ")", "/", "float", "(", "period", ")", ")", ")", ")", ")", ":", "next_start", "=", "(", "period_start", "+", "increment", ")", "(", "yield", "(", "period_start", ",", "next_start", ")", ")", "period_start", "=", "next_start" ]
split a time from start to end in periods of a number of seconds .
train
false
21,666
def get_item_bom_rate(): item_bom_map = {} for b in frappe.db.sql(u'select item, (total_cost/quantity) as bom_rate\n DCTB DCTB from `tabBOM` where is_active=1 and is_default=1', as_dict=1): item_bom_map.setdefault(b.item, flt(b.bom_rate)) return item_bom_map
[ "def", "get_item_bom_rate", "(", ")", ":", "item_bom_map", "=", "{", "}", "for", "b", "in", "frappe", ".", "db", ".", "sql", "(", "u'select item, (total_cost/quantity) as bom_rate\\n DCTB DCTB from `tabBOM` where is_active=1 and is_default=1'", ",", "as_dict", "=", "1", ")", ":", "item_bom_map", ".", "setdefault", "(", "b", ".", "item", ",", "flt", "(", "b", ".", "bom_rate", ")", ")", "return", "item_bom_map" ]
get bom rate of an item from bom .
train
false
21,667
def is_dn_equal(dn1, dn2): if (not isinstance(dn1, list)): dn1 = ldap.dn.str2dn(utf8_encode(dn1)) if (not isinstance(dn2, list)): dn2 = ldap.dn.str2dn(utf8_encode(dn2)) if (len(dn1) != len(dn2)): return False for (rdn1, rdn2) in zip(dn1, dn2): if (not is_rdn_equal(rdn1, rdn2)): return False return True
[ "def", "is_dn_equal", "(", "dn1", ",", "dn2", ")", ":", "if", "(", "not", "isinstance", "(", "dn1", ",", "list", ")", ")", ":", "dn1", "=", "ldap", ".", "dn", ".", "str2dn", "(", "utf8_encode", "(", "dn1", ")", ")", "if", "(", "not", "isinstance", "(", "dn2", ",", "list", ")", ")", ":", "dn2", "=", "ldap", ".", "dn", ".", "str2dn", "(", "utf8_encode", "(", "dn2", ")", ")", "if", "(", "len", "(", "dn1", ")", "!=", "len", "(", "dn2", ")", ")", ":", "return", "False", "for", "(", "rdn1", ",", "rdn2", ")", "in", "zip", "(", "dn1", ",", "dn2", ")", ":", "if", "(", "not", "is_rdn_equal", "(", "rdn1", ",", "rdn2", ")", ")", ":", "return", "False", "return", "True" ]
return true if and only if the dns are equal .
train
false
21,668
@must_have_permission(WRITE) @must_not_be_registration def fork_pointer(auth, node, **kwargs): NodeRelation = apps.get_model('osf.NodeRelation') pointer_id = request.json.get('pointerId') pointer = NodeRelation.load(pointer_id) if (pointer is None): raise HTTPError(http.BAD_REQUEST) try: node.fork_pointer(pointer, auth=auth, save=True) except ValueError: raise HTTPError(http.BAD_REQUEST)
[ "@", "must_have_permission", "(", "WRITE", ")", "@", "must_not_be_registration", "def", "fork_pointer", "(", "auth", ",", "node", ",", "**", "kwargs", ")", ":", "NodeRelation", "=", "apps", ".", "get_model", "(", "'osf.NodeRelation'", ")", "pointer_id", "=", "request", ".", "json", ".", "get", "(", "'pointerId'", ")", "pointer", "=", "NodeRelation", ".", "load", "(", "pointer_id", ")", "if", "(", "pointer", "is", "None", ")", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ")", "try", ":", "node", ".", "fork_pointer", "(", "pointer", ",", "auth", "=", "auth", ",", "save", "=", "True", ")", "except", "ValueError", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ")" ]
fork a pointer .
train
false
21,670
def isInt(n): return isinstance(n, int_types)
[ "def", "isInt", "(", "n", ")", ":", "return", "isinstance", "(", "n", ",", "int_types", ")" ]
test if arg is an int .
train
false
21,672
def objattr(accessing_obj, accessed_obj, *args, **kwargs): return attr(accessed_obj, accessed_obj, *args, **kwargs)
[ "def", "objattr", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "attr", "(", "accessed_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")" ]
usage: objattr objattr objattr works like attr .
train
false
21,673
def getheaderAnim(im): bb = 'GIF89a' bb += intToBin(im.size[0]) bb += intToBin(im.size[1]) bb += '\x87\x00\x00' return bb
[ "def", "getheaderAnim", "(", "im", ")", ":", "bb", "=", "'GIF89a'", "bb", "+=", "intToBin", "(", "im", ".", "size", "[", "0", "]", ")", "bb", "+=", "intToBin", "(", "im", ".", "size", "[", "1", "]", ")", "bb", "+=", "'\\x87\\x00\\x00'", "return", "bb" ]
animation header .
train
true
21,674
def And(*args): def reduce_and(cmp_intervala, cmp_intervalb): if ((cmp_intervala[0] is False) or (cmp_intervalb[0] is False)): first = False elif ((cmp_intervala[0] is None) or (cmp_intervalb[0] is None)): first = None else: first = True if ((cmp_intervala[1] is False) or (cmp_intervalb[1] is False)): second = False elif ((cmp_intervala[1] is None) or (cmp_intervalb[1] is None)): second = None else: second = True return (first, second) return reduce(reduce_and, args)
[ "def", "And", "(", "*", "args", ")", ":", "def", "reduce_and", "(", "cmp_intervala", ",", "cmp_intervalb", ")", ":", "if", "(", "(", "cmp_intervala", "[", "0", "]", "is", "False", ")", "or", "(", "cmp_intervalb", "[", "0", "]", "is", "False", ")", ")", ":", "first", "=", "False", "elif", "(", "(", "cmp_intervala", "[", "0", "]", "is", "None", ")", "or", "(", "cmp_intervalb", "[", "0", "]", "is", "None", ")", ")", ":", "first", "=", "None", "else", ":", "first", "=", "True", "if", "(", "(", "cmp_intervala", "[", "1", "]", "is", "False", ")", "or", "(", "cmp_intervalb", "[", "1", "]", "is", "False", ")", ")", ":", "second", "=", "False", "elif", "(", "(", "cmp_intervala", "[", "1", "]", "is", "None", ")", "or", "(", "cmp_intervalb", "[", "1", "]", "is", "None", ")", ")", ":", "second", "=", "None", "else", ":", "second", "=", "True", "return", "(", "first", ",", "second", ")", "return", "reduce", "(", "reduce_and", ",", "args", ")" ]
defines the three valued and behaviour for a 2-tuple of three valued logic values .
train
false
21,676
def process_locales(series): languages = dict(((k.lower(), v['native']) for (k, v) in product_details.languages.items())) for row in series: if ('data' in row): new = {} for (key, count) in row['data'].items(): if (key in languages): k = (u'%s (%s)' % (languages[key], key)) new[k] = count row['data'] = new (yield row)
[ "def", "process_locales", "(", "series", ")", ":", "languages", "=", "dict", "(", "(", "(", "k", ".", "lower", "(", ")", ",", "v", "[", "'native'", "]", ")", "for", "(", "k", ",", "v", ")", "in", "product_details", ".", "languages", ".", "items", "(", ")", ")", ")", "for", "row", "in", "series", ":", "if", "(", "'data'", "in", "row", ")", ":", "new", "=", "{", "}", "for", "(", "key", ",", "count", ")", "in", "row", "[", "'data'", "]", ".", "items", "(", ")", ":", "if", "(", "key", "in", "languages", ")", ":", "k", "=", "(", "u'%s (%s)'", "%", "(", "languages", "[", "key", "]", ",", "key", ")", ")", "new", "[", "k", "]", "=", "count", "row", "[", "'data'", "]", "=", "new", "(", "yield", "row", ")" ]
convert locale codes to pretty names .
train
false
21,677
def create_request_parameters(parent, request_model, params=None, index=None): if (params is None): params = {} for param in request_model.params: source = param.source target = param.target if (source == 'identifier'): value = getattr(parent, xform_name(param.name)) elif (source == 'data'): value = get_data_member(parent, param.path) elif (source in ['string', 'integer', 'boolean']): value = param.value elif (source == 'input'): continue else: raise NotImplementedError('Unsupported source type: {0}'.format(source)) build_param_structure(params, target, value, index) return params
[ "def", "create_request_parameters", "(", "parent", ",", "request_model", ",", "params", "=", "None", ",", "index", "=", "None", ")", ":", "if", "(", "params", "is", "None", ")", ":", "params", "=", "{", "}", "for", "param", "in", "request_model", ".", "params", ":", "source", "=", "param", ".", "source", "target", "=", "param", ".", "target", "if", "(", "source", "==", "'identifier'", ")", ":", "value", "=", "getattr", "(", "parent", ",", "xform_name", "(", "param", ".", "name", ")", ")", "elif", "(", "source", "==", "'data'", ")", ":", "value", "=", "get_data_member", "(", "parent", ",", "param", ".", "path", ")", "elif", "(", "source", "in", "[", "'string'", ",", "'integer'", ",", "'boolean'", "]", ")", ":", "value", "=", "param", ".", "value", "elif", "(", "source", "==", "'input'", ")", ":", "continue", "else", ":", "raise", "NotImplementedError", "(", "'Unsupported source type: {0}'", ".", "format", "(", "source", ")", ")", "build_param_structure", "(", "params", ",", "target", ",", "value", ",", "index", ")", "return", "params" ]
handle request parameters that can be filled in from identifiers .
train
false
21,678
@utils.singledispatch def print_item(ty, context, builder, val): raise NotImplementedError(('printing unimplemented for values of type %s' % (ty,)))
[ "@", "utils", ".", "singledispatch", "def", "print_item", "(", "ty", ",", "context", ",", "builder", ",", "val", ")", ":", "raise", "NotImplementedError", "(", "(", "'printing unimplemented for values of type %s'", "%", "(", "ty", ",", ")", ")", ")" ]
handle printing of a single value of the given numba type .
train
false
21,680
def is_develop_egg(dist): meta_provider = dist._provider egg_info_dir = os.path.dirname(meta_provider.egg_info) egg_name = pkg_resources.to_filename(dist.project_name) return (meta_provider.egg_info.endswith((egg_name + '.egg-info')) and os.path.exists(os.path.join(egg_info_dir, 'setup.py')))
[ "def", "is_develop_egg", "(", "dist", ")", ":", "meta_provider", "=", "dist", ".", "_provider", "egg_info_dir", "=", "os", ".", "path", ".", "dirname", "(", "meta_provider", ".", "egg_info", ")", "egg_name", "=", "pkg_resources", ".", "to_filename", "(", "dist", ".", "project_name", ")", "return", "(", "meta_provider", ".", "egg_info", ".", "endswith", "(", "(", "egg_name", "+", "'.egg-info'", ")", ")", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "egg_info_dir", ",", "'setup.py'", ")", ")", ")" ]
is the distribution installed in development mode .
train
false
21,681
def render_view_to_iterable(context, request, name='', secure=True): response = render_view_to_response(context, request, name, secure) if (response is None): return None return response.app_iter
[ "def", "render_view_to_iterable", "(", "context", ",", "request", ",", "name", "=", "''", ",", "secure", "=", "True", ")", ":", "response", "=", "render_view_to_response", "(", "context", ",", "request", ",", "name", ",", "secure", ")", "if", "(", "response", "is", "None", ")", ":", "return", "None", "return", "response", ".", "app_iter" ]
call the :term:view callable configured with a :term:view configuration that matches the :term:view name name registered against the specified context and request and return an iterable object which represents the body of a response .
train
false
21,683
def showLogging(debug=False): try: log_level = logging.WARN log_format = LOG_FORMAT_DEBUG if debug: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=log_format) except: logging.basicConfig()
[ "def", "showLogging", "(", "debug", "=", "False", ")", ":", "try", ":", "log_level", "=", "logging", ".", "WARN", "log_format", "=", "LOG_FORMAT_DEBUG", "if", "debug", ":", "log_level", "=", "logging", ".", "DEBUG", "logging", ".", "basicConfig", "(", "level", "=", "log_level", ",", "format", "=", "log_format", ")", "except", ":", "logging", ".", "basicConfig", "(", ")" ]
shortcut for enabling log dump .
train
true
21,684
def stop(name): cmd = '/etc/rc.d/{0} -f stop'.format(name) return (not __salt__['cmd.retcode'](cmd))
[ "def", "stop", "(", "name", ")", ":", "cmd", "=", "'/etc/rc.d/{0} -f stop'", ".", "format", "(", "name", ")", "return", "(", "not", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ")", ")" ]
stop the specified service cli example: .
train
false
21,685
def as_declarative(**kw): def decorate(cls): kw['cls'] = cls kw['name'] = cls.__name__ return declarative_base(**kw) return decorate
[ "def", "as_declarative", "(", "**", "kw", ")", ":", "def", "decorate", "(", "cls", ")", ":", "kw", "[", "'cls'", "]", "=", "cls", "kw", "[", "'name'", "]", "=", "cls", ".", "__name__", "return", "declarative_base", "(", "**", "kw", ")", "return", "decorate" ]
class decorator for :func: .
train
false
21,686
def _ParseManifest(manifest_string, jar_file_name): manifest_string = '\n'.join(manifest_string.splitlines()).rstrip('\n') section_strings = re.split('\n{2,}', manifest_string) parsed_sections = [_ParseManifestSection(s, jar_file_name) for s in section_strings] main_section = parsed_sections[0] sections = {} for entry in parsed_sections[1:]: name = entry.get('Name') if (name is None): raise InvalidJarError(('%s: Manifest entry has no Name attribute: %r' % (jar_file_name, entry))) else: sections[name] = entry return Manifest(main_section, sections)
[ "def", "_ParseManifest", "(", "manifest_string", ",", "jar_file_name", ")", ":", "manifest_string", "=", "'\\n'", ".", "join", "(", "manifest_string", ".", "splitlines", "(", ")", ")", ".", "rstrip", "(", "'\\n'", ")", "section_strings", "=", "re", ".", "split", "(", "'\\n{2,}'", ",", "manifest_string", ")", "parsed_sections", "=", "[", "_ParseManifestSection", "(", "s", ",", "jar_file_name", ")", "for", "s", "in", "section_strings", "]", "main_section", "=", "parsed_sections", "[", "0", "]", "sections", "=", "{", "}", "for", "entry", "in", "parsed_sections", "[", "1", ":", "]", ":", "name", "=", "entry", ".", "get", "(", "'Name'", ")", "if", "(", "name", "is", "None", ")", ":", "raise", "InvalidJarError", "(", "(", "'%s: Manifest entry has no Name attribute: %r'", "%", "(", "jar_file_name", ",", "entry", ")", ")", ")", "else", ":", "sections", "[", "name", "]", "=", "entry", "return", "Manifest", "(", "main_section", ",", "sections", ")" ]
parse a manifest object out of the given string .
train
false
21,688
def freqd(b, a=1, worN=None, whole=False, plot=None): (b, a) = map(atleast_1d, (b, a)) if whole: lastpoint = (2 * pi) else: lastpoint = pi if (worN is None): N = 512 w = numpy.linspace(0, lastpoint, N, endpoint=False) elif isinstance(worN, int): N = worN w = numpy.linspace(0, lastpoint, N, endpoint=False) else: w = worN w = atleast_1d(w) zm1 = exp(((-1j) * w)) h = (polyval(b[::(-1)], zm1) / polyval(a[::(-1)], zm1)) if (plot is not None): plot(w, h) return (w, h)
[ "def", "freqd", "(", "b", ",", "a", "=", "1", ",", "worN", "=", "None", ",", "whole", "=", "False", ",", "plot", "=", "None", ")", ":", "(", "b", ",", "a", ")", "=", "map", "(", "atleast_1d", ",", "(", "b", ",", "a", ")", ")", "if", "whole", ":", "lastpoint", "=", "(", "2", "*", "pi", ")", "else", ":", "lastpoint", "=", "pi", "if", "(", "worN", "is", "None", ")", ":", "N", "=", "512", "w", "=", "numpy", ".", "linspace", "(", "0", ",", "lastpoint", ",", "N", ",", "endpoint", "=", "False", ")", "elif", "isinstance", "(", "worN", ",", "int", ")", ":", "N", "=", "worN", "w", "=", "numpy", ".", "linspace", "(", "0", ",", "lastpoint", ",", "N", ",", "endpoint", "=", "False", ")", "else", ":", "w", "=", "worN", "w", "=", "atleast_1d", "(", "w", ")", "zm1", "=", "exp", "(", "(", "(", "-", "1j", ")", "*", "w", ")", ")", "h", "=", "(", "polyval", "(", "b", "[", ":", ":", "(", "-", "1", ")", "]", ",", "zm1", ")", "/", "polyval", "(", "a", "[", ":", ":", "(", "-", "1", ")", "]", ",", "zm1", ")", ")", "if", "(", "plot", "is", "not", "None", ")", ":", "plot", "(", "w", ",", "h", ")", "return", "(", "w", ",", "h", ")" ]
compute the frequency response of a digital filter .
train
false
21,689
def send_mail_comm(note): log.info((u'Sending emails for %s' % note.thread.obj)) if (note.note_type in comm.EMAIL_SENIOR_REVIEWERS_AND_DEV): rev_template = comm.EMAIL_SENIOR_REVIEWERS_AND_DEV[note.note_type]['reviewer'] email_recipients(get_senior_reviewers(), note, template=rev_template) dev_template = comm.EMAIL_SENIOR_REVIEWERS_AND_DEV[note.note_type]['developer'] email_recipients(get_developers(note), note, template=dev_template) else: email_recipients(get_recipients(note), note) if (note.note_type == comm.DEVELOPER_COMMENT): subject = ('%s: %s' % (unicode(comm.NOTE_TYPES[note.note_type]), note.thread.obj.name)) mail_template = comm.COMM_MAIL_MAP.get(note.note_type, 'generic') send_mail_jinja(subject, ('comm/emails/%s.html' % mail_template), get_mail_context(note, None), recipient_list=[settings.MKT_REVIEWS_EMAIL], from_email=settings.MKT_REVIEWERS_EMAIL, perm_setting='app_reviewed')
[ "def", "send_mail_comm", "(", "note", ")", ":", "log", ".", "info", "(", "(", "u'Sending emails for %s'", "%", "note", ".", "thread", ".", "obj", ")", ")", "if", "(", "note", ".", "note_type", "in", "comm", ".", "EMAIL_SENIOR_REVIEWERS_AND_DEV", ")", ":", "rev_template", "=", "comm", ".", "EMAIL_SENIOR_REVIEWERS_AND_DEV", "[", "note", ".", "note_type", "]", "[", "'reviewer'", "]", "email_recipients", "(", "get_senior_reviewers", "(", ")", ",", "note", ",", "template", "=", "rev_template", ")", "dev_template", "=", "comm", ".", "EMAIL_SENIOR_REVIEWERS_AND_DEV", "[", "note", ".", "note_type", "]", "[", "'developer'", "]", "email_recipients", "(", "get_developers", "(", "note", ")", ",", "note", ",", "template", "=", "dev_template", ")", "else", ":", "email_recipients", "(", "get_recipients", "(", "note", ")", ",", "note", ")", "if", "(", "note", ".", "note_type", "==", "comm", ".", "DEVELOPER_COMMENT", ")", ":", "subject", "=", "(", "'%s: %s'", "%", "(", "unicode", "(", "comm", ".", "NOTE_TYPES", "[", "note", ".", "note_type", "]", ")", ",", "note", ".", "thread", ".", "obj", ".", "name", ")", ")", "mail_template", "=", "comm", ".", "COMM_MAIL_MAP", ".", "get", "(", "note", ".", "note_type", ",", "'generic'", ")", "send_mail_jinja", "(", "subject", ",", "(", "'comm/emails/%s.html'", "%", "mail_template", ")", ",", "get_mail_context", "(", "note", ",", "None", ")", ",", "recipient_list", "=", "[", "settings", ".", "MKT_REVIEWS_EMAIL", "]", ",", "from_email", "=", "settings", ".", "MKT_REVIEWERS_EMAIL", ",", "perm_setting", "=", "'app_reviewed'", ")" ]
email utility used globally by the communication dashboard to send emails .
train
false
21,690
def action_method(wrapped): def wrapper(self, *arg, **kw): if (self._ainfo is None): self._ainfo = [] info = kw.pop('_info', None) backframes = (kw.pop('_backframes', 0) + 2) if (is_nonstr_iter(info) and (len(info) == 4)): info = ActionInfo(*info) if (info is None): try: f = traceback.extract_stack(limit=4) last_frame = ActionInfo(*f[(-1)]) if (last_frame.function == 'extract_stack'): f.pop() info = ActionInfo(*f[(- backframes)]) except: info = ActionInfo(None, 0, '', '') self._ainfo.append(info) try: result = wrapped(self, *arg, **kw) finally: self._ainfo.pop() return result if hasattr(wrapped, '__name__'): functools.update_wrapper(wrapper, wrapped) wrapper.__docobj__ = wrapped return wrapper
[ "def", "action_method", "(", "wrapped", ")", ":", "def", "wrapper", "(", "self", ",", "*", "arg", ",", "**", "kw", ")", ":", "if", "(", "self", ".", "_ainfo", "is", "None", ")", ":", "self", ".", "_ainfo", "=", "[", "]", "info", "=", "kw", ".", "pop", "(", "'_info'", ",", "None", ")", "backframes", "=", "(", "kw", ".", "pop", "(", "'_backframes'", ",", "0", ")", "+", "2", ")", "if", "(", "is_nonstr_iter", "(", "info", ")", "and", "(", "len", "(", "info", ")", "==", "4", ")", ")", ":", "info", "=", "ActionInfo", "(", "*", "info", ")", "if", "(", "info", "is", "None", ")", ":", "try", ":", "f", "=", "traceback", ".", "extract_stack", "(", "limit", "=", "4", ")", "last_frame", "=", "ActionInfo", "(", "*", "f", "[", "(", "-", "1", ")", "]", ")", "if", "(", "last_frame", ".", "function", "==", "'extract_stack'", ")", ":", "f", ".", "pop", "(", ")", "info", "=", "ActionInfo", "(", "*", "f", "[", "(", "-", "backframes", ")", "]", ")", "except", ":", "info", "=", "ActionInfo", "(", "None", ",", "0", ",", "''", ",", "''", ")", "self", ".", "_ainfo", ".", "append", "(", "info", ")", "try", ":", "result", "=", "wrapped", "(", "self", ",", "*", "arg", ",", "**", "kw", ")", "finally", ":", "self", ".", "_ainfo", ".", "pop", "(", ")", "return", "result", "if", "hasattr", "(", "wrapped", ",", "'__name__'", ")", ":", "functools", ".", "update_wrapper", "(", "wrapper", ",", "wrapped", ")", "wrapper", ".", "__docobj__", "=", "wrapped", "return", "wrapper" ]
wrapper to provide the right conflict info report data when a method that calls configurator .
train
false
21,693
def fixed_ip_associate_pool(context, network_id, instance_uuid=None, host=None): return IMPL.fixed_ip_associate_pool(context, network_id, instance_uuid, host)
[ "def", "fixed_ip_associate_pool", "(", "context", ",", "network_id", ",", "instance_uuid", "=", "None", ",", "host", "=", "None", ")", ":", "return", "IMPL", ".", "fixed_ip_associate_pool", "(", "context", ",", "network_id", ",", "instance_uuid", ",", "host", ")" ]
find free ip in network and associate it to instance or host .
train
false
21,694
def _default_to(parser, dest, value): if (getattr(parser.values, dest) is None): setattr(parser.values, dest, value)
[ "def", "_default_to", "(", "parser", ",", "dest", ",", "value", ")", ":", "if", "(", "getattr", "(", "parser", ".", "values", ",", "dest", ")", "is", "None", ")", ":", "setattr", "(", "parser", ".", "values", ",", "dest", ",", "value", ")" ]
helper function; set the given optino dest to *value* if its none .
train
false
21,696
def ffmpeg_read_image(filename, with_mask=True): if with_mask: pix_fmt = 'rgba' else: pix_fmt = 'rgb24' reader = FFMPEG_VideoReader(filename, pix_fmt=pix_fmt, check_duration=False) im = reader.lastread del reader return im
[ "def", "ffmpeg_read_image", "(", "filename", ",", "with_mask", "=", "True", ")", ":", "if", "with_mask", ":", "pix_fmt", "=", "'rgba'", "else", ":", "pix_fmt", "=", "'rgb24'", "reader", "=", "FFMPEG_VideoReader", "(", "filename", ",", "pix_fmt", "=", "pix_fmt", ",", "check_duration", "=", "False", ")", "im", "=", "reader", ".", "lastread", "del", "reader", "return", "im" ]
read an image file .
train
false
21,697
def getGcodeFileText(fileName, gcodeText): if (gcodeText != ''): return gcodeText if fileName.endswith('.gcode'): return archive.getFileText(fileName) return ''
[ "def", "getGcodeFileText", "(", "fileName", ",", "gcodeText", ")", ":", "if", "(", "gcodeText", "!=", "''", ")", ":", "return", "gcodeText", "if", "fileName", ".", "endswith", "(", "'.gcode'", ")", ":", "return", "archive", ".", "getFileText", "(", "fileName", ")", "return", "''" ]
get the gcode text from a file if it the gcode text is empty and if the file is a gcode file .
train
false
21,699
def dict_to_etree(d, parent): for (k, v) in d.items(): if (v is None): etree.SubElement(parent, k) elif isinstance(v, six.string_types): etree.SubElement(parent, k).text = v elif (isinstance(v, dict) or isinstance(v, odict)): child = etree.SubElement(parent, k) dict_to_etree(v, child) elif (not isinstance(v, collections.Sized)): etree.SubElement(parent, k).text = str(v) elif (len(v) == 0): etree.SubElement(parent, k) else: for e in v: child = etree.SubElement(parent, k) if (isinstance(e, dict) or isinstance(e, odict)): dict_to_etree(e, child) else: child.text = str(e)
[ "def", "dict_to_etree", "(", "d", ",", "parent", ")", ":", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "(", "v", "is", "None", ")", ":", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", "elif", "isinstance", "(", "v", ",", "six", ".", "string_types", ")", ":", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", ".", "text", "=", "v", "elif", "(", "isinstance", "(", "v", ",", "dict", ")", "or", "isinstance", "(", "v", ",", "odict", ")", ")", ":", "child", "=", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", "dict_to_etree", "(", "v", ",", "child", ")", "elif", "(", "not", "isinstance", "(", "v", ",", "collections", ".", "Sized", ")", ")", ":", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", ".", "text", "=", "str", "(", "v", ")", "elif", "(", "len", "(", "v", ")", "==", "0", ")", ":", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", "else", ":", "for", "e", "in", "v", ":", "child", "=", "etree", ".", "SubElement", "(", "parent", ",", "k", ")", "if", "(", "isinstance", "(", "e", ",", "dict", ")", "or", "isinstance", "(", "e", ",", "odict", ")", ")", ":", "dict_to_etree", "(", "e", ",", "child", ")", "else", ":", "child", ".", "text", "=", "str", "(", "e", ")" ]
takes a the dict whose value is either none or an instance of dict .
train
false
21,700
def getDistanceToLineByPaths(begin, end, paths): distanceToLine = (-987654321.0) for path in paths: distanceToLine = max(getDistanceToLineByPath(begin, end, path), distanceToLine) return distanceToLine
[ "def", "getDistanceToLineByPaths", "(", "begin", ",", "end", ",", "paths", ")", ":", "distanceToLine", "=", "(", "-", "987654321.0", ")", "for", "path", "in", "paths", ":", "distanceToLine", "=", "max", "(", "getDistanceToLineByPath", "(", "begin", ",", "end", ",", "path", ")", ",", "distanceToLine", ")", "return", "distanceToLine" ]
get the maximum distance from paths to an infinite line .
train
false
21,701
def GetChangesSample(): client = CreateClient() changes = client.GetChanges() for change in changes.entry: print change.title.text, change.changestamp.value
[ "def", "GetChangesSample", "(", ")", ":", "client", "=", "CreateClient", "(", ")", "changes", "=", "client", ".", "GetChanges", "(", ")", "for", "change", "in", "changes", ".", "entry", ":", "print", "change", ".", "title", ".", "text", ",", "change", ".", "changestamp", ".", "value" ]
get and display the changes for the user .
train
false
21,702
def decode_cert(b64der): try: return util.ComparableX509(OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der))) except OpenSSL.crypto.Error as error: raise errors.DeserializationError(error)
[ "def", "decode_cert", "(", "b64der", ")", ":", "try", ":", "return", "util", ".", "ComparableX509", "(", "OpenSSL", ".", "crypto", ".", "load_certificate", "(", "OpenSSL", ".", "crypto", ".", "FILETYPE_ASN1", ",", "decode_b64jose", "(", "b64der", ")", ")", ")", "except", "OpenSSL", ".", "crypto", ".", "Error", "as", "error", ":", "raise", "errors", ".", "DeserializationError", "(", "error", ")" ]
decode jose base-64 der-encoded certificate .
train
false
21,703
@docfiller def generic_laplace(input, derivative2, output=None, mode='reflect', cval=0.0, extra_arguments=(), extra_keywords=None): if (extra_keywords is None): extra_keywords = {} input = numpy.asarray(input) (output, return_value) = _ni_support._get_output(output, input) axes = list(range(input.ndim)) if (len(axes) > 0): modes = _ni_support._normalize_sequence(mode, len(axes)) derivative2(input, axes[0], output, modes[0], cval, *extra_arguments, **extra_keywords) for ii in range(1, len(axes)): tmp = derivative2(input, axes[ii], output.dtype, modes[ii], cval, *extra_arguments, **extra_keywords) output += tmp else: output[...] = input[...] return return_value
[ "@", "docfiller", "def", "generic_laplace", "(", "input", ",", "derivative2", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ",", "extra_arguments", "=", "(", ")", ",", "extra_keywords", "=", "None", ")", ":", "if", "(", "extra_keywords", "is", "None", ")", ":", "extra_keywords", "=", "{", "}", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "(", "output", ",", "return_value", ")", "=", "_ni_support", ".", "_get_output", "(", "output", ",", "input", ")", "axes", "=", "list", "(", "range", "(", "input", ".", "ndim", ")", ")", "if", "(", "len", "(", "axes", ")", ">", "0", ")", ":", "modes", "=", "_ni_support", ".", "_normalize_sequence", "(", "mode", ",", "len", "(", "axes", ")", ")", "derivative2", "(", "input", ",", "axes", "[", "0", "]", ",", "output", ",", "modes", "[", "0", "]", ",", "cval", ",", "*", "extra_arguments", ",", "**", "extra_keywords", ")", "for", "ii", "in", "range", "(", "1", ",", "len", "(", "axes", ")", ")", ":", "tmp", "=", "derivative2", "(", "input", ",", "axes", "[", "ii", "]", ",", "output", ".", "dtype", ",", "modes", "[", "ii", "]", ",", "cval", ",", "*", "extra_arguments", ",", "**", "extra_keywords", ")", "output", "+=", "tmp", "else", ":", "output", "[", "...", "]", "=", "input", "[", "...", "]", "return", "return_value" ]
n-dimensional laplace filter using a provided second derivative function parameters %s derivative2 : callable callable with the following signature:: derivative2 see extra_arguments .
train
false
21,704
def attach_principal_policy(policyName, principal, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.attach_principal_policy(policyName=policyName, principal=principal) return {'attached': True} except ClientError as e: return {'attached': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "attach_principal_policy", "(", "policyName", ",", "principal", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "attach_principal_policy", "(", "policyName", "=", "policyName", ",", "principal", "=", "principal", ")", "return", "{", "'attached'", ":", "True", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'attached'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
attach the specified policy to the specified principal returns {attached: true} if the policy was attached {attached: false} if the policy was not attached .
train
false
21,707
def AES_new(key, IV=None): if (IV is None): IV = fast_urandom16() if HAVE_AES: return (AES.new(key, AES.MODE_CBC, IV), IV) else: return (PYAES.AESModeOfOperationCBC(key, iv=IV), IV)
[ "def", "AES_new", "(", "key", ",", "IV", "=", "None", ")", ":", "if", "(", "IV", "is", "None", ")", ":", "IV", "=", "fast_urandom16", "(", ")", "if", "HAVE_AES", ":", "return", "(", "AES", ".", "new", "(", "key", ",", "AES", ".", "MODE_CBC", ",", "IV", ")", ",", "IV", ")", "else", ":", "return", "(", "PYAES", ".", "AESModeOfOperationCBC", "(", "key", ",", "iv", "=", "IV", ")", ",", "IV", ")" ]
return an aes cipher object and random iv if none specified .
train
false
21,708
def console_get(context, console_id, instance_uuid=None): return IMPL.console_get(context, console_id, instance_uuid)
[ "def", "console_get", "(", "context", ",", "console_id", ",", "instance_uuid", "=", "None", ")", ":", "return", "IMPL", ".", "console_get", "(", "context", ",", "console_id", ",", "instance_uuid", ")" ]
get a specific console .
train
false
21,709
def _AppendTokenSubtype(node, subtype): pytree_utils.AppendNodeAnnotation(node, pytree_utils.Annotation.SUBTYPE, subtype)
[ "def", "_AppendTokenSubtype", "(", "node", ",", "subtype", ")", ":", "pytree_utils", ".", "AppendNodeAnnotation", "(", "node", ",", "pytree_utils", ".", "Annotation", ".", "SUBTYPE", ",", "subtype", ")" ]
append the tokens subtype only if its not already set .
train
false
21,710
def confirm_xxnet_exit(): is_xxnet_exit = False xlog.debug('start confirm_xxnet_exit') for i in range(30): if (http_request('http://127.0.0.1:8087/quit') == False): xlog.debug('good, xxnet:8087 cleared!') is_xxnet_exit = True break else: xlog.debug(('<%d>: try to terminate xxnet:8087' % i)) time.sleep(1) for i in range(30): host_port = config.get(['modules', 'launcher', 'control_port'], 8085) req_url = 'http://127.0.0.1:{port}/quit'.format(port=host_port) if (http_request(req_url) == False): xlog.debug(('good, xxnet:%s clear!' % host_port)) is_xxnet_exit = True break else: xlog.debug(('<%d>: try to terminate xxnet:%s' % (i, host_port))) time.sleep(1) xlog.debug('finished confirm_xxnet_exit') return is_xxnet_exit
[ "def", "confirm_xxnet_exit", "(", ")", ":", "is_xxnet_exit", "=", "False", "xlog", ".", "debug", "(", "'start confirm_xxnet_exit'", ")", "for", "i", "in", "range", "(", "30", ")", ":", "if", "(", "http_request", "(", "'http://127.0.0.1:8087/quit'", ")", "==", "False", ")", ":", "xlog", ".", "debug", "(", "'good, xxnet:8087 cleared!'", ")", "is_xxnet_exit", "=", "True", "break", "else", ":", "xlog", ".", "debug", "(", "(", "'<%d>: try to terminate xxnet:8087'", "%", "i", ")", ")", "time", ".", "sleep", "(", "1", ")", "for", "i", "in", "range", "(", "30", ")", ":", "host_port", "=", "config", ".", "get", "(", "[", "'modules'", ",", "'launcher'", ",", "'control_port'", "]", ",", "8085", ")", "req_url", "=", "'http://127.0.0.1:{port}/quit'", ".", "format", "(", "port", "=", "host_port", ")", "if", "(", "http_request", "(", "req_url", ")", "==", "False", ")", ":", "xlog", ".", "debug", "(", "(", "'good, xxnet:%s clear!'", "%", "host_port", ")", ")", "is_xxnet_exit", "=", "True", "break", "else", ":", "xlog", ".", "debug", "(", "(", "'<%d>: try to terminate xxnet:%s'", "%", "(", "i", ",", "host_port", ")", ")", ")", "time", ".", "sleep", "(", "1", ")", "xlog", ".", "debug", "(", "'finished confirm_xxnet_exit'", ")", "return", "is_xxnet_exit" ]
suppose xxnet is running .
train
false
21,713
def getComplexByPrefixBeginEnd(prefixBegin, prefixEnd, valueComplex, xmlElement): valueComplex = getComplexByPrefix(prefixBegin, valueComplex, xmlElement) if (prefixEnd in xmlElement.attributeDictionary): return (0.5 * getComplexByPrefix((valueComplex + valueComplex), prefixEnd, xmlElement)) else: return valueComplex
[ "def", "getComplexByPrefixBeginEnd", "(", "prefixBegin", ",", "prefixEnd", ",", "valueComplex", ",", "xmlElement", ")", ":", "valueComplex", "=", "getComplexByPrefix", "(", "prefixBegin", ",", "valueComplex", ",", "xmlElement", ")", "if", "(", "prefixEnd", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "return", "(", "0.5", "*", "getComplexByPrefix", "(", "(", "valueComplex", "+", "valueComplex", ")", ",", "prefixEnd", ",", "xmlElement", ")", ")", "else", ":", "return", "valueComplex" ]
get complex from element node .
train
false
21,714
def post_save(sender, instance, created, **kwargs): if created: instance.at_first_save()
[ "def", "post_save", "(", "sender", ",", "instance", ",", "created", ",", "**", "kwargs", ")", ":", "if", "created", ":", "instance", ".", "at_first_save", "(", ")" ]
receives a signal just after the object is saved .
train
false
21,715
def runonce(exc_class=Exception): def runonce_meth(meth): @wraps(meth) def inner_runonce_meth(self, *args): if (not getattr(self, '_already_executed', False)): self._already_executed = True return meth(self, *args) raise exc_class() return inner_runonce_meth return runonce_meth
[ "def", "runonce", "(", "exc_class", "=", "Exception", ")", ":", "def", "runonce_meth", "(", "meth", ")", ":", "@", "wraps", "(", "meth", ")", "def", "inner_runonce_meth", "(", "self", ",", "*", "args", ")", ":", "if", "(", "not", "getattr", "(", "self", ",", "'_already_executed'", ",", "False", ")", ")", ":", "self", ".", "_already_executed", "=", "True", "return", "meth", "(", "self", ",", "*", "args", ")", "raise", "exc_class", "(", ")", "return", "inner_runonce_meth", "return", "runonce_meth" ]
function to decorate methods that should be called only once .
train
false
21,716
def check_vpc(vpc_id=None, vpc_name=None, region=None, key=None, keyid=None, profile=None): if (not _exactly_one((vpc_name, vpc_id))): raise SaltInvocationError('One (but not both) of vpc_id or vpc_name must be provided.') if vpc_name: vpc_id = _get_id(vpc_name=vpc_name, region=region, key=key, keyid=keyid, profile=profile) elif (not _find_vpcs(vpc_id=vpc_id, region=region, key=key, keyid=keyid, profile=profile)): log.info('VPC {0} does not exist.'.format(vpc_id)) return None return vpc_id
[ "def", "check_vpc", "(", "vpc_id", "=", "None", ",", "vpc_name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "(", "not", "_exactly_one", "(", "(", "vpc_name", ",", "vpc_id", ")", ")", ")", ":", "raise", "SaltInvocationError", "(", "'One (but not both) of vpc_id or vpc_name must be provided.'", ")", "if", "vpc_name", ":", "vpc_id", "=", "_get_id", "(", "vpc_name", "=", "vpc_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "elif", "(", "not", "_find_vpcs", "(", "vpc_id", "=", "vpc_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", ")", ":", "log", ".", "info", "(", "'VPC {0} does not exist.'", ".", "format", "(", "vpc_id", ")", ")", "return", "None", "return", "vpc_id" ]
check whether a vpc with the given name or id exists .
train
true
21,717
def get_str_from_wsgi(environ, key, default): value = get_bytes_from_wsgi(environ, key, default) return value.decode(UTF_8, errors='replace')
[ "def", "get_str_from_wsgi", "(", "environ", ",", "key", ",", "default", ")", ":", "value", "=", "get_bytes_from_wsgi", "(", "environ", ",", "key", ",", "default", ")", "return", "value", ".", "decode", "(", "UTF_8", ",", "errors", "=", "'replace'", ")" ]
get a value from the wsgi environ dictionary as str .
train
false
21,718
def check_cv(cv=3, y=None, classifier=False): if (cv is None): cv = 3 if isinstance(cv, numbers.Integral): if (classifier and (y is not None) and (type_of_target(y) in ('binary', 'multiclass'))): return StratifiedKFold(cv) else: return KFold(cv) if ((not hasattr(cv, 'split')) or isinstance(cv, str)): if ((not isinstance(cv, Iterable)) or isinstance(cv, str)): raise ValueError(('Expected cv as an integer, cross-validation object (from sklearn.model_selection) or an iterable. Got %s.' % cv)) return _CVIterableWrapper(cv) return cv
[ "def", "check_cv", "(", "cv", "=", "3", ",", "y", "=", "None", ",", "classifier", "=", "False", ")", ":", "if", "(", "cv", "is", "None", ")", ":", "cv", "=", "3", "if", "isinstance", "(", "cv", ",", "numbers", ".", "Integral", ")", ":", "if", "(", "classifier", "and", "(", "y", "is", "not", "None", ")", "and", "(", "type_of_target", "(", "y", ")", "in", "(", "'binary'", ",", "'multiclass'", ")", ")", ")", ":", "return", "StratifiedKFold", "(", "cv", ")", "else", ":", "return", "KFold", "(", "cv", ")", "if", "(", "(", "not", "hasattr", "(", "cv", ",", "'split'", ")", ")", "or", "isinstance", "(", "cv", ",", "str", ")", ")", ":", "if", "(", "(", "not", "isinstance", "(", "cv", ",", "Iterable", ")", ")", "or", "isinstance", "(", "cv", ",", "str", ")", ")", ":", "raise", "ValueError", "(", "(", "'Expected cv as an integer, cross-validation object (from sklearn.model_selection) or an iterable. Got %s.'", "%", "cv", ")", ")", "return", "_CVIterableWrapper", "(", "cv", ")", "return", "cv" ]
input checker utility for building a cv in a user friendly way .
train
false
21,719
@handle_response_format @treeio_login_required @module_admin_required() def pagefolder_view(request, folder_id, response_format='html'): folder = get_object_or_404(PageFolder, pk=folder_id) pages = Page.objects.filter(folder=folder).order_by('name') return render_to_response('core/administration/pagefolder_view', {'folder': folder, 'pages': pages}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "pagefolder_view", "(", "request", ",", "folder_id", ",", "response_format", "=", "'html'", ")", ":", "folder", "=", "get_object_or_404", "(", "PageFolder", ",", "pk", "=", "folder_id", ")", "pages", "=", "Page", ".", "objects", ".", "filter", "(", "folder", "=", "folder", ")", ".", "order_by", "(", "'name'", ")", "return", "render_to_response", "(", "'core/administration/pagefolder_view'", ",", "{", "'folder'", ":", "folder", ",", "'pages'", ":", "pages", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
folder for static pages view .
train
false
21,720
def _get_sph_harm(): if (LooseVersion(sp_version) < LooseVersion('0.17.1')): sph_harm = _sph_harm else: from scipy.special import sph_harm return sph_harm
[ "def", "_get_sph_harm", "(", ")", ":", "if", "(", "LooseVersion", "(", "sp_version", ")", "<", "LooseVersion", "(", "'0.17.1'", ")", ")", ":", "sph_harm", "=", "_sph_harm", "else", ":", "from", "scipy", ".", "special", "import", "sph_harm", "return", "sph_harm" ]
helper to get a usable spherical harmonic function .
train
false
21,721
def _encoded_cookies(cookies): cookie_encoder = SimpleCookie() for (k, v) in cookies.items(): cookie_encoder[k] = v return cookie_encoder.output(header='', sep=';').encode('ascii')
[ "def", "_encoded_cookies", "(", "cookies", ")", ":", "cookie_encoder", "=", "SimpleCookie", "(", ")", "for", "(", "k", ",", "v", ")", "in", "cookies", ".", "items", "(", ")", ":", "cookie_encoder", "[", "k", "]", "=", "v", "return", "cookie_encoder", ".", "output", "(", "header", "=", "''", ",", "sep", "=", "';'", ")", ".", "encode", "(", "'ascii'", ")" ]
encode dict of cookies to ascii string .
train
false
21,723
def _3d_to_2d(fig, xyz): from mayavi.core.scene import Scene if (not isinstance(fig, Scene)): raise TypeError(('fig must be an instance of Scene, found type %s' % type(fig))) xyz = np.column_stack([xyz, np.ones(xyz.shape[0])]) comb_trans_mat = _get_world_to_view_matrix(fig.scene) view_coords = np.dot(comb_trans_mat, xyz.T).T norm_view_coords = (view_coords / view_coords[:, 3].reshape((-1), 1)) view_to_disp_mat = _get_view_to_display_matrix(fig.scene) xy = np.dot(view_to_disp_mat, norm_view_coords.T).T xy = xy[:, :2] return xy
[ "def", "_3d_to_2d", "(", "fig", ",", "xyz", ")", ":", "from", "mayavi", ".", "core", ".", "scene", "import", "Scene", "if", "(", "not", "isinstance", "(", "fig", ",", "Scene", ")", ")", ":", "raise", "TypeError", "(", "(", "'fig must be an instance of Scene, found type %s'", "%", "type", "(", "fig", ")", ")", ")", "xyz", "=", "np", ".", "column_stack", "(", "[", "xyz", ",", "np", ".", "ones", "(", "xyz", ".", "shape", "[", "0", "]", ")", "]", ")", "comb_trans_mat", "=", "_get_world_to_view_matrix", "(", "fig", ".", "scene", ")", "view_coords", "=", "np", ".", "dot", "(", "comb_trans_mat", ",", "xyz", ".", "T", ")", ".", "T", "norm_view_coords", "=", "(", "view_coords", "/", "view_coords", "[", ":", ",", "3", "]", ".", "reshape", "(", "(", "-", "1", ")", ",", "1", ")", ")", "view_to_disp_mat", "=", "_get_view_to_display_matrix", "(", "fig", ".", "scene", ")", "xy", "=", "np", ".", "dot", "(", "view_to_disp_mat", ",", "norm_view_coords", ".", "T", ")", ".", "T", "xy", "=", "xy", "[", ":", ",", ":", "2", "]", "return", "xy" ]
convert 3d points to a 2d perspective using a mayavi scene .
train
false
21,724
def test_handled_unhandled_exception(exception_app): (request, response) = sanic_endpoint_test(exception_app, uri='/divide_by_zero') assert (response.status == 500) soup = BeautifulSoup(response.body, 'html.parser') assert (soup.h1.text == 'Internal Server Error') message = ' '.join(soup.p.text.split()) assert (message == 'The server encountered an internal error and cannot complete your request.')
[ "def", "test_handled_unhandled_exception", "(", "exception_app", ")", ":", "(", "request", ",", "response", ")", "=", "sanic_endpoint_test", "(", "exception_app", ",", "uri", "=", "'/divide_by_zero'", ")", "assert", "(", "response", ".", "status", "==", "500", ")", "soup", "=", "BeautifulSoup", "(", "response", ".", "body", ",", "'html.parser'", ")", "assert", "(", "soup", ".", "h1", ".", "text", "==", "'Internal Server Error'", ")", "message", "=", "' '", ".", "join", "(", "soup", ".", "p", ".", "text", ".", "split", "(", ")", ")", "assert", "(", "message", "==", "'The server encountered an internal error and cannot complete your request.'", ")" ]
test that an exception not built into sanic is handled .
train
false
21,725
@register.simple_tag(takes_context=True) def params_and_context(context, arg): return ('params_and_context - Expected result (context value: %s): %s' % (context['value'], arg))
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "params_and_context", "(", "context", ",", "arg", ")", ":", "return", "(", "'params_and_context - Expected result (context value: %s): %s'", "%", "(", "context", "[", "'value'", "]", ",", "arg", ")", ")" ]
expected params_and_context __doc__ .
train
false
21,726
def trivial(width, height): d = {('x', 0, i): i for i in range(width)} for j in range(1, height): d.update({('x', j, i): (noop, ('x', (j - 1), i)) for i in range(width)}) return (d, [('x', (height - 1), i) for i in range(width)])
[ "def", "trivial", "(", "width", ",", "height", ")", ":", "d", "=", "{", "(", "'x'", ",", "0", ",", "i", ")", ":", "i", "for", "i", "in", "range", "(", "width", ")", "}", "for", "j", "in", "range", "(", "1", ",", "height", ")", ":", "d", ".", "update", "(", "{", "(", "'x'", ",", "j", ",", "i", ")", ":", "(", "noop", ",", "(", "'x'", ",", "(", "j", "-", "1", ")", ",", "i", ")", ")", "for", "i", "in", "range", "(", "width", ")", "}", ")", "return", "(", "d", ",", "[", "(", "'x'", ",", "(", "height", "-", "1", ")", ",", "i", ")", "for", "i", "in", "range", "(", "width", ")", "]", ")" ]
build a trivial task graph of the given width and height , returning the graph dict and its output keys .
train
false
21,727
def fatal_error(message): sys.stderr.write(message) sys.exit(1)
[ "def", "fatal_error", "(", "message", ")", ":", "sys", ".", "stderr", ".", "write", "(", "message", ")", "sys", ".", "exit", "(", "1", ")" ]
print a fatal error message to stderr and exit with an error code .
train
false
21,729
def sub64(a, b): return np.subtract(a, b, dtype=np.uint64)
[ "def", "sub64", "(", "a", ",", "b", ")", ":", "return", "np", ".", "subtract", "(", "a", ",", "b", ",", "dtype", "=", "np", ".", "uint64", ")" ]
return a 64-bit unsigned integer difference of a and b .
train
false