id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
26,094
@pytest.fixture()
def schema_match(manager):
    """Fixture returning a helper that validates a response against a JSON schema.

    The helper returns one {value, message} dict per validation error;
    an empty list means the response matches the schema.
    """
    def match(schema, response):
        checker = jsonschema.Draft4Validator(schema)
        return [
            dict(value=list(err.path), message=err.message)
            for err in checker.iter_errors(response)
        ]
    return match
[ "@", "pytest", ".", "fixture", "(", ")", "def", "schema_match", "(", "manager", ")", ":", "def", "match", "(", "schema", ",", "response", ")", ":", "validator", "=", "jsonschema", ".", "Draft4Validator", "(", "schema", ")", "errors", "=", "list", "(", ...
this fixture enables verifying json schema .
train
false
26,095
def server_show(server_id, profile=None):
    """Return detailed information for an active server.

    ``profile`` selects the auth profile used by ``_auth`` to connect.
    """
    connection = _auth(profile)
    return connection.server_show(server_id)
[ "def", "server_show", "(", "server_id", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "server_show", "(", "server_id", ")" ]
return detailed information for an active server cli example: .
train
true
26,097
def rsqrt(x):
    """Return the elementwise reciprocal square root, ``1 / sqrt(x)``."""
    root = sqrt(x)
    return 1.0 / root
[ "def", "rsqrt", "(", "x", ")", ":", "return", "(", "1.0", "/", "sqrt", "(", "x", ")", ")" ]
computes elementwise reciprocal of square root of input :math:x_i .
train
false
26,098
def footer_context_processor(request):
    """Context processor exposing IS_REQUEST_IN_MICROSITE to templates.

    True when the current request targets a themed (microsite) site.
    """
    return {'IS_REQUEST_IN_MICROSITE': is_request_in_themed_site()}
[ "def", "footer_context_processor", "(", "request", ")", ":", "return", "dict", "(", "[", "(", "'IS_REQUEST_IN_MICROSITE'", ",", "is_request_in_themed_site", "(", ")", ")", "]", ")" ]
checks the site name to determine whether to use the edx .
train
false
26,100
def unsqueeze_2x2(input_):
    """Unsqueeze: trade channels for space, doubling height and width.

    Reshapes a (B, H, W, C) tensor into (B, 2H, 2W, C/4); float/int
    scalars pass through unchanged.  Raises ValueError when C is not
    divisible by 4.
    """
    if isinstance(input_, (float, int)):
        return input_
    shape = input_.get_shape().as_list()
    batch, height, width, channels = shape[0], shape[1], shape[2], shape[3]
    if channels % 4 != 0:
        raise ValueError('Number of channels not divisible by 4.')
    out = tf.reshape(input_, [batch, height, width, channels // 4, 2, 2])
    # Interleave the two 2x2 factors into the spatial dimensions.
    out = tf.transpose(out, [0, 1, 4, 2, 5, 3])
    return tf.reshape(out, [batch, 2 * height, 2 * width, channels // 4])
[ "def", "unsqueeze_2x2", "(", "input_", ")", ":", "if", "isinstance", "(", "input_", ",", "(", "float", ",", "int", ")", ")", ":", "return", "input_", "shape", "=", "input_", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "batch_size", "=", "sh...
unsqueezing operation: reshape to convert channels into space .
train
false
26,101
def is_text_serializer(serializer):
    """Return True when ``serializer.dumps`` produces text, False for binary."""
    sample = serializer.dumps({})
    return isinstance(sample, text_type)
[ "def", "is_text_serializer", "(", "serializer", ")", ":", "return", "isinstance", "(", "serializer", ".", "dumps", "(", "{", "}", ")", ",", "text_type", ")" ]
checks whether a serializer generates text or binary .
train
false
26,102
def _infer_dimension_(spectrum, n_samples, n_features):
    """Infer the dataset dimension described by ``spectrum``.

    Scores every candidate rank with ``_assess_dimension_`` and returns
    the rank with the highest log-likelihood.
    """
    scores = np.array([
        _assess_dimension_(spectrum, rank, n_samples, n_features)
        for rank in range(len(spectrum))
    ])
    return scores.argmax()
[ "def", "_infer_dimension_", "(", "spectrum", ",", "n_samples", ",", "n_features", ")", ":", "n_spectrum", "=", "len", "(", "spectrum", ")", "ll", "=", "np", ".", "empty", "(", "n_spectrum", ")", "for", "rank", "in", "range", "(", "n_spectrum", ")", ":", ...
infers the dimension of a dataset of shape the dataset is described by its spectrum spectrum .
train
false
26,103
def _fft_out_chunks(a, n, axis): if (n is None): return a.chunks chunks = list(a.chunks) chunks[axis] = (n,) return chunks
[ "def", "_fft_out_chunks", "(", "a", ",", "n", ",", "axis", ")", ":", "if", "(", "n", "is", "None", ")", ":", "return", "a", ".", "chunks", "chunks", "=", "list", "(", "a", ".", "chunks", ")", "chunks", "[", "axis", "]", "=", "(", "n", ",", ")...
for computing the output chunks of fft and ifft .
train
false
26,104
def processEscapes(str):
    """Unescape backslash-escaped characters in ``str``.

    Collapses doubled backslashes and strips the backslash from escaped
    quote, apostrophe, period, hyphen and backtick characters.

    BUG FIX: the original contained unterminated string literals for the
    backslash and apostrophe replacements (``'\'`` and ``'''``), which is
    a SyntaxError; the replacements below restore the evident intent
    (escape sequence -> literal character).
    """
    str = re.sub(r'\\\\', r'\\', str)
    str = re.sub(r'\\"', '"', str)
    str = re.sub(r"\\'", "'", str)
    str = re.sub(r'\\\.', '.', str)
    str = re.sub(r'\\-', '-', str)
    str = re.sub(r'\\`', '`', str)
    return str
[ "def", "processEscapes", "(", "str", ")", ":", "str", "=", "re", ".", "sub", "(", "'\\\\\\\\\\\\\\\\'", ",", "'\'", ",", "str", ")", "str", "=", "re", ".", "sub", "(", "'\\\\\\\\\"'", ",", "'"'", ",", "str", ")", "str", "=", "re", ".", "sub...
parameter: string .
train
false
26,105
def organization_follower_count(context, data_dict):
    """Return the number of followers of an organization.

    Organizations are groups under the hood, so this delegates directly
    to ``group_follower_count``.
    """
    count = group_follower_count(context, data_dict)
    return count
[ "def", "organization_follower_count", "(", "context", ",", "data_dict", ")", ":", "return", "group_follower_count", "(", "context", ",", "data_dict", ")" ]
return the number of followers of an organization .
train
false
26,106
def _read_int2(fid): return np.fromfile(fid, '>i2', 1)[0]
[ "def", "_read_int2", "(", "fid", ")", ":", "return", "np", ".", "fromfile", "(", "fid", ",", "'>i2'", ",", "1", ")", "[", "0", "]" ]
read int from short .
train
false
26,107
def host_os_is(osname):
    """Return True when the host OS name (``os.name``) equals ``osname``."""
    return os.name == osname
[ "def", "host_os_is", "(", "osname", ")", ":", "return", "(", "os", ".", "name", "==", "osname", ")" ]
check to see if the host os matches the query .
train
false
26,108
def fetch_dhcp_options_for_vpc(vpc_conn, vpc_id):
    """Return the DHCP options object associated with ``vpc_id``.

    Returns None when the VPC cannot be uniquely resolved, when it uses
    the 'default' options set, or when the options lookup is ambiguous.
    """
    vpcs = vpc_conn.get_all_vpcs(vpc_ids=[vpc_id])
    if len(vpcs) != 1:
        return None
    options_id = vpcs[0].dhcp_options_id
    if options_id == 'default':
        return None
    options = vpc_conn.get_all_dhcp_options(dhcp_options_ids=[options_id])
    if len(options) == 1:
        return options[0]
    return None
[ "def", "fetch_dhcp_options_for_vpc", "(", "vpc_conn", ",", "vpc_id", ")", ":", "vpcs", "=", "vpc_conn", ".", "get_all_vpcs", "(", "vpc_ids", "=", "[", "vpc_id", "]", ")", "if", "(", "(", "len", "(", "vpcs", ")", "!=", "1", ")", "or", "(", "vpcs", "["...
returns the dhcp options object currently associated with the requested vpc id using the vpc connection variable .
train
false
26,109
def is_bumper_enabled(video):
    """Check whether the pre-roll bumper should be shown for ``video``."""
    last_view = getattr(video, 'bumper_last_view_date', None)
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    periodicity = settings.FEATURES.get('SHOW_BUMPER_PERIODICITY', 0)
    # Viewed means the user opted out, or the last view is still within
    # the configured re-show period.
    has_viewed = any([
        video.bumper_do_not_show_again,
        last_view and (last_view + timedelta(seconds=periodicity)) > now,
    ])
    is_studio = getattr(video.system, 'is_author_mode', False)
    return bool(
        (not is_studio)
        and settings.FEATURES.get('ENABLE_VIDEO_BUMPER')
        and get_bumper_settings(video)
        and edxval_api
        and (not has_viewed)
    )
[ "def", "is_bumper_enabled", "(", "video", ")", ":", "bumper_last_view_date", "=", "getattr", "(", "video", ",", "'bumper_last_view_date'", ",", "None", ")", "utc_now", "=", "datetime", ".", "utcnow", "(", ")", ".", "replace", "(", "tzinfo", "=", "pytz", ".",...
check if bumper enabled .
train
false
26,110
def by_type(typename, objects=None):
    """Return gc-tracked objects whose class name equals ``typename``.

    A dotted name matches against the fully qualified class name,
    otherwise the short name is used.  ``objects`` defaults to the live
    ``gc.get_objects()`` snapshot and is explicitly released afterwards.
    """
    if objects is None:
        objects = gc.get_objects()
    name_of = _long_typename if '.' in typename else _short_typename
    try:
        return [obj for obj in objects if name_of(obj) == typename]
    finally:
        # Drop the snapshot reference so the objects can be collected.
        del objects
[ "def", "by_type", "(", "typename", ",", "objects", "=", "None", ")", ":", "if", "(", "objects", "is", "None", ")", ":", "objects", "=", "gc", ".", "get_objects", "(", ")", "try", ":", "if", "(", "'.'", "in", "typename", ")", ":", "return", "[", "...
return objects tracked by the garbage collector with a given class name .
train
false
26,111
def enter_star_ratings(context, val=3):
    """Enter ``val`` into every star-rating form in STAR_CONTAINER_IDS."""
    for container_id in STAR_CONTAINER_IDS:
        rate_id(context, container_id, val=val)
[ "def", "enter_star_ratings", "(", "context", ",", "val", "=", "3", ")", ":", "for", "id_", "in", "STAR_CONTAINER_IDS", ":", "rate_id", "(", "context", ",", "id_", ",", "val", "=", "val", ")" ]
enters a value for all three star rating forms .
train
false
26,112
@nottest
def normalize_test_names(test_suite):
    """Rewrite generated test names so embedded object addresses (0x...)
    compare stably across runs."""
    for case in test_suite._tests:
        case.methodname = re.sub('0x(.*?)>', '0xfffffff>', case.methodname)
[ "@", "nottest", "def", "normalize_test_names", "(", "test_suite", ")", ":", "for", "test", "in", "test_suite", ".", "_tests", ":", "test", ".", "methodname", "=", "re", ".", "sub", "(", "'0x(.*?)>'", ",", "'0xfffffff>'", ",", "test", ".", "methodname", ")"...
tests which are generated on the fly have names like: foo .
train
false
26,113
def name_for_number(numobj, lang, script=None, region=None):
    """Return a carrier name for the given PhoneNumber object.

    Only mobile-capable number types have carriers; any other type yields
    the empty-string sentinel.
    """
    if _is_mobile(number_type(numobj)):
        return name_for_valid_number(numobj, lang, script, region)
    return U_EMPTY_STRING
[ "def", "name_for_number", "(", "numobj", ",", "lang", ",", "script", "=", "None", ",", "region", "=", "None", ")", ":", "ntype", "=", "number_type", "(", "numobj", ")", "if", "_is_mobile", "(", "ntype", ")", ":", "return", "name_for_valid_number", "(", "...
returns a carrier name for the given phonenumber object .
train
true
26,114
def serialize_positional_argument(argument_type, argument_value):
    """Serialize the provided positional argument to a string.

    ``argument_type`` selects the encoding: string/number/float -> str(),
    boolean -> '1'/'0' ('' for None), list -> comma-joined, object ->
    JSON, null -> ''.  Falsy values (None, 0, '', empty containers)
    serialize to '' for the non-boolean types, matching the original
    behaviour.

    BUG FIX: the null branch compared with ``argument_type is 'null'``,
    which relies on CPython string interning for identity and is not a
    correct equality test; it now uses ``==``.
    """
    if argument_type in ['string', 'number', 'float']:
        argument_value = str(argument_value) if argument_value else ''
    elif argument_type == 'boolean':
        if argument_value is not None:
            argument_value = '1' if bool(argument_value) else '0'
        else:
            argument_value = ''
    elif argument_type == 'list':
        argument_value = ','.join(argument_value) if argument_value else ''
    elif argument_type == 'object':
        argument_value = json.dumps(argument_value) if argument_value else ''
    elif argument_type == 'null':
        argument_value = ''
    else:
        argument_value = str(argument_value) if argument_value else ''
    return argument_value
[ "def", "serialize_positional_argument", "(", "argument_type", ",", "argument_value", ")", ":", "if", "(", "argument_type", "in", "[", "'string'", ",", "'number'", ",", "'float'", "]", ")", ":", "argument_value", "=", "(", "str", "(", "argument_value", ")", "if...
serialize the provided positional argument .
train
false
26,115
def time_labels(seconds, is_long=False):
    """Build a list of unit labels covering ``seconds``, largest unit first.

    NOTE(review): ``_get_label`` is always invoked with index 0 —
    presumably it derives the unit from the remaining ``seconds``;
    confirm against its definition.
    """
    labels = []
    for count_per_unit, _, _ in TIME_UNITS:
        if abs(seconds) >= count_per_unit:
            labels.append(_get_label(TIME_UNITS, seconds, 0, is_long))
            seconds %= count_per_unit
    return labels
[ "def", "time_labels", "(", "seconds", ",", "is_long", "=", "False", ")", ":", "time_labels", "=", "[", "]", "for", "(", "count_per_unit", ",", "_", ",", "_", ")", "in", "TIME_UNITS", ":", "if", "(", "abs", "(", "seconds", ")", ">=", "count_per_unit", ...
provides a list of label conversions for each time unit .
train
false
26,116
def generate_nonce():
    """Generate a pseudorandom nonce in [1e9, 2e9) that is unlikely to repeat."""
    return 1000000000 + random.randrange(1000000000)
[ "def", "generate_nonce", "(", ")", ":", "return", "random", ".", "randrange", "(", "1000000000", ",", "2000000000", ")" ]
generate pseudorandom nonce that is unlikely to repeat .
train
false
26,117
def test_roles_decorator_overrides_env_roles():
    """@roles on a command replaces any roles already set in the env."""
    @roles('r1')
    def command():
        pass
    eq_effective_roles(
        command, ['r1'], env={'roledefs': fake_roles, 'roles': ['r2']}
    )
[ "def", "test_roles_decorator_overrides_env_roles", "(", ")", ":", "@", "roles", "(", "'r1'", ")", "def", "command", "(", ")", ":", "pass", "eq_effective_roles", "(", "command", ",", "[", "'r1'", "]", ",", "env", "=", "{", "'roledefs'", ":", "fake_roles", "...
if @roles is used it replaces any env .
train
false
26,118
def transform_joe(t, a1, a2, theta):
    """Asymmetric negative logistic model of Joe (1990).

    Special case a1 == a2 == 1: symmetric negative logistic of Galambos
    (1978).  Restrictions: theta > 0 and 0 < a1, a2 <= 1; invalid args
    raise ValueError.
    """
    def _check_args(a1, a2, theta):
        # Chained comparisons encode the same bounds as the original.
        return (theta > 0) and (0 < a1 <= 1) and (0 < a2 <= 1)

    if not np.all(_check_args(a1, a2, theta)):
        raise ValueError('invalid args')
    left = (a1 * (1 - t)) ** (-1.0 / theta)
    right = (a2 * t) ** (-1.0 / theta)
    return 1 - (left + right) ** (-theta)
[ "def", "transform_joe", "(", "t", ",", "a1", ",", "a2", ",", "theta", ")", ":", "def", "_check_args", "(", "a1", ",", "a2", ",", "theta", ")", ":", "condth", "=", "(", "theta", ">", "0", ")", "conda1", "=", "(", "(", "a1", ">", "0", ")", "and...
asymmetric negative logistic model of joe 1990 special case: a1=a2=1 : symmetric negative logistic of galambos 1978 restrictions: - theta in - a1 .
train
false
26,119
def coordinate_from_string(coord_string):
    """Convert a coordinate string like 'B12' into a ('B', 12) tuple.

    Raises CellCoordinatesException when COORD_RE does not match.
    """
    match = COORD_RE.match(coord_string.upper())
    if match is None:
        raise CellCoordinatesException(
            'Invalid cell coordinates (%s)' % coord_string)
    column, row = match.groups()
    return (column, int(row))
[ "def", "coordinate_from_string", "(", "coord_string", ")", ":", "match", "=", "COORD_RE", ".", "match", "(", "coord_string", ".", "upper", "(", ")", ")", "if", "(", "not", "match", ")", ":", "msg", "=", "(", "'Invalid cell coordinates (%s)'", "%", "coord_str...
convert a coordinate string like b12 to a tuple .
train
false
26,120
def _sanitize_query(q):
    """Restrict query ``q`` to the requester's project for non-admins.

    Admin requests (no limited project) pass through untouched.  For
    non-admins any explicit project_id filter must be an ``eq`` match on
    the caller's own project; when absent, one is appended.
    """
    auth_project = acl.get_limited_to_project(pecan.request.headers)
    if auth_project:
        project_filters = [item for item in q if item.field == 'project_id']
        for item in project_filters:
            if auth_project != item.value or item.op != 'eq':
                raise wsme.exc.ClientSideError(
                    'Not Authorized to access project %s %s'
                    % (item.op, item.value))
        if not project_filters:
            q.append(Query(field='project_id', op='eq', value=auth_project))
    return q
[ "def", "_sanitize_query", "(", "q", ")", ":", "auth_project", "=", "acl", ".", "get_limited_to_project", "(", "pecan", ".", "request", ".", "headers", ")", "if", "auth_project", ":", "proj_q", "=", "[", "i", "for", "i", "in", "q", "if", "(", "i", ".", ...
check the query to see if: 1) the request is comming from admin - then allow full visibility 2) non-admin - make sure that the query includes the requesters project .
train
false
26,121
def repositories_by(username, type=None, sort=None, direction=None,
                    number=(-1), etag=None):
    """List public repositories for ``username``.

    Returns an empty iterator when the module-level ``login`` is unset.
    """
    if not login:
        return iter([])
    return gh.repositories_by(username, type, sort, direction, number, etag)
[ "def", "repositories_by", "(", "username", ",", "type", "=", "None", ",", "sort", "=", "None", ",", "direction", "=", "None", ",", "number", "=", "(", "-", "1", ")", ",", "etag", "=", "None", ")", ":", "if", "login", ":", "return", "gh", ".", "re...
list public repositories for the specified username .
train
false
26,122
def configure_models(instance, sender, **kwargs):
    """Save an uploaded shapefile layer into the dynamic PostGIS datastore.

    No-op unless the DYNAMIC_DATASTORE database is configured and the
    layer's base file is a shapefile.
    """
    if DYNAMIC_DATASTORE not in settings.DATABASES:
        return
    base_file = instance.get_base_file()[0]
    if base_file is None or base_file.name != u'shp':
        return
    filename = base_file.file.path
    mapping = file2pgtable(filename, instance.name)
    model_description, _created = ModelDescription.objects.get_or_create(
        name=instance.name)
    generate_model(model_description, mapping, db_key=DYNAMIC_DATASTORE)
    TheModel = model_description.get_django_model()
    if has_datastore:
        post_save_layer(instance, sender, **kwargs)
    else:
        layer_mapping = LayerMapping(
            TheModel, filename, mapping, encoding=instance.charset,
            using=DYNAMIC_DATASTORE, transform=None)
        layer_mapping.save()
[ "def", "configure_models", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "if", "(", "DYNAMIC_DATASTORE", "not", "in", "settings", ".", "DATABASES", ")", ":", "return", "base_file", "=", "instance", ".", "get_base_file", "(", ")", "[", "0",...
save to postgis if there is a datastore .
train
false
26,123
@register.filter
def classes(field):
    """Template filter returning a form field widget's CSS classes (or None)."""
    return field.widget.attrs.get('class', None)
[ "@", "register", ".", "filter", "def", "classes", "(", "field", ")", ":", "return", "field", ".", "widget", ".", "attrs", ".", "get", "(", "'class'", ",", "None", ")" ]
returns css classes of a field .
train
false
26,124
def quarantine_session(request, locations):
    """Restrict the session to views in the modules named by ``locations``."""
    session_key = 'third_party_auth_quarantined_modules'
    request.session[session_key] = locations
[ "def", "quarantine_session", "(", "request", ",", "locations", ")", ":", "request", ".", "session", "[", "'third_party_auth_quarantined_modules'", "]", "=", "locations" ]
set a session variable indicating that the session is restricted to being used in views contained in the modules listed by string in the locations argument .
train
false
26,125
def capwords(s, sep=None):
    """Split ``s`` using split(sep), capitalize each word, and join them.

    The joiner is ``sep``, or a single space when ``sep`` is falsy.
    """
    words = s.split(sep)
    joiner = sep if sep else ' '
    return joiner.join(word.capitalize() for word in words)
[ "def", "capwords", "(", "s", ",", "sep", "=", "None", ")", ":", "return", "(", "sep", "or", "' '", ")", ".", "join", "(", "(", "x", ".", "capitalize", "(", ")", "for", "x", "in", "s", ".", "split", "(", "sep", ")", ")", ")" ]
capwords -> string split the argument into words using split .
train
true
26,127
def _get_clusters_spatial(s, neighbors): r = np.ones(s.shape, dtype=bool) clusters = list() next_ind = (0 if (s.size > 0) else None) while (next_ind is not None): t_inds = [next_ind] r[next_ind] = False icount = 1 while (icount <= len(t_inds)): ind = t_inds[(icount - 1)] buddies = np.where(r)[0] buddies = buddies[np.in1d(s[buddies], neighbors[s[ind]], assume_unique=True)] t_inds += buddies.tolist() r[buddies] = False icount += 1 next_ind = np.argmax(r) if (next_ind == 0): next_ind = None clusters.append(s[t_inds]) return clusters
[ "def", "_get_clusters_spatial", "(", "s", ",", "neighbors", ")", ":", "r", "=", "np", ".", "ones", "(", "s", ".", "shape", ",", "dtype", "=", "bool", ")", "clusters", "=", "list", "(", ")", "next_ind", "=", "(", "0", "if", "(", "s", ".", "size", ...
form spatial clusters using neighbor lists .
train
false
26,128
def fix_package_dirname(package_dirname):
    """Convert ``package_dirname`` into a valid Python package name.

    Hyphens become underscores, and a leading digit gets a 'p' prefix.
    """
    name = package_dirname.replace('-', '_')
    prefix = 'p' if name[0].isdigit() else ''
    return prefix + name
[ "def", "fix_package_dirname", "(", "package_dirname", ")", ":", "package_dirname", "=", "package_dirname", ".", "replace", "(", "'-'", ",", "'_'", ")", "pre", "=", "''", "if", "package_dirname", "[", "0", "]", ".", "isdigit", "(", ")", ":", "pre", "=", "...
convert package_dirname to a valid package name string .
train
false
26,132
def count_params(x):
    """Return the number of scalar elements in a Keras/Theano variable."""
    shape = x.shape.eval()
    return np.prod(shape)
[ "def", "count_params", "(", "x", ")", ":", "return", "np", ".", "prod", "(", "x", ".", "shape", ".", "eval", "(", ")", ")" ]
returns the number of scalars in a keras variable .
train
false
26,133
def bases_mro(bases):
    """Yield classes in method-resolution order across all ``bases``."""
    for base in bases:
        for klass in base.__mro__:
            yield klass
[ "def", "bases_mro", "(", "bases", ")", ":", "for", "base", "in", "bases", ":", "for", "class_", "in", "base", ".", "__mro__", ":", "(", "yield", "class_", ")" ]
yield classes in the order that methods should be looked up from the base classes of an object .
train
false
26,135
def _ensureOldClass(cls):
    """Ensure that ``cls`` is an old-style (Python 2) class.

    Raises ValueError with the fully qualified name when handed a
    new-style class; returns ``cls`` unchanged otherwise.
    """
    if type(cls) is not types.ClassType:
        from twisted.python.reflect import fullyQualifiedName
        raise ValueError(
            'twisted.python._oldstyle._oldStyle is being used to decorate a '
            'new-style class ({cls}). This should only be used to decorate '
            'old-style classes.'.format(cls=fullyQualifiedName(cls)))
    return cls
[ "def", "_ensureOldClass", "(", "cls", ")", ":", "if", "(", "not", "(", "type", "(", "cls", ")", "is", "types", ".", "ClassType", ")", ")", ":", "from", "twisted", ".", "python", ".", "reflect", "import", "fullyQualifiedName", "raise", "ValueError", "(", ...
ensure that c{cls} is an old-style class .
train
false
26,137
def dist_from_args(ctx, args):
    """Build a Distribution from parsed distribution-related CLI ``args``."""
    recipes = split_argument_list(args.requirements)
    extra_dirs = split_argument_list(args.extra_dist_dirs)
    return Distribution.get_distribution(
        ctx,
        name=args.dist_name,
        recipes=recipes,
        extra_dist_dirs=extra_dirs,
        require_perfect_match=args.require_perfect_match,
    )
[ "def", "dist_from_args", "(", "ctx", ",", "args", ")", ":", "return", "Distribution", ".", "get_distribution", "(", "ctx", ",", "name", "=", "args", ".", "dist_name", ",", "recipes", "=", "split_argument_list", "(", "args", ".", "requirements", ")", ",", "...
parses out any distribution-related arguments .
train
false
26,139
def get_state_link(request, option=None, val='',
                   VALID_OPTIONS=('state', 'user', 'text', 'taskstate')):
    """Construct the query string for the jobs-page state.

    Carries over every VALID_OPTIONS parameter present in ``request.GET``
    and optionally overrides ``option`` with ``val``.

    Fixes: removed a dead ``states = []`` assignment that was immediately
    shadowed by ``states = dict()``, and switched Python-2-only
    ``iteritems()`` to ``items()`` so the helper runs on both Python
    versions.  (Note: ``val`` is quote_plus-encoded twice, here and in the
    final join — preserved from the original behaviour.)
    """
    val = quote_plus(val)
    assert (option is None) or (option in VALID_OPTIONS)
    states = dict()
    for opt in VALID_OPTIONS:
        if opt in request.GET:
            states[opt] = request.GET[opt]
    if option is not None:
        states[option] = val
    return '&'.join('%s=%s' % (key, quote_plus(value))
                    for key, value in states.items())
[ "def", "get_state_link", "(", "request", ",", "option", "=", "None", ",", "val", "=", "''", ",", "VALID_OPTIONS", "=", "(", "'state'", ",", "'user'", ",", "'text'", ",", "'taskstate'", ")", ")", ":", "states", "=", "[", "]", "val", "=", "quote_plus", ...
constructs the query string for the state of the current query for the jobs page .
train
false
26,140
def lower_dict(input_dict):
    """Return a copy of ``input_dict`` with every key lowercased.

    Values are kept untouched.  Fix: use ``items()`` instead of the
    Python-2-only ``iteritems()`` so the helper works on both Python 2
    and Python 3.
    """
    return {key.lower(): value for key, value in input_dict.items()}
[ "def", "lower_dict", "(", "input_dict", ")", ":", "return", "{", "k", ".", "lower", "(", ")", ":", "v", "for", "(", "k", ",", "v", ")", "in", "input_dict", ".", "iteritems", "(", ")", "}" ]
convert all keys in a dictionary to lowercase; keep their original values .
train
false
26,142
def compare_nrmse(im_true, im_test, norm_type='Euclidean'):
    """Compute the normalized root mean-squared error between two images.

    ``norm_type`` (case-insensitive) selects the denominator:
    'euclidean' (RMS of im_true), 'min-max' (its dynamic range) or
    'mean'; anything else raises ValueError.
    """
    _assert_compatible(im_true, im_test)
    im_true, im_test = _as_floats(im_true, im_test)
    norm = norm_type.lower()
    if norm == 'euclidean':
        denom = np.sqrt(np.mean(im_true * im_true, dtype=np.float64))
    elif norm == 'min-max':
        denom = im_true.max() - im_true.min()
    elif norm == 'mean':
        denom = im_true.mean()
    else:
        raise ValueError('Unsupported norm_type')
    return np.sqrt(compare_mse(im_true, im_test)) / denom
[ "def", "compare_nrmse", "(", "im_true", ",", "im_test", ",", "norm_type", "=", "'Euclidean'", ")", ":", "_assert_compatible", "(", "im_true", ",", "im_test", ")", "(", "im_true", ",", "im_test", ")", "=", "_as_floats", "(", "im_true", ",", "im_test", ")", ...
compute the normalized root mean-squared error between two images .
train
false
26,143
def get_word_alignment(num, force_arch=64, _machine_word_size=MACHINE_WORD_SIZE):
    """Return (bits, byte width, max value, struct format) for ``num``.

    Picks the smallest unsigned word size that holds ``num``; 64-bit is
    only used when both ``force_arch`` and the machine word size allow it.

    Fix: dropped the Python-2-only ``L`` long-literal suffix on the
    64-bit maximum, which is a SyntaxError on Python 3; the value is
    unchanged.
    """
    max_uint64 = 18446744073709551615
    max_uint32 = 4294967295
    max_uint16 = 65535
    max_uint8 = 255
    if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
        return (64, 8, max_uint64, 'Q')
    elif num > max_uint16:
        return (32, 4, max_uint32, 'L')
    elif num > max_uint8:
        return (16, 2, max_uint16, 'H')
    else:
        return (8, 1, max_uint8, 'B')
[ "def", "get_word_alignment", "(", "num", ",", "force_arch", "=", "64", ",", "_machine_word_size", "=", "MACHINE_WORD_SIZE", ")", ":", "max_uint64", "=", "18446744073709551615", "L", "max_uint32", "=", "4294967295", "max_uint16", "=", "65535", "max_uint8", "=", "25...
returns alignment details for the given number based on the platform python is running on .
train
false
26,145
@handle_response_format
@treeio_login_required
def equity_edit(request, equity_id, response_format='html'):
    """Equity edit page: render the form on GET, save on POST.

    Cancelling or a successful save both redirect to the equity view; an
    invalid POST re-renders the bound form.
    """
    equity = get_object_or_404(Equity, pk=equity_id)
    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(
                reverse('finance_equity_view', args=[equity.id]))
        form = EquityForm(request.user.profile, request.POST, instance=equity)
        if form.is_valid():
            equity = form.save()
            return HttpResponseRedirect(
                reverse('finance_equity_view', args=[equity.id]))
    else:
        form = EquityForm(request.user.profile, instance=equity)
    return render_to_response(
        'finance/equity_edit',
        {'form': form, 'equity': equity},
        context_instance=RequestContext(request),
        response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "equity_edit", "(", "request", ",", "equity_id", ",", "response_format", "=", "'html'", ")", ":", "equity", "=", "get_object_or_404", "(", "Equity", ",", "pk", "=", "equity_id", ")", "if", "requ...
equity edit page .
train
false
26,146
def copy_and_tag(variable, brick, call, role, application_name, name):
    """Copy ``variable`` and annotate the copy with brick/call/role metadata."""
    new_var = variable.copy()
    new_var.name = _variable_name(brick.name, application_name, name)
    add_annotation(new_var, brick)
    add_annotation(new_var, call)
    new_var.tag.name = name
    add_role(new_var, role)
    return new_var
[ "def", "copy_and_tag", "(", "variable", ",", "brick", ",", "call", ",", "role", ",", "application_name", ",", "name", ")", ":", "copy", "=", "variable", ".", "copy", "(", ")", "copy", ".", "name", "=", "_variable_name", "(", "brick", ".", "name", ",", ...
helper method to copy a variable and annotate it .
train
false
26,147
def is_discoverable(label):
    """Check whether a test label points to a Python package or a directory.

    Importable labels count only when the module is a package (has
    ``__path__``); otherwise the label is tested as a filesystem path.
    """
    try:
        module = import_module(label)
    except (ImportError, TypeError):
        return os.path.isdir(os.path.abspath(label))
    return hasattr(module, '__path__')
[ "def", "is_discoverable", "(", "label", ")", ":", "try", ":", "mod", "=", "import_module", "(", "label", ")", "except", "(", "ImportError", ",", "TypeError", ")", ":", "pass", "else", ":", "return", "hasattr", "(", "mod", ",", "'__path__'", ")", "return"...
check if a test label points to a python package or file directory .
train
false
26,148
@receiver(post_save, sender=SiteConfiguration)
def update_site_configuration_history(sender, instance, **kwargs):
    """Record every SiteConfiguration save in SiteConfigurationHistory."""
    SiteConfigurationHistory.objects.create(
        site=instance.site,
        values=instance.values,
        enabled=instance.enabled,
    )
[ "@", "receiver", "(", "post_save", ",", "sender", "=", "SiteConfiguration", ")", "def", "update_site_configuration_history", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "SiteConfigurationHistory", ".", "objects", ".", "create", "(", "site", "=...
add site configuration changes to site configuration history .
train
false
26,149
def cell_get(context, cell_name):
    """Get a specific child cell (delegates to the IMPL backend)."""
    cell = IMPL.cell_get(context, cell_name)
    return cell
[ "def", "cell_get", "(", "context", ",", "cell_name", ")", ":", "return", "IMPL", ".", "cell_get", "(", "context", ",", "cell_name", ")" ]
get a specific child cell .
train
false
26,150
def get_plan_id(kwargs=None, call=None):
    """Return the Linode plan id for the size named by ``kwargs['label']``.

    Must be invoked as a function (-f/--function), never as an action.
    """
    if call == 'action':
        raise SaltCloudException(
            'The show_instance action must be called with -f or --function.')
    if kwargs is None:
        kwargs = {}
    label = kwargs.get('label', None)
    if label is None:
        raise SaltCloudException("The get_plan_id function requires a 'label'.")
    return avail_sizes()[label]['PLANID']
[ "def", "get_plan_id", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudException", "(", "'The show_instance action must be called with -f or --function.'", ")", "if", "(", "kwargs", ...
returns the linode plan id .
train
false
26,151
def kegg_list(database, org=None):
    """KEGG 'list' query: entry list for ``database``.

    An organism code is only valid together with the 'pathway'/'module'
    databases; a list of up to 100 database entries is joined with '+'.
    """
    if isinstance(database, str) and database in ['pathway', 'module'] and org:
        return _q('list', database, org)
    if isinstance(database, str) and database and org:
        raise Exception('Invalid database arg for kegg list request.')
    if isinstance(database, list):
        if len(database) > 100:
            raise Exception(
                'Maximuim number of databases is 100 for kegg list query')
        database = '+'.join(database)
    return _q('list', database)
[ "def", "kegg_list", "(", "database", ",", "org", "=", "None", ")", ":", "if", "(", "isinstance", "(", "database", ",", "str", ")", "and", "(", "database", "in", "[", "'pathway'", ",", "'module'", "]", ")", "and", "org", ")", ":", "resp", "=", "_q",...
kegg list - entry list for database .
train
false
26,152
def process_read_exception(exc, path):
    """Raise a CommandExecutionError describing why reading ``path`` failed.

    Maps ENOENT and EACCES to specific messages; anything else gets a
    generic errno/strerror message.
    """
    if exc.errno == errno.ENOENT:
        raise CommandExecutionError('{0} does not exist'.format(path))
    if exc.errno == errno.EACCES:
        raise CommandExecutionError(
            'Permission denied reading from {0}'.format(path))
    raise CommandExecutionError(
        'Error {0} encountered reading from {1}: {2}'.format(
            exc.errno, path, exc.strerror))
[ "def", "process_read_exception", "(", "exc", ",", "path", ")", ":", "if", "(", "exc", ".", "errno", "==", "errno", ".", "ENOENT", ")", ":", "raise", "CommandExecutionError", "(", "'{0} does not exist'", ".", "format", "(", "path", ")", ")", "elif", "(", ...
common code for raising exceptions when reading a file fails .
train
false
26,154
def get_session_log():
    """Fetch the debug-log entries for the current active session."""
    session_pid = static.PID
    return get_log_for_pid(session_pid)
[ "def", "get_session_log", "(", ")", ":", "return", "get_log_for_pid", "(", "static", ".", "PID", ")" ]
fetches the logs for the current active session from the debug log file .
train
false
26,156
def check_ip(addr):
    """Check whether ``addr`` is a valid IPv4/IPv6 address, optionally with
    a /prefix (1-32 for IPv4, 8-128 for IPv6)."""
    try:
        parts = addr.rsplit('/', 1)
    except AttributeError:
        # Not a string-like object.
        return False
    if salt.utils.network.is_ipv4(parts[0]):
        try:
            if 1 <= int(parts[1]) <= 32:
                return True
        except ValueError:
            return False
        except IndexError:
            # No prefix given: a plain IPv4 address is valid.
            return True
    if salt.utils.network.is_ipv6(parts[0]):
        try:
            if 8 <= int(parts[1]) <= 128:
                return True
        except ValueError:
            return False
        except IndexError:
            return True
    return False
[ "def", "check_ip", "(", "addr", ")", ":", "try", ":", "addr", "=", "addr", ".", "rsplit", "(", "'/'", ",", "1", ")", "except", "AttributeError", ":", "return", "False", "if", "salt", ".", "utils", ".", "network", ".", "is_ipv4", "(", "addr", "[", "...
check if address is a valid ip .
train
true
26,157
def get_ip_address(name, ip_q):
    """Pop an unused IP address from queue ``ip_q`` and mark it used.

    Returns None when ``ip_q`` has no ``get`` method, and exits with an
    explanatory message when the pool named ``name`` is exhausted.
    """
    try:
        candidate = ip_q.get(timeout=1)
        while candidate in USED_IPS:
            candidate = ip_q.get(timeout=1)
        else:
            # while/else: runs once the candidate is not already in use.
            USED_IPS.add(candidate)
            return str(candidate)
    except AttributeError:
        return None
    except Queue.Empty:
        raise SystemExit(
            'Cannot retrieve requested amount of IP addresses. Increase the '
            '%s range in your openstack_user_config.yml.' % name)
[ "def", "get_ip_address", "(", "name", ",", "ip_q", ")", ":", "try", ":", "ip_addr", "=", "ip_q", ".", "get", "(", "timeout", "=", "1", ")", "while", "(", "ip_addr", "in", "USED_IPS", ")", ":", "ip_addr", "=", "ip_q", ".", "get", "(", "timeout", "="...
retrieves the local ip address of a network interface .
train
false
26,159
def _bind_sockets(address, port):
    """Like tornado's bind_sockets, but asserts one consistent bound port.

    A falsy ``port`` requests an ephemeral port; returns the socket list
    and the port actually bound.
    """
    sockets = netutil.bind_sockets(port=port or 0, address=address)
    assert len(sockets)
    bound_ports = {sock.getsockname()[1] for sock in sockets}
    assert len(bound_ports) == 1, 'Multiple ports assigned??'
    actual_port = bound_ports.pop()
    if port:
        assert actual_port == port
    return (sockets, actual_port)
[ "def", "_bind_sockets", "(", "address", ",", "port", ")", ":", "ss", "=", "netutil", ".", "bind_sockets", "(", "port", "=", "(", "port", "or", "0", ")", ",", "address", "=", "address", ")", "assert", "len", "(", "ss", ")", "ports", "=", "{", "s", ...
like tornado .
train
true
26,163
def _expand_address(addy): ret = {} ret.update(addy.__dict__) ret['extra']['zone'] = addy.region.name return ret
[ "def", "_expand_address", "(", "addy", ")", ":", "ret", "=", "{", "}", "ret", ".", "update", "(", "addy", ".", "__dict__", ")", "ret", "[", "'extra'", "]", "[", "'zone'", "]", "=", "addy", ".", "region", ".", "name", "return", "ret" ]
convert the libcloud gceaddress object into something more serializable .
train
true
26,164
def make_menu(prefix, file_path, remove=False):
    """Create (or remove) cross-platform menu items via menuinst.

    Windows-only; environments whose basename starts with '_' are
    skipped.  menuinst failures are logged and swallowed (best effort).

    BUG FIX: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception`` while keeping the
    best-effort logging behaviour.
    """
    if not on_win:
        return
    if basename(prefix).startswith(u'_'):
        log.warn(u"Environment name starts with underscore '_'. Skipping menu installation.")
        return
    import menuinst
    try:
        menuinst.install(join(prefix, win_path_ok(file_path)), remove, prefix)
    except Exception:
        stdoutlog.error(u'menuinst Exception:')
        stdoutlog.error(traceback.format_exc())
[ "def", "make_menu", "(", "prefix", ",", "file_path", ",", "remove", "=", "False", ")", ":", "if", "(", "not", "on_win", ")", ":", "return", "elif", "basename", "(", "prefix", ")", ".", "startswith", "(", "u'_'", ")", ":", "log", ".", "warn", "(", "...
create cross-platform menu items passes all menu config files %prefix%/menu/* .
train
false
26,165
def identity(x):
    """Identity activation function: return ``x`` unchanged."""
    return x
[ "def", "identity", "(", "x", ")", ":", "return", "x" ]
the identity activation function .
train
false
26,166
def separate_words(text, min_word_return_size):
    """Return all lowercased words in ``text`` longer than
    ``min_word_return_size`` characters, excluding pure numbers."""
    splitter = re.compile('[^a-zA-Z0-9_\\+\\-/]')
    words = []
    for token in splitter.split(text):
        word = token.strip().lower()
        if (len(word) > min_word_return_size and word != ''
                and not is_number(word)):
            words.append(word)
    return words
[ "def", "separate_words", "(", "text", ",", "min_word_return_size", ")", ":", "splitter", "=", "re", ".", "compile", "(", "'[^a-zA-Z0-9_\\\\+\\\\-/]'", ")", "words", "=", "[", "]", "for", "single_word", "in", "splitter", ".", "split", "(", "text", ")", ":", ...
utility function to return a list of all words that are have a length greater than a specified number of characters .
train
true
26,168
def get_current_version_name():
    """Return the major version of the current instance
    (CURRENT_VERSION_ID with the minor part stripped)."""
    version_id = os.environ['CURRENT_VERSION_ID']
    return version_id.split('.')[0]
[ "def", "get_current_version_name", "(", ")", ":", "return", "os", ".", "environ", "[", "'CURRENT_VERSION_ID'", "]", ".", "split", "(", "'.'", ")", "[", "0", "]" ]
returns the version of the current instance .
train
false
26,172
def format_stack_db_object(stack): updated_time = heat_timeutils.isotime(stack.updated_at) created_time = heat_timeutils.isotime(stack.created_at) deleted_time = heat_timeutils.isotime(stack.deleted_at) tags = None if stack.tags: tags = [t.tag for t in stack.tags] info = {rpc_api.STACK_ID: dict(stack.identifier()), rpc_api.STACK_NAME: stack.name, rpc_api.STACK_DESCRIPTION: '', rpc_api.STACK_ACTION: stack.action, rpc_api.STACK_STATUS: stack.status, rpc_api.STACK_STATUS_DATA: stack.status_reason, rpc_api.STACK_CREATION_TIME: created_time, rpc_api.STACK_UPDATED_TIME: updated_time, rpc_api.STACK_DELETION_TIME: deleted_time, rpc_api.STACK_OWNER: stack.username, rpc_api.STACK_PARENT: stack.owner_id, rpc_api.STACK_USER_PROJECT_ID: stack.stack_user_project_id, rpc_api.STACK_TAGS: tags} return info
[ "def", "format_stack_db_object", "(", "stack", ")", ":", "updated_time", "=", "heat_timeutils", ".", "isotime", "(", "stack", ".", "updated_at", ")", "created_time", "=", "heat_timeutils", ".", "isotime", "(", "stack", ".", "created_at", ")", "deleted_time", "="...
return a summary representation of the given stack .
train
false
26,173
def is_platform_little_endian(): return (sys.byteorder == 'little')
[ "def", "is_platform_little_endian", "(", ")", ":", "return", "(", "sys", ".", "byteorder", "==", "'little'", ")" ]
am i little endian .
train
false
26,174
def disconnect_entry_signals(): post_save.disconnect(sender=Entry, dispatch_uid=ENTRY_PS_PING_DIRECTORIES) post_save.disconnect(sender=Entry, dispatch_uid=ENTRY_PS_PING_EXTERNAL_URLS) post_save.disconnect(sender=Entry, dispatch_uid=ENTRY_PS_FLUSH_SIMILAR_CACHE) post_delete.disconnect(sender=Entry, dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
[ "def", "disconnect_entry_signals", "(", ")", ":", "post_save", ".", "disconnect", "(", "sender", "=", "Entry", ",", "dispatch_uid", "=", "ENTRY_PS_PING_DIRECTORIES", ")", "post_save", ".", "disconnect", "(", "sender", "=", "Entry", ",", "dispatch_uid", "=", "ENT...
disconnect all the signals on entry model .
train
true
26,175
def make_dssp_dict(filename): with open(filename, 'r') as handle: return _make_dssp_dict(handle)
[ "def", "make_dssp_dict", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "handle", ":", "return", "_make_dssp_dict", "(", "handle", ")" ]
dssp dictionary mapping identifers to properties .
train
false
26,176
def move_users(): group = Group.objects.get(name=u'Users') for user in User.objects.all(): user.groups.add(group)
[ "def", "move_users", "(", ")", ":", "group", "=", "Group", ".", "objects", ".", "get", "(", "name", "=", "u'Users'", ")", "for", "user", "in", "User", ".", "objects", ".", "all", "(", ")", ":", "user", ".", "groups", ".", "add", "(", "group", ")"...
moves users to default group .
train
false
26,177
def is_short_syllable(w, before=None): if (before != None): i = (((before < 0) and (len(w) + before)) or before) return is_short_syllable(w[max(0, (i - 3)):i]) if ((len(w) == 3) and is_consonant(w[0]) and is_vowel(w[1]) and is_consonant(w[2]) and (w[2] not in 'wxY')): return True if ((len(w) == 2) and is_vowel(w[0]) and is_consonant(w[1])): return True return False
[ "def", "is_short_syllable", "(", "w", ",", "before", "=", "None", ")", ":", "if", "(", "before", "!=", "None", ")", ":", "i", "=", "(", "(", "(", "before", "<", "0", ")", "and", "(", "len", "(", "w", ")", "+", "before", ")", ")", "or", "befor...
a short syllable in a word is either: - a vowel followed by a non-vowel other than w .
train
false
26,179
def _get_suggested_filename(path): filename = os.path.basename(path) filename = re.sub('\\([0-9]+\\)$', '', filename) filename = urllib.parse.unquote(filename) filename = os.path.basename(filename) return filename
[ "def", "_get_suggested_filename", "(", "path", ")", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "filename", "=", "re", ".", "sub", "(", "'\\\\([0-9]+\\\\)$'", ",", "''", ",", "filename", ")", "filename", "=", "urllib", "."...
convert a path we got from chromium to a suggested filename .
train
false
26,180
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
26,181
def _fontFamilyValidator(families): match = _fontRegexReplacements['__FONT_FAMILY_SINGLE'] for f in families.split(u','): if (not match(f.strip())): return False return True
[ "def", "_fontFamilyValidator", "(", "families", ")", ":", "match", "=", "_fontRegexReplacements", "[", "'__FONT_FAMILY_SINGLE'", "]", "for", "f", "in", "families", ".", "split", "(", "u','", ")", ":", "if", "(", "not", "match", "(", "f", ".", "strip", "(",...
check if font-family value is valid .
train
false
26,183
def _root_linearmixing_doc(): pass
[ "def", "_root_linearmixing_doc", "(", ")", ":", "pass" ]
options nit : int .
train
false
26,184
def _create_extn_pattern(single_extn_symbols): return (((((((((((((_RFC3966_EXTN_PREFIX + _CAPTURING_EXTN_DIGITS) + u('|')) + u('[ \\u00A0\\t,]*(?:e?xt(?:ensi(?:o\\u0301?|\\u00F3))?n?|')) + u('\\uFF45?\\uFF58\\uFF54\\uFF4E?|')) + u('[')) + single_extn_symbols) + u(']|int|anexo|\\uFF49\\uFF4E\\uFF54)')) + u('[:\\.\\uFF0E]?[ \\u00A0\\t,-]*')) + _CAPTURING_EXTN_DIGITS) + u('#?|')) + u('[- ]+(')) + _DIGITS) + u('{1,5})#'))
[ "def", "_create_extn_pattern", "(", "single_extn_symbols", ")", ":", "return", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "(", "_RFC3966_EXTN_PREFIX", "+", "_CAPTURING_EXTN_DIGITS", ")", "+", "u", "(", "'|'", ")", ")", "+", "u", "(", ...
helper initialiser method to create the regular-expression pattern to match extensions .
train
true
26,186
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
26,187
def case_event(): def prep(r): if (not r.component): list_fields = ['date', (T('ID'), 'person_id$pe_label'), 'person_id', 'type_id', (T('Registered by'), 'created_by'), 'comments'] r.resource.configure(list_fields=list_fields) return True s3.prep = prep return s3_rest_controller()
[ "def", "case_event", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "(", "not", "r", ".", "component", ")", ":", "list_fields", "=", "[", "'date'", ",", "(", "T", "(", "'ID'", ")", ",", "'person_id$pe_label'", ")", ",", "'person_id'", ",",...
case event types: restful crud controller .
train
false
26,188
def byte(num): return pack('B', num)
[ "def", "byte", "(", "num", ")", ":", "return", "pack", "(", "'B'", ",", "num", ")" ]
converts a number between 0 and 255 to a base-256 representation .
train
false
26,189
def encode_block_crawl(payload): return pack(crawl_response_format, *(payload.up, payload.down, payload.total_up_requester, payload.total_down_requester, payload.sequence_number_requester, payload.previous_hash_requester, payload.total_up_responder, payload.total_down_responder, payload.sequence_number_responder, payload.previous_hash_responder, payload.public_key_requester, payload.signature_requester, payload.public_key_responder, payload.signature_responder))
[ "def", "encode_block_crawl", "(", "payload", ")", ":", "return", "pack", "(", "crawl_response_format", ",", "*", "(", "payload", ".", "up", ",", "payload", ".", "down", ",", "payload", ".", "total_up_requester", ",", "payload", ".", "total_down_requester", ","...
this function encodes a block for the crawler .
train
false
26,190
def uncheck(value, *checkboxes): if value: for checkbox in checkboxes: checkbox.setChecked(False)
[ "def", "uncheck", "(", "value", ",", "*", "checkboxes", ")", ":", "if", "value", ":", "for", "checkbox", "in", "checkboxes", ":", "checkbox", ".", "setChecked", "(", "False", ")" ]
uncheck the specified checkboxes if value is true .
train
false
26,191
def test_finalizer(): global val, called val = None called = False class X(object, ): def __new__(cls): global val if (val == None): val = object.__new__(cls) return val def __del__(self): called = True a = X() b = X() AreEqual(id(a), id(b)) import gc gc.collect() AreEqual(called, False)
[ "def", "test_finalizer", "(", ")", ":", "global", "val", ",", "called", "val", "=", "None", "called", "=", "False", "class", "X", "(", "object", ",", ")", ":", "def", "__new__", "(", "cls", ")", ":", "global", "val", "if", "(", "val", "==", "None",...
returning the same object from __new__ shouldnt cause it to be finalized .
train
false
26,194
@verbose def _make_ecg(inst, start, stop, verbose=None): if (not any(((c in inst) for c in ['mag', 'grad']))): raise ValueError('Unable to generate artificial ECG channel') for ch in ['mag', 'grad']: if (ch in inst): break logger.info('Reconstructing ECG signal from {0}'.format({'mag': 'Magnetometers', 'grad': 'Gradiometers'}[ch])) picks = pick_types(inst.info, meg=ch, eeg=False, ref_meg=False) if isinstance(inst, BaseRaw): (ecg, times) = inst[picks, start:stop] elif isinstance(inst, BaseEpochs): ecg = np.hstack(inst.copy().crop(start, stop).get_data()) times = inst.times elif isinstance(inst, Evoked): ecg = inst.data times = inst.times return (ecg.mean(0), times)
[ "@", "verbose", "def", "_make_ecg", "(", "inst", ",", "start", ",", "stop", ",", "verbose", "=", "None", ")", ":", "if", "(", "not", "any", "(", "(", "(", "c", "in", "inst", ")", "for", "c", "in", "[", "'mag'", ",", "'grad'", "]", ")", ")", "...
create ecg signal from cross channel average .
train
false
26,195
def stSpectralCentroidAndSpread(X, fs): ind = (numpy.arange(1, (len(X) + 1)) * (fs / (2.0 * len(X)))) Xt = X.copy() Xt = (Xt / Xt.max()) NUM = numpy.sum((ind * Xt)) DEN = (numpy.sum(Xt) + eps) C = (NUM / DEN) S = numpy.sqrt((numpy.sum((((ind - C) ** 2) * Xt)) / DEN)) C = (C / (fs / 2.0)) S = (S / (fs / 2.0)) return (C, S)
[ "def", "stSpectralCentroidAndSpread", "(", "X", ",", "fs", ")", ":", "ind", "=", "(", "numpy", ".", "arange", "(", "1", ",", "(", "len", "(", "X", ")", "+", "1", ")", ")", "*", "(", "fs", "/", "(", "2.0", "*", "len", "(", "X", ")", ")", ")"...
computes spectral centroid of frame (given abs) .
train
true
26,196
def maybe_download(filename, work_directory): if (not os.path.exists(work_directory)): os.mkdir(work_directory) filepath = os.path.join(work_directory, filename) if (not os.path.exists(filepath)): (filepath, _) = urllib.urlretrieve((SOURCE_URL + filename), filepath) statinfo = os.stat(filepath) print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.') return filepath
[ "def", "maybe_download", "(", "filename", ",", "work_directory", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "work_directory", ")", ")", ":", "os", ".", "mkdir", "(", "work_directory", ")", "filepath", "=", "os", ".", "path", "."...
download a set of files in temporary local folder .
train
false
26,197
def parseSqliteTableSchema(value): if value: table = {} columns = {} for match in re.finditer('(\\w+)\\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|INTEGER|CHARACTER|VARCHAR|VARYING CHARACTER|NCHAR|NATIVE CHARACTER|NVARCHAR|TEXT|CLOB|TEXT|BLOB|NONE|REAL|DOUBLE|DOUBLE PRECISION|FLOAT|REAL|NUMERIC|DECIMAL|BOOLEAN|DATE|DATETIME|NUMERIC)\\b', value, re.I): columns[match.group(1)] = match.group(2) table[conf.tbl] = columns kb.data.cachedColumns[conf.db] = table
[ "def", "parseSqliteTableSchema", "(", "value", ")", ":", "if", "value", ":", "table", "=", "{", "}", "columns", "=", "{", "}", "for", "match", "in", "re", ".", "finditer", "(", "'(\\\\w+)\\\\s+(INT|INTEGER|TINYINT|SMALLINT|MEDIUMINT|BIGINT|UNSIGNED BIG INT|INT2|INT8|I...
parses table column names and types from specified sqlite table schema .
train
false
26,198
def setElementNodeDictionaryMatrix(elementNode, matrix4X4): if (elementNode.xmlObject == None): elementNode.attributes.update(matrix4X4.getAttributes('matrix.')) else: elementNode.xmlObject.matrix4X4 = matrix4X4
[ "def", "setElementNodeDictionaryMatrix", "(", "elementNode", ",", "matrix4X4", ")", ":", "if", "(", "elementNode", ".", "xmlObject", "==", "None", ")", ":", "elementNode", ".", "attributes", ".", "update", "(", "matrix4X4", ".", "getAttributes", "(", "'matrix.'"...
set the element attribute dictionary or element matrix to the matrix .
train
false
26,199
def label_for_filter(model, field_name, lookup_expr, exclude=False): name = verbose_field_name(model, field_name) verbose_expression = ([_('exclude'), name] if exclude else [name]) if isinstance(lookup_expr, six.string_types): verbose_expression += [verbose_lookup_expr(lookup_expr)] verbose_expression = [force_text(part) for part in verbose_expression if part] verbose_expression = pretty_name(' '.join(verbose_expression)) return verbose_expression
[ "def", "label_for_filter", "(", "model", ",", "field_name", ",", "lookup_expr", ",", "exclude", "=", "False", ")", ":", "name", "=", "verbose_field_name", "(", "model", ",", "field_name", ")", "verbose_expression", "=", "(", "[", "_", "(", "'exclude'", ")", ...
create a generic label suitable for a filter .
train
false
26,200
def _indent_genbank(information, indent): info_parts = information.split('\n') output_info = (info_parts[0] + '\n') for info_part in info_parts[1:]: output_info += (((' ' * indent) + info_part) + '\n') return output_info
[ "def", "_indent_genbank", "(", "information", ",", "indent", ")", ":", "info_parts", "=", "information", ".", "split", "(", "'\\n'", ")", "output_info", "=", "(", "info_parts", "[", "0", "]", "+", "'\\n'", ")", "for", "info_part", "in", "info_parts", "[", ...
write out information with the specified indent .
train
false
26,201
@csrf_exempt def csw_global_dispatch(request): if (settings.CATALOGUE['default']['ENGINE'] != 'geonode.catalogue.backends.pycsw_local'): return HttpResponseRedirect(settings.CATALOGUE['default']['URL']) mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION) env = request.META.copy() env.update({'local.app_root': os.path.dirname(__file__), 'REQUEST_URI': request.build_absolute_uri()}) csw = server.Csw(mdict, env, version='2.0.2') content = csw.dispatch_wsgi() if isinstance(content, list): content = content[1] return HttpResponse(content, content_type=csw.contenttype)
[ "@", "csrf_exempt", "def", "csw_global_dispatch", "(", "request", ")", ":", "if", "(", "settings", ".", "CATALOGUE", "[", "'default'", "]", "[", "'ENGINE'", "]", "!=", "'geonode.catalogue.backends.pycsw_local'", ")", ":", "return", "HttpResponseRedirect", "(", "se...
pycsw wrapper .
train
false
26,202
def mock_literal(s): if isinstance(s, unicode): u = unicode(s) else: u = unicode(s, encoding='ascii') return u.upper()
[ "def", "mock_literal", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "u", "=", "unicode", "(", "s", ")", "else", ":", "u", "=", "unicode", "(", "s", ",", "encoding", "=", "'ascii'", ")", "return", "u", ".", "upper", ...
for use as the literal keyword argument to the renderengine constructor .
train
false
26,204
def normalize_timedeltas(context, builder, left, right, leftty, rightty): factor = npdatetime.get_timedelta_conversion_factor(leftty.unit, rightty.unit) if (factor is not None): return (scale_by_constant(builder, left, factor), right) factor = npdatetime.get_timedelta_conversion_factor(rightty.unit, leftty.unit) if (factor is not None): return (left, scale_by_constant(builder, right, factor)) raise RuntimeError(('cannot normalize %r and %r' % (leftty, rightty)))
[ "def", "normalize_timedeltas", "(", "context", ",", "builder", ",", "left", ",", "right", ",", "leftty", ",", "rightty", ")", ":", "factor", "=", "npdatetime", ".", "get_timedelta_conversion_factor", "(", "leftty", ".", "unit", ",", "rightty", ".", "unit", "...
scale either *left* or *right* to the others unit .
train
false
26,205
def elem_is_visible_with_wait(context, elem, wait_time=MAX_WAIT_TIME): def _visiblity_of(): context.browser.execute_script(('$(window).scrollLeft(%s);$(window).scrollTop(%s);' % (elem.location['x'], elem.location['y']))) return elem.is_displayed() try: WebDriverWait(context.browser, wait_time).until((lambda browser: _visiblity_of())) return True except (TimeoutException, StaleElementReferenceException): return False
[ "def", "elem_is_visible_with_wait", "(", "context", ",", "elem", ",", "wait_time", "=", "MAX_WAIT_TIME", ")", ":", "def", "_visiblity_of", "(", ")", ":", "context", ".", "browser", ".", "execute_script", "(", "(", "'$(window).scrollLeft(%s);$(window).scrollTop(%s);'",...
waits for the element to become visible .
train
false
26,206
def get_distributions_for_columns(data, columns): domain = data.domain columns = [(col if isinstance(col, int) else domain.index(col)) for col in columns] try: dist_unks = data._compute_distributions(columns) except NotImplementedError: return [get_distribution(data, i) for i in columns] else: return [get_distribution(dist, domain[col], unknown) for (col, (dist, unknown)) in zip(columns, dist_unks)]
[ "def", "get_distributions_for_columns", "(", "data", ",", "columns", ")", ":", "domain", "=", "data", ".", "domain", "columns", "=", "[", "(", "col", "if", "isinstance", "(", "col", ",", "int", ")", "else", "domain", ".", "index", "(", "col", ")", ")",...
compute the distributions for columns .
train
false
26,207
def get_turns_since(state, maximum=8): planes = np.zeros((maximum, state.size, state.size)) for x in range(state.size): for y in range(state.size): if (state.stone_ages[x][y] >= 0): planes[(min(state.stone_ages[x][y], (maximum - 1)), x, y)] = 1 return planes
[ "def", "get_turns_since", "(", "state", ",", "maximum", "=", "8", ")", ":", "planes", "=", "np", ".", "zeros", "(", "(", "maximum", ",", "state", ".", "size", ",", "state", ".", "size", ")", ")", "for", "x", "in", "range", "(", "state", ".", "siz...
a feature encoding the age of the stone at each location up to maximum note: - the [maximum-1] plane is used for any stone with age greater than or equal to maximum - empty locations are all-zero features .
train
false
26,208
def connect_to_images(region=None, public=True): return _create_client(ep_name='image', region=region, public=public)
[ "def", "connect_to_images", "(", "region", "=", "None", ",", "public", "=", "True", ")", ":", "return", "_create_client", "(", "ep_name", "=", "'image'", ",", "region", "=", "region", ",", "public", "=", "public", ")" ]
creates a client for working with images .
train
true
26,209
def _process(G, name, value): if isinstance(value, str): value = to_unicode(value) complex_value_str = None if (isinstance(value, list) or isinstance(value, dict)): complex_value_str = str(value) is_jinja_expr = (jinja_utils.is_jinja_expression(value) or jinja_utils.is_jinja_expression(complex_value_str)) if is_jinja_expr: G.add_node(name, template=value) template_ast = ENV.parse(value) LOG.debug('Template ast: %s', template_ast) dependencies = meta.find_undeclared_variables(template_ast) LOG.debug('Dependencies: %s', dependencies) if dependencies: for dependency in dependencies: G.add_edge(dependency, name) else: G.add_node(name, value=value)
[ "def", "_process", "(", "G", ",", "name", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "value", "=", "to_unicode", "(", "value", ")", "complex_value_str", "=", "None", "if", "(", "isinstance", "(", "value", ",", "li...
determines whether parameter is a template or a value .
train
false
26,210
def srpc(*params, **kparams): kparams['_no_ctx'] = True return rpc(*params, **kparams)
[ "def", "srpc", "(", "*", "params", ",", "**", "kparams", ")", ":", "kparams", "[", "'_no_ctx'", "]", "=", "True", "return", "rpc", "(", "*", "params", ",", "**", "kparams", ")" ]
method decorator to tag a method as a remote procedure call .
train
false
26,213
def register_jitable(*args, **kwargs): def wrap(fn): @overload(fn, jit_options=kwargs) def ov_wrap(*args, **kwargs): return fn return fn if kwargs: return wrap else: return wrap(*args)
[ "def", "register_jitable", "(", "*", "args", ",", "**", "kwargs", ")", ":", "def", "wrap", "(", "fn", ")", ":", "@", "overload", "(", "fn", ",", "jit_options", "=", "kwargs", ")", "def", "ov_wrap", "(", "*", "args", ",", "**", "kwargs", ")", ":", ...
register a regular python function that can be executed by the python interpreter and can be compiled into a nopython function when referenced by other jited functions .
train
false
26,215
def autofill_unprovided_options(): options.package_name = get_formatted_string(u'package name', options.package_name, options.extension_name, LowerCaseWithUnderscores()) options.class_name = get_formatted_string(u'class name', options.class_name, options.extension_name, CamelCase()) if (options.description is None): options.description = (u'Extension %s' % options.extension_name)
[ "def", "autofill_unprovided_options", "(", ")", ":", "options", ".", "package_name", "=", "get_formatted_string", "(", "u'package name'", ",", "options", ".", "package_name", ",", "options", ".", "extension_name", ",", "LowerCaseWithUnderscores", "(", ")", ")", "opt...
this will autofill all the empty necessary options that can be auto- generated from the necessary fields .
train
false
26,216
def vn_info(call=None, kwargs=None): if (call != 'function'): raise SaltCloudSystemExit('The vn_info function must be called with -f or --function.') if (kwargs is None): kwargs = {} name = kwargs.get('name', None) vn_id = kwargs.get('vn_id', None) if vn_id: if name: log.warning("Both the 'vn_id' and 'name' arguments were provided. 'vn_id' will take precedence.") elif name: vn_id = get_vn_id(kwargs={'name': name}) else: raise SaltCloudSystemExit("The vn_info function requires either a 'name' or a 'vn_id' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) response = server.one.vn.info(auth, int(vn_id)) if (response[0] is False): return response[1] else: info = {} tree = _get_xml(response[1]) info[tree.find('NAME').text] = _xml_to_dict(tree) return info
[ "def", "vn_info", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_info function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is"...
retrieves information for the virtual network .
train
true
26,218
def check_digest_auth(user, passwd): if request.headers.get('Authorization'): credentails = parse_authorization_header(request.headers.get('Authorization')) if (not credentails): return response_hash = response(credentails, passwd, dict(uri=(request.script_root + request.path), body=request.data, method=request.method)) if (credentails.get('response') == response_hash): return True return False
[ "def", "check_digest_auth", "(", "user", ",", "passwd", ")", ":", "if", "request", ".", "headers", ".", "get", "(", "'Authorization'", ")", ":", "credentails", "=", "parse_authorization_header", "(", "request", ".", "headers", ".", "get", "(", "'Authorization'...
check user authentication using http digest auth .
train
false
26,221
def fixed_points(G): return [n for n in G if (G.out_degree(n) == 0)]
[ "def", "fixed_points", "(", "G", ")", ":", "return", "[", "n", "for", "n", "in", "G", "if", "(", "G", ".", "out_degree", "(", "n", ")", "==", "0", ")", "]" ]
return a list of fixed points for the discrete dynamical system represented by the digraph g .
train
false
26,222
def apns_fetch_inactive_ids(certfile=None): with closing(_apns_create_socket_to_feedback(certfile)) as socket: inactive_ids = [] for (ts, registration_id) in _apns_receive_feedback(socket): inactive_ids.append(codecs.encode(registration_id, 'hex_codec')) return inactive_ids
[ "def", "apns_fetch_inactive_ids", "(", "certfile", "=", "None", ")", ":", "with", "closing", "(", "_apns_create_socket_to_feedback", "(", "certfile", ")", ")", "as", "socket", ":", "inactive_ids", "=", "[", "]", "for", "(", "ts", ",", "registration_id", ")", ...
queries the apns server for ids that are no longer active since the last fetch .
train
false
26,223
def test_topic_save(forum, user): post = Post(content='Test Content') topic = Topic(title='Test Title') assert (forum.last_post_id is None) assert (forum.post_count == 0) assert (forum.topic_count == 0) topic.save(forum=forum, post=post, user=user) assert (topic.title == 'Test Title') topic.title = 'Test Edit Title' topic.save() assert (topic.title == 'Test Edit Title') assert (topic.first_post_id == post.id) assert (topic.last_post_id == post.id) assert (forum.last_post_id == post.id) assert (forum.post_count == 1) assert (forum.topic_count == 1)
[ "def", "test_topic_save", "(", "forum", ",", "user", ")", ":", "post", "=", "Post", "(", "content", "=", "'Test Content'", ")", "topic", "=", "Topic", "(", "title", "=", "'Test Title'", ")", "assert", "(", "forum", ".", "last_post_id", "is", "None", ")",...
test the save topic method with creating and editing a topic .
train
false
26,224
def _guess_expansion(f, x): from sympy import expand_trig from sympy.functions.elementary.trigonometric import TrigonometricFunction res = [(f, 'original integrand')] orig = res[(-1)][0] saw = {orig} expanded = expand_mul(orig) if (expanded not in saw): res += [(expanded, 'expand_mul')] saw.add(expanded) expanded = expand(orig) if (expanded not in saw): res += [(expanded, 'expand')] saw.add(expanded) if orig.has(TrigonometricFunction, HyperbolicFunction): expanded = expand_mul(expand_trig(orig)) if (expanded not in saw): res += [(expanded, 'expand_trig, expand_mul')] saw.add(expanded) return res
[ "def", "_guess_expansion", "(", "f", ",", "x", ")", ":", "from", "sympy", "import", "expand_trig", "from", "sympy", ".", "functions", ".", "elementary", ".", "trigonometric", "import", "TrigonometricFunction", "res", "=", "[", "(", "f", ",", "'original integra...
try to guess sensible rewritings for integrand f(x) .
train
false
26,226
def _InitApiApprovalFromAff4Object(api_approval, approval_obj): api_approval.id = approval_obj.urn.Basename() api_approval.reason = approval_obj.Get(approval_obj.Schema.REASON) test_token = access_control.ACLToken(username=approval_obj.Get(approval_obj.Schema.REQUESTOR)) try: approval_obj.CheckAccess(test_token) api_approval.is_valid = True except access_control.UnauthorizedAccess as e: api_approval.is_valid = False api_approval.is_valid_message = utils.SmartStr(e) notified_users = approval_obj.Get(approval_obj.Schema.NOTIFIED_USERS) if notified_users: api_approval.notified_users = sorted((u.strip() for u in notified_users.split(','))) email_cc = approval_obj.Get(approval_obj.Schema.EMAIL_CC) email_cc_addresses = sorted((s.strip() for s in email_cc.split(','))) api_approval.email_cc_addresses = (set(email_cc_addresses) - set(api_approval.notified_users)) api_approval.approvers = sorted(approval_obj.GetNonExpiredApprovers()) return api_approval
[ "def", "_InitApiApprovalFromAff4Object", "(", "api_approval", ",", "approval_obj", ")", ":", "api_approval", ".", "id", "=", "approval_obj", ".", "urn", ".", "Basename", "(", ")", "api_approval", ".", "reason", "=", "approval_obj", ".", "Get", "(", "approval_obj...
initializes apiapproval from an aff4 object .
train
true
26,227
def get_salt_interface(vm_, opts): salt_host = salt.config.get_cloud_config_value('salt_interface', vm_, opts, default=False, search_global=False) if (salt_host is False): salt_host = salt.config.get_cloud_config_value('ssh_interface', vm_, opts, default='public_ips', search_global=False) return salt_host
[ "def", "get_salt_interface", "(", "vm_", ",", "opts", ")", ":", "salt_host", "=", "salt", ".", "config", ".", "get_cloud_config_value", "(", "'salt_interface'", ",", "vm_", ",", "opts", ",", "default", "=", "False", ",", "search_global", "=", "False", ")", ...
return the salt_interface type to connect to .
train
true