id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
29,996
def same_pyname(expected, pyname): if ((expected is None) or (pyname is None)): return False if (expected == pyname): return True if ((type(expected) not in (pynames.ImportedModule, pynames.ImportedName)) and (type(pyname) not in (pynames.ImportedModule, pynames.ImportedName))): return False return ((expected.get_definition_location() == pyname.get_definition_location()) and (expected.get_object() == pyname.get_object()))
[ "def", "same_pyname", "(", "expected", ",", "pyname", ")", ":", "if", "(", "(", "expected", "is", "None", ")", "or", "(", "pyname", "is", "None", ")", ")", ":", "return", "False", "if", "(", "expected", "==", "pyname", ")", ":", "return", "True", "...
check whether expected and pyname are the same .
train
true
29,997
def authenticated_userid(request): return request.authenticated_userid
[ "def", "authenticated_userid", "(", "request", ")", ":", "return", "request", ".", "authenticated_userid" ]
a function that returns the value of the property :attr:pyramid .
train
false
29,998
def create_sink(sink_name, destination_bucket, filter_): logging_client = logging.Client() destination = 'storage.googleapis.com/{bucket}'.format(bucket=destination_bucket) sink = logging_client.sink(sink_name, filter_, destination) if sink.exists(): print 'Sink {} already exists.'.format(sink.name) return sink.create() print 'Created sink {}'.format(sink.name)
[ "def", "create_sink", "(", "sink_name", ",", "destination_bucket", ",", "filter_", ")", ":", "logging_client", "=", "logging", ".", "Client", "(", ")", "destination", "=", "'storage.googleapis.com/{bucket}'", ".", "format", "(", "bucket", "=", "destination_bucket", ...
return a new sink with a numeric id incremented in a threadsafe way .
train
false
30,000
def list_port_fwd(zone, permanent=True): ret = [] cmd = '--zone={0} --list-forward-ports'.format(zone) if permanent: cmd += ' --permanent' for i in __firewall_cmd(cmd).splitlines(): (src, proto, dest, addr) = i.split(':') ret.append({'Source port': src.split('=')[1], 'Protocol': proto.split('=')[1], 'Destination port': dest.split('=')[1], 'Destination address': addr.split('=')[1]}) return ret
[ "def", "list_port_fwd", "(", "zone", ",", "permanent", "=", "True", ")", ":", "ret", "=", "[", "]", "cmd", "=", "'--zone={0} --list-forward-ports'", ".", "format", "(", "zone", ")", "if", "permanent", ":", "cmd", "+=", "' --permanent'", "for", "i", "in", ...
list port forwarding .
train
true
30,002
@then(u'the command output should contain exactly "{text}"') def step_command_output_should_contain_exactly_text(context, text): expected_text = text if (('{__WORKDIR__}' in text) or ('{__CWD__}' in text)): expected_text = textutil.template_substitute(text, __WORKDIR__=posixpath_normpath(context.workdir), __CWD__=posixpath_normpath(os.getcwd())) actual_output = context.command_result.output textutil.assert_text_should_contain_exactly(actual_output, expected_text)
[ "@", "then", "(", "u'the command output should contain exactly \"{text}\"'", ")", "def", "step_command_output_should_contain_exactly_text", "(", "context", ",", "text", ")", ":", "expected_text", "=", "text", "if", "(", "(", "'{__WORKDIR__}'", "in", "text", ")", "or", ...
verifies that the command output of the last command contains the expected text .
train
true
30,003
def fps(f, x=None, x0=0, dir=1, hyper=True, order=4, rational=True, full=False): f = sympify(f) if (x is None): free = f.free_symbols if (len(free) == 1): x = free.pop() elif (not free): return f else: raise NotImplementedError('multivariate formal power series') result = compute_fps(f, x, x0, dir, hyper, order, rational, full) if (result is None): return f return FormalPowerSeries(f, x, x0, dir, result)
[ "def", "fps", "(", "f", ",", "x", "=", "None", ",", "x0", "=", "0", ",", "dir", "=", "1", ",", "hyper", "=", "True", ",", "order", "=", "4", ",", "rational", "=", "True", ",", "full", "=", "False", ")", ":", "f", "=", "sympify", "(", "f", ...
generates formal power series of f .
train
false
30,004
def string_to_scopes(scopes): if (not scopes): return [] elif isinstance(scopes, six.string_types): return scopes.split(' ') else: return scopes
[ "def", "string_to_scopes", "(", "scopes", ")", ":", "if", "(", "not", "scopes", ")", ":", "return", "[", "]", "elif", "isinstance", "(", "scopes", ",", "six", ".", "string_types", ")", ":", "return", "scopes", ".", "split", "(", "' '", ")", "else", "...
converts stringifed scope value to a list .
train
true
30,006
def _get_cibfile_tmp(cibname): cibfile_tmp = '{0}.tmp'.format(_get_cibfile(cibname)) log.trace('cibfile_tmp: {0}'.format(cibfile_tmp)) return cibfile_tmp
[ "def", "_get_cibfile_tmp", "(", "cibname", ")", ":", "cibfile_tmp", "=", "'{0}.tmp'", ".", "format", "(", "_get_cibfile", "(", "cibname", ")", ")", "log", ".", "trace", "(", "'cibfile_tmp: {0}'", ".", "format", "(", "cibfile_tmp", ")", ")", "return", "cibfil...
get the full path of a temporary cib-file with the name of the cib .
train
true
30,007
def user_has_role(user_db, role): assert isinstance(role, six.string_types) if (not cfg.CONF.rbac.enable): return True user_role_dbs = rbac_services.get_roles_for_user(user_db=user_db) user_role_names = [role_db.name for role_db in user_role_dbs] return (role in user_role_names)
[ "def", "user_has_role", "(", "user_db", ",", "role", ")", ":", "assert", "isinstance", "(", "role", ",", "six", ".", "string_types", ")", "if", "(", "not", "cfg", ".", "CONF", ".", "rbac", ".", "enable", ")", ":", "return", "True", "user_role_dbs", "="...
check whether this user has access to this role .
train
false
30,008
def make_path(*path_components): path_components = [quote(component) for component in path_components if component] path = '/'.join(path_components) if (not path.startswith('/')): path = ('/' + path) return path
[ "def", "make_path", "(", "*", "path_components", ")", ":", "path_components", "=", "[", "quote", "(", "component", ")", "for", "component", "in", "path_components", "if", "component", "]", "path", "=", "'/'", ".", "join", "(", "path_components", ")", "if", ...
smash together the path components .
train
true
30,014
@a_new_decorator def a_function_requiring_decoration(): print 'I am the function which needs some decoration to remove my foul smell'
[ "@", "a_new_decorator", "def", "a_function_requiring_decoration", "(", ")", ":", "print", "'I am the function which needs some decoration to remove my foul smell'" ]
hey you! decorate me! .
train
false
30,016
@ajax_required def get_vfolder_units(request, **kwargs): search_form = UnitSearchForm(request.GET, user=request.user) vfolder = get_object_or_404(VirtualFolder, name=kwargs.get('vfolder_name')) if (not search_form.is_valid()): errors = search_form.errors.as_data() if ('path' in errors): for error in errors['path']: if (error.code == 'max_length'): raise Http400(_('Path too long.')) elif (error.code == 'required'): raise Http400(_('Arguments missing.')) raise Http404(forms.ValidationError(search_form.errors).messages) search_form.cleaned_data['vfolder'] = vfolder backend = search_backend.get(VirtualFolder)(request.user, **search_form.cleaned_data) (total, start, end, units_qs) = backend.search() return JsonResponse({'start': start, 'end': end, 'total': total, 'unitGroups': GroupedResults(units_qs).data})
[ "@", "ajax_required", "def", "get_vfolder_units", "(", "request", ",", "**", "kwargs", ")", ":", "search_form", "=", "UnitSearchForm", "(", "request", ".", "GET", ",", "user", "=", "request", ".", "user", ")", "vfolder", "=", "get_object_or_404", "(", "Virtu...
gets source and target texts and its metadata .
train
false
30,017
@handle_response_format @treeio_login_required def item_delete(request, knowledgeItem_id, response_format='html'): item = get_object_or_404(KnowledgeItem, pk=knowledgeItem_id) items = Object.filter_permitted(manager=KnowledgeItem.objects, user=request.user.profile, mode='r') if (not request.user.profile.has_permission(item, mode='w')): return user_denied(request, message="You don't have access to this Knowledge Item") if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): item.trash = True item.save() else: item.delete() return HttpResponseRedirect(reverse('knowledge_index')) elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('knowledge_item_view', args=[item.folder.treepath, item.treepath])) context = _get_default_context(request) context.update({'item': item, 'items': items}) return render_to_response('knowledge/item_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "item_delete", "(", "request", ",", "knowledgeItem_id", ",", "response_format", "=", "'html'", ")", ":", "item", "=", "get_object_or_404", "(", "KnowledgeItem", ",", "pk", "=", "knowledgeItem_id", "...
item delete .
train
false
30,019
def keyname(vm_): return config.get_cloud_config_value('keyname', vm_, __opts__, search_global=False)
[ "def", "keyname", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'keyname'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")" ]
return the keyname .
train
false
30,021
def convert_SelfReferenceProperty(model, prop, kwargs): return None
[ "def", "convert_SelfReferenceProperty", "(", "model", ",", "prop", ",", "kwargs", ")", ":", "return", "None" ]
returns a form field for a db .
train
false
30,022
def override(method): method.override = True return method
[ "def", "override", "(", "method", ")", ":", "method", ".", "override", "=", "True", "return", "method" ]
takes a override class or function and assigns it dunder arguments form the overidden one .
train
false
30,023
def get_compatible(): basedir = os.path.dirname(os.path.abspath('.')) workflow_dirs = [f for f in os.listdir(basedir) if os.path.isdir(os.path.join(basedir, f))] workflows = [] for d in workflow_dirs: workflows.append(Workflow(os.path.join(basedir, d))) workflows = [w for w in workflows if w.alleyoop] autolog(('found %s compatible workflows' % len(workflows))) return sorted(workflows, key=(lambda x: x.name))
[ "def", "get_compatible", "(", ")", ":", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "'.'", ")", ")", "workflow_dirs", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "basedir", ")", ...
gets a list if compatible workflows .
train
false
30,024
def pkcs12_key_as_pem(private_key_bytes, private_key_password): private_key_password = _helpers._to_bytes(private_key_password) pkcs12 = crypto.load_pkcs12(private_key_bytes, private_key_password) return crypto.dump_privatekey(crypto.FILETYPE_PEM, pkcs12.get_privatekey())
[ "def", "pkcs12_key_as_pem", "(", "private_key_bytes", ",", "private_key_password", ")", ":", "private_key_password", "=", "_helpers", ".", "_to_bytes", "(", "private_key_password", ")", "pkcs12", "=", "crypto", ".", "load_pkcs12", "(", "private_key_bytes", ",", "priva...
convert the contents of a pkcs#12 key to pem using pyopenssl .
train
true
30,026
def find_combining_chars(text): if (isinstance(text, str) and (sys.version_info < (3, 0))): return [] return [i for (i, c) in enumerate(text) if unicodedata.combining(c)]
[ "def", "find_combining_chars", "(", "text", ")", ":", "if", "(", "isinstance", "(", "text", ",", "str", ")", "and", "(", "sys", ".", "version_info", "<", "(", "3", ",", "0", ")", ")", ")", ":", "return", "[", "]", "return", "[", "i", "for", "(", ...
return indices of all combining chars in unicode string text .
train
false
30,027
def make_gax_logging_api(client): channel = make_secure_channel(client._connection.credentials, DEFAULT_USER_AGENT, LoggingServiceV2Client.SERVICE_ADDRESS) generated = LoggingServiceV2Client(channel=channel) return _LoggingAPI(generated, client)
[ "def", "make_gax_logging_api", "(", "client", ")", ":", "channel", "=", "make_secure_channel", "(", "client", ".", "_connection", ".", "credentials", ",", "DEFAULT_USER_AGENT", ",", "LoggingServiceV2Client", ".", "SERVICE_ADDRESS", ")", "generated", "=", "LoggingServi...
create an instance of the gax logging api .
train
false
30,029
@mock_ec2 def test_igw_detach(): conn = boto.connect_vpc(u'the_key', u'the_secret') igw = conn.create_internet_gateway() vpc = conn.create_vpc(VPC_CIDR) conn.attach_internet_gateway(igw.id, vpc.id) with assert_raises(JSONResponseError) as ex: conn.detach_internet_gateway(igw.id, vpc.id, dry_run=True) ex.exception.reason.should.equal(u'DryRunOperation') ex.exception.status.should.equal(400) ex.exception.message.should.equal(u'An error occurred (DryRunOperation) when calling the DetachInternetGateway operation: Request would have succeeded, but DryRun flag is set') conn.detach_internet_gateway(igw.id, vpc.id) igw = conn.get_all_internet_gateways()[0] igw.attachments.should.have.length_of(0)
[ "@", "mock_ec2", "def", "test_igw_detach", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "igw", "=", "conn", ".", "create_internet_gateway", "(", ")", "vpc", "=", "conn", ".", "create_vpc", "(", "VPC_...
internet gateway detach .
train
false
30,030
def ConvertValues(default_metadata, values, token=None, options=None): batch_data = [(default_metadata, obj) for obj in values] return ConvertValuesWithMetadata(batch_data, token=token, options=options)
[ "def", "ConvertValues", "(", "default_metadata", ",", "values", ",", "token", "=", "None", ",", "options", "=", "None", ")", ":", "batch_data", "=", "[", "(", "default_metadata", ",", "obj", ")", "for", "obj", "in", "values", "]", "return", "ConvertValuesW...
converts a set of rdfvalues into a set of export-friendly rdfvalues .
train
true
30,032
def write_records(records, format, f): record_struct = Struct(format) for r in records: f.write(record_struct.pack(*r))
[ "def", "write_records", "(", "records", ",", "format", ",", "f", ")", ":", "record_struct", "=", "Struct", "(", "format", ")", "for", "r", "in", "records", ":", "f", ".", "write", "(", "record_struct", ".", "pack", "(", "*", "r", ")", ")" ]
write a sequence of tuples to a binary file of structures .
train
false
30,034
def set_project_cfg_facts_if_unset(facts): config = {'default_node_selector': '', 'project_request_message': '', 'project_request_template': '', 'mcs_allocator_range': 's0:/2', 'mcs_labels_per_project': 5, 'uid_allocator_range': '1000000000-1999999999/10000'} if ('master' in facts): for (key, value) in config.items(): if (key not in facts['master']): facts['master'][key] = value return facts
[ "def", "set_project_cfg_facts_if_unset", "(", "facts", ")", ":", "config", "=", "{", "'default_node_selector'", ":", "''", ",", "'project_request_message'", ":", "''", ",", "'project_request_template'", ":", "''", ",", "'mcs_allocator_range'", ":", "'s0:/2'", ",", "...
set project configuration facts if not already present in facts dict dict: args: facts : existing facts returns: dict: the facts dict updated with the generated project configuration facts if they were not already present .
train
false
30,036
def registered_tasks(request): return JsonResponse({'regular': tasks.regular().keys(), 'periodic': tasks.periodic().keys()})
[ "def", "registered_tasks", "(", "request", ")", ":", "return", "JsonResponse", "(", "{", "'regular'", ":", "tasks", ".", "regular", "(", ")", ".", "keys", "(", ")", ",", "'periodic'", ":", "tasks", ".", "periodic", "(", ")", ".", "keys", "(", ")", "}...
view returning all defined tasks as a json object .
train
false
30,037
def _parse_author(elm_author, ctx): xp_templ = 'normalize-space(fb:%s/text())' author = ctx.XPath((xp_templ % 'first-name'))(elm_author) lname = ctx.XPath((xp_templ % 'last-name'))(elm_author) mname = ctx.XPath((xp_templ % 'middle-name'))(elm_author) if mname: author = ((author + ' ') + mname).strip() if lname: author = ((author + ' ') + lname).strip() if (not author): nname = ctx.XPath((xp_templ % 'nickname'))(elm_author) if nname: author = nname return author
[ "def", "_parse_author", "(", "elm_author", ",", "ctx", ")", ":", "xp_templ", "=", "'normalize-space(fb:%s/text())'", "author", "=", "ctx", ".", "XPath", "(", "(", "xp_templ", "%", "'first-name'", ")", ")", "(", "elm_author", ")", "lname", "=", "ctx", ".", ...
returns a list of display author and sortable author .
train
false
30,038
def minmax_normalize(X, low, high, minX=None, maxX=None, dtype=np.float): if (minX is None): minX = np.min(X) if (maxX is None): maxX = np.max(X) minX = float(minX) maxX = float(maxX) X = (X - minX) X = (X / (maxX - minX)) X = (X * (high - low)) X = (X + low) return np.asarray(X, dtype=dtype)
[ "def", "minmax_normalize", "(", "X", ",", "low", ",", "high", ",", "minX", "=", "None", ",", "maxX", "=", "None", ",", "dtype", "=", "np", ".", "float", ")", ":", "if", "(", "minX", "is", "None", ")", ":", "minX", "=", "np", ".", "min", "(", ...
min-max normalize a given matrix to given range [low .
train
false
30,039
def utf8_encoding(t): return ''.join(((('%%%X' % ord(x)) if (ord(x) > 127) else x) for x in t))
[ "def", "utf8_encoding", "(", "t", ")", ":", "return", "''", ".", "join", "(", "(", "(", "(", "'%%%X'", "%", "ord", "(", "x", ")", ")", "if", "(", "ord", "(", "x", ")", ">", "127", ")", "else", "x", ")", "for", "x", "in", "t", ")", ")" ]
utf-8 encoding .
train
false
30,040
def networkx_from_pydot(D, create_using=None): from warnings import warn warn('networkx_from_pydot is replaced by from_pydot', DeprecationWarning) return from_pydot(D)
[ "def", "networkx_from_pydot", "(", "D", ",", "create_using", "=", "None", ")", ":", "from", "warnings", "import", "warn", "warn", "(", "'networkx_from_pydot is replaced by from_pydot'", ",", "DeprecationWarning", ")", "return", "from_pydot", "(", "D", ")" ]
create a networkx graph from a pydot graph .
train
false
30,041
def dmp_positive_p(f, u, K): return K.is_positive(dmp_ground_LC(f, u, K))
[ "def", "dmp_positive_p", "(", "f", ",", "u", ",", "K", ")", ":", "return", "K", ".", "is_positive", "(", "dmp_ground_LC", "(", "f", ",", "u", ",", "K", ")", ")" ]
return true if lc(f) is positive .
train
false
30,043
def emr_endpoint_for_region(region): region = _fix_region(region) if (not region): return _EMR_REGIONLESS_ENDPOINT else: return (_EMR_REGION_ENDPOINT % {'region': region})
[ "def", "emr_endpoint_for_region", "(", "region", ")", ":", "region", "=", "_fix_region", "(", "region", ")", "if", "(", "not", "region", ")", ":", "return", "_EMR_REGIONLESS_ENDPOINT", "else", ":", "return", "(", "_EMR_REGION_ENDPOINT", "%", "{", "'region'", "...
get the host for elastic mapreduce in the given aws region .
train
false
30,045
def _get_array(data, position, obj_end, opts, element_name): size = _UNPACK_INT(data[position:(position + 4)])[0] end = ((position + size) - 1) if (data[end:(end + 1)] != '\x00'): raise InvalidBSON('bad eoo') position += 4 end -= 1 result = [] append = result.append index = data.index getter = _ELEMENT_GETTER while (position < end): element_type = data[position:(position + 1)] position = (index('\x00', position) + 1) try: (value, position) = getter[element_type](data, position, obj_end, opts, element_name) except KeyError: _raise_unknown_type(element_type, element_name) append(value) if (position != (end + 1)): raise InvalidBSON('bad array length') return (result, (position + 1))
[ "def", "_get_array", "(", "data", ",", "position", ",", "obj_end", ",", "opts", ",", "element_name", ")", ":", "size", "=", "_UNPACK_INT", "(", "data", "[", "position", ":", "(", "position", "+", "4", ")", "]", ")", "[", "0", "]", "end", "=", "(", ...
decode a bson array to python list .
train
true
30,046
def test_user_redirect_deprecated(app): name = 'wash' cookies = app.login_user(name) r = get_page('/user/baduser', app, cookies=cookies, hub=False) r.raise_for_status() print urlparse(r.url) path = urlparse(r.url).path assert (path == ujoin(app.base_url, ('/user/%s' % name))) r = get_page('/user/baduser/test.ipynb', app, cookies=cookies, hub=False) r.raise_for_status() print urlparse(r.url) path = urlparse(r.url).path assert (path == ujoin(app.base_url, ('/user/%s/test.ipynb' % name))) r = get_page('/user/baduser/test.ipynb', app, hub=False) r.raise_for_status() print urlparse(r.url) path = urlparse(r.url).path assert (path == ujoin(app.base_url, '/hub/login')) query = urlparse(r.url).query assert (query == urlencode({'next': ujoin(app.base_url, '/hub/user/baduser/test.ipynb')}))
[ "def", "test_user_redirect_deprecated", "(", "app", ")", ":", "name", "=", "'wash'", "cookies", "=", "app", ".", "login_user", "(", "name", ")", "r", "=", "get_page", "(", "'/user/baduser'", ",", "app", ",", "cookies", "=", "cookies", ",", "hub", "=", "F...
redirecting from /user/someonelse/ urls .
train
false
30,047
def listFiles(root, patterns=u'*', recurse=1, return_folders=0): import os.path import fnmatch pattern_list = patterns.split(u';') results = [] for (dirname, dirs, files) in os.walk(root): for name in files: fullname = os.path.normpath(os.path.join(dirname, name)) if (return_folders or os.path.isfile(fullname)): for pattern in pattern_list: if fnmatch.fnmatch(name, pattern): results.append(fullname) break if (not recurse): break return results
[ "def", "listFiles", "(", "root", ",", "patterns", "=", "u'*'", ",", "recurse", "=", "1", ",", "return_folders", "=", "0", ")", ":", "import", "os", ".", "path", "import", "fnmatch", "pattern_list", "=", "patterns", ".", "split", "(", "u';'", ")", "resu...
recursively list files from parmar and martelli in the python cookbook .
train
false
30,048
def adjust_gl_view(w, h): h = max(h, 1) w = max(w, 1) glViewport(0, 0, w, h) glMatrixMode(GL_PROJECTION) glLoadIdentity() glOrtho(0, w, h, 0, (-1), 1) glMatrixMode(GL_MODELVIEW) glLoadIdentity()
[ "def", "adjust_gl_view", "(", "w", ",", "h", ")", ":", "h", "=", "max", "(", "h", ",", "1", ")", "w", "=", "max", "(", "w", ",", "1", ")", "glViewport", "(", "0", ",", "0", ",", "w", ",", "h", ")", "glMatrixMode", "(", "GL_PROJECTION", ")", ...
adjust view onto our scene .
train
false
30,050
def _prep_hit(unrounded): also_log = u'' rounded = unrounded if (unrounded < MINIMUM_CHARGE): rounded = MINIMUM_CHARGE also_log = (u' [rounded up from $%s]' % unrounded) (upcharged, fee) = upcharge(rounded) cents = int((upcharged * 100)) amount_str = u'%d cents ($%s%s + $%s fee = $%s)' amount_str %= (cents, rounded, also_log, fee, upcharged) return (cents, amount_str, upcharged, fee)
[ "def", "_prep_hit", "(", "unrounded", ")", ":", "also_log", "=", "u''", "rounded", "=", "unrounded", "if", "(", "unrounded", "<", "MINIMUM_CHARGE", ")", ":", "rounded", "=", "MINIMUM_CHARGE", "also_log", "=", "(", "u' [rounded up from $%s]'", "%", "unrounded", ...
takes an amount in dollars .
train
false
30,053
def IsEC2Instance(instance): return (re.match(kEC2InstanceRe, instance) is not None)
[ "def", "IsEC2Instance", "(", "instance", ")", ":", "return", "(", "re", ".", "match", "(", "kEC2InstanceRe", ",", "instance", ")", "is", "not", "None", ")" ]
return true if the instance name passed in matches the aws instance naming pattern .
train
false
30,058
def get_preview_plugin(data_dict, return_first=False): data_dict['resource']['on_same_domain'] = on_same_domain(data_dict) plugins_that_can_preview = [] plugins_fixable = [] for plugin in p.PluginImplementations(p.IResourcePreview): p_info = {'plugin': plugin, 'quality': 1} data = plugin.can_preview(data_dict) if isinstance(data, bool): p_info['can_preview'] = data else: p_info.update(data) if p_info['can_preview']: if return_first: plugin plugins_that_can_preview.append(p_info) elif p_info.get('fixable'): plugins_fixable.append(p_info) num_plugins = len(plugins_that_can_preview) if (num_plugins == 0): for plug in plugins_fixable: log.info(('%s would allow previews. To fix: %s' % (plug['plugin'], plug['fixable']))) preview_plugin = None elif (num_plugins == 1): preview_plugin = plugins_that_can_preview[0]['plugin'] else: plugs = [pl['plugin'] for pl in plugins_that_can_preview] log.warn('Multiple previews are possible. {0}'.format(plugs)) preview_plugin = max(plugins_that_can_preview, key=(lambda x: x['quality']))['plugin'] return preview_plugin
[ "def", "get_preview_plugin", "(", "data_dict", ",", "return_first", "=", "False", ")", ":", "data_dict", "[", "'resource'", "]", "[", "'on_same_domain'", "]", "=", "on_same_domain", "(", "data_dict", ")", "plugins_that_can_preview", "=", "[", "]", "plugins_fixable...
determines whether there is an extension that can preview the resource .
train
false
30,059
def get_all_apps(with_internal_apps=True, sites_path=None): if (not sites_path): sites_path = local.sites_path apps = get_file_items(os.path.join(sites_path, u'apps.txt'), raise_not_found=True) if with_internal_apps: for app in get_file_items(os.path.join(local.site_path, u'apps.txt')): if (app not in apps): apps.append(app) if (u'frappe' in apps): apps.remove(u'frappe') apps.insert(0, u'frappe') return apps
[ "def", "get_all_apps", "(", "with_internal_apps", "=", "True", ",", "sites_path", "=", "None", ")", ":", "if", "(", "not", "sites_path", ")", ":", "sites_path", "=", "local", ".", "sites_path", "apps", "=", "get_file_items", "(", "os", ".", "path", ".", ...
get list of all apps via sites/apps .
train
false
30,060
def addusersitepackages(known_paths): global USER_BASE, USER_SITE, ENABLE_USER_SITE env_base = os.environ.get('PYTHONUSERBASE', None) def joinuser(*args): return os.path.expanduser(os.path.join(*args)) if (os.name == 'nt'): base = (os.environ.get('APPDATA') or '~') USER_BASE = (env_base if env_base else joinuser(base, 'Python')) USER_SITE = os.path.join(USER_BASE, (('Python' + sys.version[0]) + sys.version[2]), 'site-packages') else: USER_BASE = (env_base if env_base else joinuser('~', '.local')) USER_SITE = os.path.join(USER_BASE, 'lib', ('python' + sys.version[:3]), 'site-packages') if (ENABLE_USER_SITE and os.path.isdir(USER_SITE)): addsitedir(USER_SITE, known_paths) return known_paths
[ "def", "addusersitepackages", "(", "known_paths", ")", ":", "global", "USER_BASE", ",", "USER_SITE", ",", "ENABLE_USER_SITE", "env_base", "=", "os", ".", "environ", ".", "get", "(", "'PYTHONUSERBASE'", ",", "None", ")", "def", "joinuser", "(", "*", "args", "...
add a per user site-package to sys .
train
true
30,061
def clear_sessions_relative(months=1, dry_run=False): logger.warn('Clearing sessions older than {0} months'.format(months)) now = timezone.now() delta = relativedelta.relativedelta(months=months) clear_sessions((now - delta), dry_run=dry_run)
[ "def", "clear_sessions_relative", "(", "months", "=", "1", ",", "dry_run", "=", "False", ")", ":", "logger", ".", "warn", "(", "'Clearing sessions older than {0} months'", ".", "format", "(", "months", ")", ")", "now", "=", "timezone", ".", "now", "(", ")", ...
remove all sessions last modified over months months ago .
train
false
30,062
def nerdingoff(): print 'This is my first Git project. Wahoo!' logo = "\n ,#@@@@@;\n #@@@@@@@@@@@@\n @@@@@@@@@@@@@@@+\n @@@@@@@@@@@@@@@@@@\n .@@@@@@@@@@@@@@@@@@@\n @@@@@@@@@@@@@@@@@@@@,\n @@@@@@@@@@@@@@@@@@@@@\n ;@@@@@@@@@@@@@@@@@@@@@@@,\n @@@@,@@@@@@'` ,@@@@@@\n `@@@@' '@@@\n @@@@@ @@\n @@@@@ '@\n @@@@@ @\n @@@@, @\n @@@@ @\n.@@@@ @\n;@@@@ @\n+@@@+:@ @# @' ; @,+@@+@ .@\n+@@@#@:@@@@@@@@ @. @@@@@@@@@@\n.@@@@ @@@@@@@@@@. @@@@@@@@@@\n @@@+@@@@@@@@@@. .@@@@@@@@@'\n @@+#@@@@@@@@@ @@@@@@@@@#\n `@+ @@@@@@@@@+ @@@@@@@@\n +@@@@@@@@ @@@@@@@\n @@@@@+@ ,@ ,@\n +,\n" new_logo = '' for c in logo: new_logo = ((new_logo + c) + ' ') print new_logo
[ "def", "nerdingoff", "(", ")", ":", "print", "'This is my first Git project. Wahoo!'", "logo", "=", "\"\\n ,#@@@@@;\\n #@@@@@@@@@@@@\\n @@@@@@@@@@@@@@@+\\n @@@@@@@@@@@@@@@@@@\\n .@@@@@@@@@@@@@@@@@@@\\n @@@@@@@@@@@@@@@@@@@@,\\n @@@@@@@@@@@@@@@@@@@@@\\n ...
prints an ascii version of the personal logo of nerdingoff ascii was generated using: URL .
train
false
30,063
def test_hosts_stripped_env_hosts(): def command(): pass myenv = {'hosts': [' foo ', 'bar '], 'roles': [], 'exclude_hosts': []} eq_hosts(command, ['foo', 'bar'], env=myenv)
[ "def", "test_hosts_stripped_env_hosts", "(", ")", ":", "def", "command", "(", ")", ":", "pass", "myenv", "=", "{", "'hosts'", ":", "[", "' foo '", ",", "'bar '", "]", ",", "'roles'", ":", "[", "]", ",", "'exclude_hosts'", ":", "[", "]", "}", "eq_hosts"...
make sure hosts defined in env .
train
false
30,064
def download_updates(names): salt.utils.warn_until('Fluorine', "This function is replaced by 'download' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.") return download(names)
[ "def", "download_updates", "(", "names", ")", ":", "salt", ".", "utils", ".", "warn_until", "(", "'Fluorine'", ",", "\"This function is replaced by 'download' as of Salt Nitrogen. Thiswarning will be removed in Salt Fluorine.\"", ")", "return", "download", "(", "names", ")" ]
downloads all available updates .
train
false
30,065
def activate_egg(eggpath): try: d = next(pkg_resources.find_distributions(eggpath)) except StopIteration: raise ValueError('Unknown or corrupt egg') d.activate() settings_module = d.get_entry_info('scrapy', 'settings').module_name os.environ.setdefault('SCRAPY_SETTINGS_MODULE', settings_module)
[ "def", "activate_egg", "(", "eggpath", ")", ":", "try", ":", "d", "=", "next", "(", "pkg_resources", ".", "find_distributions", "(", "eggpath", ")", ")", "except", "StopIteration", ":", "raise", "ValueError", "(", "'Unknown or corrupt egg'", ")", "d", ".", "...
activate a scrapy egg file .
train
false
30,066
def forward_enable(src, dst, ipaddr): run(settings.iptables, '-A', 'FORWARD', '-i', src, '-o', dst, '--source', ipaddr, '-j', 'ACCEPT') run(settings.iptables, '-A', 'FORWARD', '-i', dst, '-o', src, '--destination', ipaddr, '-j', 'ACCEPT')
[ "def", "forward_enable", "(", "src", ",", "dst", ",", "ipaddr", ")", ":", "run", "(", "settings", ".", "iptables", ",", "'-A'", ",", "'FORWARD'", ",", "'-i'", ",", "src", ",", "'-o'", ",", "dst", ",", "'--source'", ",", "ipaddr", ",", "'-j'", ",", ...
enable forwarding a specific ip address from one interface into another .
train
false
30,068
def delete_customer_gateway(customer_gateway_id=None, customer_gateway_name=None, region=None, key=None, keyid=None, profile=None): return _delete_resource(resource='customer_gateway', name=customer_gateway_name, resource_id=customer_gateway_id, region=region, key=key, keyid=keyid, profile=profile)
[ "def", "delete_customer_gateway", "(", "customer_gateway_id", "=", "None", ",", "customer_gateway_name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "return", "_delete_re...
given a customer gateway id or name .
train
true
30,069
def random_uniform(shape, low=0.0, high=1.0, dtype=None, seed=None): if (dtype is None): dtype = floatx() if (seed is None): seed = np.random.randint(10000000.0) return tf.random_uniform(shape, minval=low, maxval=high, dtype=dtype, seed=seed)
[ "def", "random_uniform", "(", "shape", ",", "low", "=", "0.0", ",", "high", "=", "1.0", ",", "dtype", "=", "None", ",", "seed", "=", "None", ")", ":", "if", "(", "dtype", "is", "None", ")", ":", "dtype", "=", "floatx", "(", ")", "if", "(", "see...
returns a tensor with uniform distribution # arguments shape: a tuple of integers .
train
false
30,070
def check_flags(required_flags, configuration): for flag in required_flags: if (not getattr(configuration, flag, None)): msg = (_('Configuration value %s is not set.') % flag) raise exception.InvalidInput(reason=msg)
[ "def", "check_flags", "(", "required_flags", ",", "configuration", ")", ":", "for", "flag", "in", "required_flags", ":", "if", "(", "not", "getattr", "(", "configuration", ",", "flag", ",", "None", ")", ")", ":", "msg", "=", "(", "_", "(", "'Configuratio...
ensure that the flags we care about are set .
train
false
30,071
def generate_control(tests, kernels=None, platform=None, is_server=False, profilers=(), client_control_file='', profile_only=None, upload_kernel_config=False): _sanity_check_generate_control(is_server=is_server, kernels=kernels, client_control_file=client_control_file, upload_kernel_config=upload_kernel_config) control_file_text = '' if kernels: control_file_text = get_kernel_stanza(kernels, platform, is_server=is_server, upload_kernel_config=upload_kernel_config) else: control_file_text = EMPTY_TEMPLATE (prepend, append) = _get_profiler_commands(profilers, is_server, profile_only) control_file_text += get_tests_stanza(tests, is_server, prepend, append, client_control_file) return control_file_text
[ "def", "generate_control", "(", "tests", ",", "kernels", "=", "None", ",", "platform", "=", "None", ",", "is_server", "=", "False", ",", "profilers", "=", "(", ")", ",", "client_control_file", "=", "''", ",", "profile_only", "=", "None", ",", "upload_kerne...
generate a control file for a sequence of tests .
train
false
30,074
def process_submitted_answer(exploration_id, unused_exploration_version, state_name, rule_spec_string, answer): answer_log = StateRuleAnswerLogModel.get_or_create(exploration_id, state_name, rule_spec_string) if (answer in answer_log.answers): answer_log.answers[answer] += 1 else: answer_log.answers[answer] = 1 try: answer_log.put() except Exception as e: logging.error(e)
[ "def", "process_submitted_answer", "(", "exploration_id", ",", "unused_exploration_version", ",", "state_name", ",", "rule_spec_string", ",", "answer", ")", ":", "answer_log", "=", "StateRuleAnswerLogModel", ".", "get_or_create", "(", "exploration_id", ",", "state_name", ...
adds an answer to the answer log for the rule it hits .
train
false
30,076
@contextlib.contextmanager def cd(newpath): oldpath = os.getcwd() os.chdir(newpath) try: (yield) finally: try: os.chdir(oldpath) except OSError: pass
[ "@", "contextlib", ".", "contextmanager", "def", "cd", "(", "newpath", ")", ":", "oldpath", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "newpath", ")", "try", ":", "(", "yield", ")", "finally", ":", "try", ":", "os", ".", "chdir", ...
context manager that keeps directory state when calling remote operations .
train
false
30,077
def lognormal(mean=0.0, sigma=1.0, size=None, dtype=float): rs = generator.get_random_state() return rs.lognormal(mean, sigma, size=size, dtype=dtype)
[ "def", "lognormal", "(", "mean", "=", "0.0", ",", "sigma", "=", "1.0", ",", "size", "=", "None", ",", "dtype", "=", "float", ")", ":", "rs", "=", "generator", ".", "get_random_state", "(", ")", "return", "rs", ".", "lognormal", "(", "mean", ",", "s...
returns an array of samples drawn from a log normal distribution .
train
false
30,078
def topic_name_to_id(course, name): return '{course}_{run}_{name}'.format(course=course.location.course, run=course.url_name, name=name)
[ "def", "topic_name_to_id", "(", "course", ",", "name", ")", ":", "return", "'{course}_{run}_{name}'", ".", "format", "(", "course", "=", "course", ".", "location", ".", "course", ",", "run", "=", "course", ".", "url_name", ",", "name", "=", "name", ")" ]
given a discussion topic name .
train
false
30,079
def remove_all_instructors(course_key): staff_role = CourseStaffRole(course_key) staff_role.remove_users(*staff_role.users_with_role()) instructor_role = CourseInstructorRole(course_key) instructor_role.remove_users(*instructor_role.users_with_role())
[ "def", "remove_all_instructors", "(", "course_key", ")", ":", "staff_role", "=", "CourseStaffRole", "(", "course_key", ")", "staff_role", ".", "remove_users", "(", "*", "staff_role", ".", "users_with_role", "(", ")", ")", "instructor_role", "=", "CourseInstructorRol...
removes all instructor and staff users from the given course .
train
false
30,080
def report_configuration(): config = odoo.tools.config _logger.info('Odoo version %s', __version__) if os.path.isfile(config.rcfile): _logger.info(('Using configuration file at ' + config.rcfile)) _logger.info('addons paths: %s', odoo.modules.module.ad_paths) host = (config['db_host'] or os.environ.get('PGHOST', 'default')) port = (config['db_port'] or os.environ.get('PGPORT', 'default')) user = (config['db_user'] or os.environ.get('PGUSER', 'default')) _logger.info('database: %s@%s:%s', user, host, port)
[ "def", "report_configuration", "(", ")", ":", "config", "=", "odoo", ".", "tools", ".", "config", "_logger", ".", "info", "(", "'Odoo version %s'", ",", "__version__", ")", "if", "os", ".", "path", ".", "isfile", "(", "config", ".", "rcfile", ")", ":", ...
log the server version and some configuration values .
train
false
30,082
@login_required @enforce_shopping_cart_enabled def remove_item(request): item_id = (request.GET.get('id') or request.POST.get('id') or '-1') items = OrderItem.objects.filter(id=item_id, status='cart').select_subclasses() if (not len(items)): log.exception(u'Cannot remove cart OrderItem id=%s. DoesNotExist or item is already purchased', item_id) else: item = items[0] if (item.user == request.user): Order.remove_cart_item_from_order(item, request.user) item.order.update_order_type() return HttpResponse('OK')
[ "@", "login_required", "@", "enforce_shopping_cart_enabled", "def", "remove_item", "(", "request", ")", ":", "item_id", "=", "(", "request", ".", "GET", ".", "get", "(", "'id'", ")", "or", "request", ".", "POST", ".", "get", "(", "'id'", ")", "or", "'-1'...
this will remove an item from the user cart and also delete the corresponding coupon codes redemption .
train
false
30,083
def add_views_to_resource(context, resource_dict, dataset_dict=None, view_types=[], create_datastore_views=False): if (not dataset_dict): dataset_dict = logic.get_action('package_show')(context, {'id': resource_dict['package_id']}) if (not view_types): view_plugins = get_default_view_plugins(create_datastore_views) else: view_plugins = get_view_plugins(view_types) if (not view_plugins): return [] existing_views = p.toolkit.get_action('resource_view_list')(context, {'id': resource_dict['id']}) existing_view_types = ([v['view_type'] for v in existing_views] if existing_views else []) created_views = [] for view_plugin in view_plugins: view_info = view_plugin.info() if (view_info['name'] in existing_view_types): continue if view_plugin.can_view({'resource': resource_dict, 'package': dataset_dict}): view = {'resource_id': resource_dict['id'], 'view_type': view_info['name'], 'title': view_info.get('default_title', _('View')), 'description': view_info.get('default_description', '')} view_dict = p.toolkit.get_action('resource_view_create')(context, view) created_views.append(view_dict) return created_views
[ "def", "add_views_to_resource", "(", "context", ",", "resource_dict", ",", "dataset_dict", "=", "None", ",", "view_types", "=", "[", "]", ",", "create_datastore_views", "=", "False", ")", ":", "if", "(", "not", "dataset_dict", ")", ":", "dataset_dict", "=", ...
creates the provided views on the provided resource views to create are provided as a list of view_types .
train
false
30,084
def _nested_assign(ref, value): if (isinstance(ref, list) or isinstance(ref, tuple)): if (len(ref) != len(value)): raise ValueError('ref and value have different lengths.') result = [_nested_assign(r, v) for (r, v) in zip(ref, value)] if isinstance(ref, tuple): return tuple(result) return result else: return tf.assign(ref, value)
[ "def", "_nested_assign", "(", "ref", ",", "value", ")", ":", "if", "(", "isinstance", "(", "ref", ",", "list", ")", "or", "isinstance", "(", "ref", ",", "tuple", ")", ")", ":", "if", "(", "len", "(", "ref", ")", "!=", "len", "(", "value", ")", ...
returns a nested collection of tensorflow assign operations .
train
false
30,085
def modClearTitleRefs(s, titlesRefs, namesRefs, charactersRefs): return re_titleRef.sub('\\1', s)
[ "def", "modClearTitleRefs", "(", "s", ",", "titlesRefs", ",", "namesRefs", ",", "charactersRefs", ")", ":", "return", "re_titleRef", ".", "sub", "(", "'\\\\1'", ",", "s", ")" ]
remove titles references .
train
false
30,087
def get_user_permission_doctypes(user_permission_doctypes, user_permissions): if cint(frappe.db.get_single_value(u'System Settings', u'ignore_user_permissions_if_missing')): user_permission_doctypes = [list(set(doctypes).intersection(set(user_permissions.keys()))) for doctypes in user_permission_doctypes] if (len(user_permission_doctypes) > 1): common = user_permission_doctypes[0] for i in xrange(1, len(user_permission_doctypes), 1): common = list(set(common).intersection(set(user_permission_doctypes[i]))) if (not common): break if common: for doctypes in user_permission_doctypes: if (set(common) == set(doctypes)): user_permission_doctypes = [common] break return user_permission_doctypes
[ "def", "get_user_permission_doctypes", "(", "user_permission_doctypes", ",", "user_permissions", ")", ":", "if", "cint", "(", "frappe", ".", "db", ".", "get_single_value", "(", "u'System Settings'", ",", "u'ignore_user_permissions_if_missing'", ")", ")", ":", "user_perm...
returns a list of list like [["user" .
train
false
30,088
def assertReadFrom(testcase, client, member, *args, **kwargs): for _ in range(10): testcase.assertEqual(member, read_from_which_host(client, *args, **kwargs))
[ "def", "assertReadFrom", "(", "testcase", ",", "client", ",", "member", ",", "*", "args", ",", "**", "kwargs", ")", ":", "for", "_", "in", "range", "(", "10", ")", ":", "testcase", ".", "assertEqual", "(", "member", ",", "read_from_which_host", "(", "c...
check that a query with the given mode and tag_sets reads from the expected replica-set member .
train
false
30,089
def formatWithCall(formatString, mapping): return unicode(aFormatter.vformat(formatString, (), CallMapping(mapping)))
[ "def", "formatWithCall", "(", "formatString", ",", "mapping", ")", ":", "return", "unicode", "(", "aFormatter", ".", "vformat", "(", "formatString", ",", "(", ")", ",", "CallMapping", "(", "mapping", ")", ")", ")" ]
format a string like l{unicode .
train
false
30,090
def cache_region(region, *deco_args): cache = [None] def decorate(func): namespace = util.func_namespace(func) def cached(*args): reg = cache_regions[region] if (not reg.get('enabled', True)): return func(*args) if (not cache[0]): if (region not in cache_regions): raise BeakerException(('Cache region not configured: %s' % region)) cache[0] = Cache._get_cache(namespace, reg) cache_key = ' '.join(map(str, (deco_args + args))) def go(): return func(*args) return cache[0].get_value(cache_key, createfunc=go) cached._arg_namespace = namespace cached._arg_region = region return cached return decorate
[ "def", "cache_region", "(", "region", ",", "*", "deco_args", ")", ":", "cache", "=", "[", "None", "]", "def", "decorate", "(", "func", ")", ":", "namespace", "=", "util", ".", "func_namespace", "(", "func", ")", "def", "cached", "(", "*", "args", ")"...
decorate a function to cache itself using a cache region the region decorator requires arguments if there are more than 2 of the same named function .
train
false
30,091
def test_closed_group(expr, assumptions, key): return _fuzzy_group((ask(key(a), assumptions) for a in expr.args), quick_exit=True)
[ "def", "test_closed_group", "(", "expr", ",", "assumptions", ",", "key", ")", ":", "return", "_fuzzy_group", "(", "(", "ask", "(", "key", "(", "a", ")", ",", "assumptions", ")", "for", "a", "in", "expr", ".", "args", ")", ",", "quick_exit", "=", "Tru...
test for membership in a group with respect to the current operation .
train
false
30,092
def IPNetwork(address, version=None): if version: if (version == 4): return IPv4Network(address) elif (version == 6): return IPv6Network(address) try: return IPv4Network(address) except (IPv4IpValidationError, IPv4NetmaskValidationError): pass try: return IPv6Network(address) except (IPv6IpValidationError, IPv6NetmaskValidationError): pass raise ValueError(('%r does not appear to be an IPv4 or IPv6 network' % address))
[ "def", "IPNetwork", "(", "address", ",", "version", "=", "None", ")", ":", "if", "version", ":", "if", "(", "version", "==", "4", ")", ":", "return", "IPv4Network", "(", "address", ")", "elif", "(", "version", "==", "6", ")", ":", "return", "IPv6Netw...
take an ip string/int and return an object of the correct type .
train
false
30,094
def run_pydoc(module_name, *args, **env): args = (args + (module_name,)) (rc, out, err) = assert_python_ok('-B', pydoc.__file__, *args, **env) return out.strip()
[ "def", "run_pydoc", "(", "module_name", ",", "*", "args", ",", "**", "env", ")", ":", "args", "=", "(", "args", "+", "(", "module_name", ",", ")", ")", "(", "rc", ",", "out", ",", "err", ")", "=", "assert_python_ok", "(", "'-B'", ",", "pydoc", "....
runs pydoc on the specified module .
train
false
30,095
def _split_colors(color, n): (r, g, b, a) = color (h, s, v) = rgb_to_hsv(r, g, b) gradient_range = np.sqrt((n / 10.0)) if (v > 0.5): v_max = min(0.95, (v + (gradient_range / 2))) v_min = max(0.05, (v_max - gradient_range)) else: v_min = max(0.05, (v - (gradient_range / 2))) v_max = min(0.95, (v_min + gradient_range)) hsv_colors = ((h, s, v_) for v_ in np.linspace(v_min, v_max, n)) rgb_colors = (hsv_to_rgb(h_, s_, v_) for (h_, s_, v_) in hsv_colors) rgba_colors = ((r_, g_, b_, a) for (r_, g_, b_) in rgb_colors) return tuple(rgba_colors)
[ "def", "_split_colors", "(", "color", ",", "n", ")", ":", "(", "r", ",", "g", ",", "b", ",", "a", ")", "=", "color", "(", "h", ",", "s", ",", "v", ")", "=", "rgb_to_hsv", "(", "r", ",", "g", ",", "b", ")", "gradient_range", "=", "np", ".", ...
create n colors in hsv space that occupy a gradient in value .
train
false
30,096
def cmp_sccs(sccs1, sccs2): s1 = sccs1.split('.') s2 = sccs2.split('.') if (s1[0] != s2[0]): return _cmp(s1[0], s2[0]) s1 = [int(x) for x in s1[1:]] s2 = [int(x) for x in s2[1:]] return _cmp(s1, s2)
[ "def", "cmp_sccs", "(", "sccs1", ",", "sccs2", ")", ":", "s1", "=", "sccs1", ".", "split", "(", "'.'", ")", "s2", "=", "sccs2", ".", "split", "(", "'.'", ")", "if", "(", "s1", "[", "0", "]", "!=", "s2", "[", "0", "]", ")", ":", "return", "_...
order scop concise classification strings .
train
false
30,098
def test_column_mapped_to_nonexistant_field(): class FaultyPersonTable(PersonTable, ): missing = tables.Column() table = FaultyPersonTable(Person.objects.all()) table.as_html(request)
[ "def", "test_column_mapped_to_nonexistant_field", "(", ")", ":", "class", "FaultyPersonTable", "(", "PersonTable", ",", ")", ":", "missing", "=", "tables", ".", "Column", "(", ")", "table", "=", "FaultyPersonTable", "(", "Person", ".", "objects", ".", "all", "...
issue #9 describes how if a table has a column that has an accessor that targets a non-existent field .
train
false
30,099
def advanced_search(pattern): query_parsed = QUERY.parseString(pattern) return Entry.published.filter(query_parsed[0]).distinct()
[ "def", "advanced_search", "(", "pattern", ")", ":", "query_parsed", "=", "QUERY", ".", "parseString", "(", "pattern", ")", "return", "Entry", ".", "published", ".", "filter", "(", "query_parsed", "[", "0", "]", ")", ".", "distinct", "(", ")" ]
parse the grammar of a pattern and build a queryset with it .
train
true
30,100
@pytest.mark.parametrize('url, valid, has_err_string', [('http://www.example.com/', True, False), ('', False, False), ('://', False, True)]) def test_raise_cmdexc_if_invalid(url, valid, has_err_string): qurl = QUrl(url) assert (qurl.isValid() == valid) if valid: urlutils.raise_cmdexc_if_invalid(qurl) else: assert (bool(qurl.errorString()) == has_err_string) with pytest.raises(cmdexc.CommandError) as excinfo: urlutils.raise_cmdexc_if_invalid(qurl) if has_err_string: expected_text = ('Invalid URL - ' + qurl.errorString()) else: expected_text = 'Invalid URL' assert (str(excinfo.value) == expected_text)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'url, valid, has_err_string'", ",", "[", "(", "'http://www.example.com/'", ",", "True", ",", "False", ")", ",", "(", "''", ",", "False", ",", "False", ")", ",", "(", "'://'", ",", "False", ",", "True"...
test raise_cmdexc_if_invalid .
train
false
30,101
def path(dev): return info(dev).get('P', None)
[ "def", "path", "(", "dev", ")", ":", "return", "info", "(", "dev", ")", ".", "get", "(", "'P'", ",", "None", ")" ]
return the path .
train
false
30,102
def count_set_bits(n): return ((1 + count_set_bits((n & (n - 1)))) if n else 0)
[ "def", "count_set_bits", "(", "n", ")", ":", "return", "(", "(", "1", "+", "count_set_bits", "(", "(", "n", "&", "(", "n", "-", "1", ")", ")", ")", ")", "if", "n", "else", "0", ")" ]
number of 1 bits in binary expansion of a nonnnegative integer .
train
false
30,103
def extract_scenario_data(scenario_result): return {'name': scenario_result.scenario.name, 'duration': _get_duration(scenario_result.scenario), 'outline': scenario_result.outline, 'meta': {'total': scenario_result.total_steps, 'success': len(scenario_result.steps_passed), 'failures': len(scenario_result.steps_failed), 'skipped': len(scenario_result.steps_skipped), 'undefined': len(scenario_result.steps_undefined)}, 'steps': [extract_step_data(step) for step in scenario_result.all_steps]}
[ "def", "extract_scenario_data", "(", "scenario_result", ")", ":", "return", "{", "'name'", ":", "scenario_result", ".", "scenario", ".", "name", ",", "'duration'", ":", "_get_duration", "(", "scenario_result", ".", "scenario", ")", ",", "'outline'", ":", "scenar...
extract data from a scenarioresult instance .
train
false
30,104
def _make_writable_recursive(path): if sys.platform.startswith('win'): return for (root, dirs, files) in os.walk(path, topdown=False): for f in (dirs + files): _make_writable(os.path.join(root, f))
[ "def", "_make_writable_recursive", "(", "path", ")", ":", "if", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ":", "return", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "path", ",", "topdown", "="...
recursively set writable .
train
false
30,105
def on_cleanup_list(filename, skip_nzb=False): lst = cfg.cleanup_list() if lst: (name, ext) = os.path.splitext(filename) ext = ext.strip().lower() name = name.strip() for k in lst: item = k.strip().strip('.').lower() item = ('.' + item) if (((item == ext) or ((ext == '') and (item == name))) and (not (skip_nzb and (item == '.nzb')))): return True return False
[ "def", "on_cleanup_list", "(", "filename", ",", "skip_nzb", "=", "False", ")", ":", "lst", "=", "cfg", ".", "cleanup_list", "(", ")", "if", "lst", ":", "(", "name", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "ex...
return true if a filename matches the clean-up list .
train
false
30,106
def message_reply(request, object_id, template_name='messages/message_form.html'): original_message = get_object_or_404(Message, pk=object_id) next = request.GET.get('next', None) initial = {'to_user': original_message.from_user, 'subject': ('Re: %s' % original_message.subject)} form = MessageForm((request.POST or None), initial=initial) if form.is_valid(): message = form.save(commit=False) message.object = original_message.object message.from_user = request.user message = form.save() return HttpResponseRedirect((next or reverse('messages:messages'))) return render_to_response(template_name, {'form': form, 'message': original_message, 'next': next}, context_instance=RequestContext(request))
[ "def", "message_reply", "(", "request", ",", "object_id", ",", "template_name", "=", "'messages/message_form.html'", ")", ":", "original_message", "=", "get_object_or_404", "(", "Message", ",", "pk", "=", "object_id", ")", "next", "=", "request", ".", "GET", "."...
handles a reply to a specific message .
train
false
30,108
def get_build_architecture(): prefix = ' bit (' i = sys.version.find(prefix) if (i == (-1)): return 'Intel' j = sys.version.find(')', i) return sys.version[(i + len(prefix)):j]
[ "def", "get_build_architecture", "(", ")", ":", "prefix", "=", "' bit ('", "i", "=", "sys", ".", "version", ".", "find", "(", "prefix", ")", "if", "(", "i", "==", "(", "-", "1", ")", ")", ":", "return", "'Intel'", "j", "=", "sys", ".", "version", ...
return the processor architecture .
train
false
30,110
def argrelextrema(data, comparator, axis=0, order=1, mode='clip'): results = _boolrelextrema(data, comparator, axis, order, mode) return np.where(results)
[ "def", "argrelextrema", "(", "data", ",", "comparator", ",", "axis", "=", "0", ",", "order", "=", "1", ",", "mode", "=", "'clip'", ")", ":", "results", "=", "_boolrelextrema", "(", "data", ",", "comparator", ",", "axis", ",", "order", ",", "mode", ")...
calculate the relative extrema of data .
train
true
30,111
@printing_func def reify_scala_sources(sources, dependency_addresses): kwargs = sources._asdict() kwargs[u'dependencies'] = list(set(dependency_addresses)) return ScalaSources(**kwargs)
[ "@", "printing_func", "def", "reify_scala_sources", "(", "sources", ",", "dependency_addresses", ")", ":", "kwargs", "=", "sources", ".", "_asdict", "(", ")", "kwargs", "[", "u'dependencies'", "]", "=", "list", "(", "set", "(", "dependency_addresses", ")", ")"...
given a scalainferreddepssources object and its inferred dependencies .
train
false
30,112
@contextmanager def lockfile(filename, shared=False): with open(filename, 'w') as lock: with flock(lock, shared=shared): (yield)
[ "@", "contextmanager", "def", "lockfile", "(", "filename", ",", "shared", "=", "False", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "lock", ":", "with", "flock", "(", "lock", ",", "shared", "=", "shared", ")", ":", "(", "yield",...
lock a file using flock(2) for the duration of a with statement .
train
false
30,113
def ensure_unicode(s): if isinstance(s, unicode): return s if hasattr(s, 'decode'): return s.decode() msg = 'Object %s is neither a bytes object nor has an encode method' raise TypeError((msg % s))
[ "def", "ensure_unicode", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "return", "s", "if", "hasattr", "(", "s", ",", "'decode'", ")", ":", "return", "s", ".", "decode", "(", ")", "msg", "=", "'Object %s is neither a bytes...
helper to ensure that text passed to writeconsolew is unicode .
train
false
30,114
def _convert_host_to_ip(host): addrinfo = socket.getaddrinfo(host, 80, 0, 0, socket.SOL_TCP) ips = [] for (family, socktype, proto, canonname, sockaddr) in addrinfo: ip = sockaddr[0] ips.append((family, ip)) if (family == socket.AF_INET): ips.append((socket.AF_INET6, ('::ffff:' + ip))) return ips
[ "def", "_convert_host_to_ip", "(", "host", ")", ":", "addrinfo", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "80", ",", "0", ",", "0", ",", "socket", ".", "SOL_TCP", ")", "ips", "=", "[", "]", "for", "(", "family", ",", "socktype", ",", "p...
perform forward dns resolution on host .
train
false
30,116
def _parse_readrows_acceptance_tests(filename): import json with open(filename) as json_file: test_json = json.load(json_file) for test in test_json['tests']: name = test['name'] chunks = _generate_cell_chunks(test['chunks']) results = test['results'] (yield (name, chunks, results))
[ "def", "_parse_readrows_acceptance_tests", "(", "filename", ")", ":", "import", "json", "with", "open", "(", "filename", ")", "as", "json_file", ":", "test_json", "=", "json", ".", "load", "(", "json_file", ")", "for", "test", "in", "test_json", "[", "'tests...
parse acceptance tests from json see: URL 4d3185662ca61bc9fa1bdf1ec0166f6e5ecf86c6/bigtable-client-core/src/ test/resources/com/google/cloud/bigtable/grpc/scanner/v2/ read-rows-acceptance-test .
train
false
30,119
def write_to_version_file(filename, versions): os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(',', ': ')) with open(filename, 'w') as f: f.write((SHORT_VERSION_PY % contents)) print(("set %s to '%s'" % (filename, versions['version'])))
[ "def", "write_to_version_file", "(", "filename", ",", "versions", ")", ":", "os", ".", "unlink", "(", "filename", ")", "contents", "=", "json", ".", "dumps", "(", "versions", ",", "sort_keys", "=", "True", ",", "indent", "=", "1", ",", "separators", "=",...
write the given version number to the given _version .
train
true
30,124
def _should_create_database(connection): if (not _can_support_reuse_db(connection)): return True try: connection.cursor() except Exception: return True return (not _reusing_db())
[ "def", "_should_create_database", "(", "connection", ")", ":", "if", "(", "not", "_can_support_reuse_db", "(", "connection", ")", ")", ":", "return", "True", "try", ":", "connection", ".", "cursor", "(", ")", "except", "Exception", ":", "return", "True", "re...
return whether we should recreate the given db .
train
false
30,126
@register_opt() @local_optimizer([tensor.DimShuffle, GpuFromHost]) def local_gpu_dimshuffle_0(node): if isinstance(node.op, tensor.DimShuffle): (input,) = node.inputs if (input.owner and isinstance(input.owner.op, HostFromGpu)): p_dict = node.op._props_dict() p_dict.pop('inplace', None) new_op = GpuDimShuffle(**p_dict) return [host_from_gpu(new_op(as_cuda_ndarray_variable(input)))] if isinstance(node.op, GpuFromHost): host_input = node.inputs[0] if (host_input.owner and isinstance(host_input.owner.op, tensor.DimShuffle)): dimshuffle_node = host_input.owner p_dict = dimshuffle_node.op._props_dict() p_dict.pop('inplace', None) new_op = GpuDimShuffle(**p_dict) return [new_op(as_cuda_ndarray_variable(dimshuffle_node.inputs[0]))] return False
[ "@", "register_opt", "(", ")", "@", "local_optimizer", "(", "[", "tensor", ".", "DimShuffle", ",", "GpuFromHost", "]", ")", "def", "local_gpu_dimshuffle_0", "(", "node", ")", ":", "if", "isinstance", "(", "node", ".", "op", ",", "tensor", ".", "DimShuffle"...
dimshuffle(host_from_gpu()) -> host_from_gpu gpu_from_host -> gpu_dimshuffle .
train
false
30,128
def parsehtml(path, *args, **kwargs): return plaintext(DOM(path, *args, **kwargs).body)
[ "def", "parsehtml", "(", "path", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "plaintext", "(", "DOM", "(", "path", ",", "*", "args", ",", "**", "kwargs", ")", ".", "body", ")" ]
returns the content as a unicode string from the given .
train
false
30,129
def create_tree_watcher(pl, watcher_type=u'auto', expire_time=10): return TreeWatcher(pl, watcher_type, expire_time)
[ "def", "create_tree_watcher", "(", "pl", ",", "watcher_type", "=", "u'auto'", ",", "expire_time", "=", "10", ")", ":", "return", "TreeWatcher", "(", "pl", ",", "watcher_type", ",", "expire_time", ")" ]
create an object that can watch for changes in specified directories .
train
false
30,130
def _palette_is_grayscale(pil_image): assert (pil_image.mode == 'P') palette = np.asarray(pil_image.getpalette()).reshape((256, 3)) (start, stop) = pil_image.getextrema() valid_palette = palette[start:stop] return np.allclose(np.diff(valid_palette), 0)
[ "def", "_palette_is_grayscale", "(", "pil_image", ")", ":", "assert", "(", "pil_image", ".", "mode", "==", "'P'", ")", "palette", "=", "np", ".", "asarray", "(", "pil_image", ".", "getpalette", "(", ")", ")", ".", "reshape", "(", "(", "256", ",", "3", ...
return true if pil image in palette mode is grayscale .
train
false
30,132
def test_biweight_location_axis_3d(): with NumpyRNGContext(12345): nz = 3 ny = 4 nx = 5 data = normal(5, 2, (nz, ny, nx)) bw = funcs.biweight_location(data, axis=0) assert (bw.shape == (ny, nx)) y = 0 bwi = [] for i in range(nx): bwi.append(funcs.biweight_location(data[:, y, i])) bwi = np.array(bwi) assert_allclose(bw[y], bwi)
[ "def", "test_biweight_location_axis_3d", "(", ")", ":", "with", "NumpyRNGContext", "(", "12345", ")", ":", "nz", "=", "3", "ny", "=", "4", "nx", "=", "5", "data", "=", "normal", "(", "5", ",", "2", ",", "(", "nz", ",", "ny", ",", "nx", ")", ")", ...
test a 3d array with the axis keyword .
train
false
30,133
def find_parameters(instance, fields=None): if (fields is None): fields = NODES[('%s-widget' % instance.data['type'])].FIELDS.keys() params = [] for field in fields: data = instance.data['properties'][field] if ((field == 'sla') and (not instance.sla_enabled)): continue if isinstance(data, list): params.extend(find_json_parameters(data)) elif isinstance(data, basestring): for match in Template.pattern.finditer(data): name = match.group('braced') if (name is not None): params.append(name) return params
[ "def", "find_parameters", "(", "instance", ",", "fields", "=", "None", ")", ":", "if", "(", "fields", "is", "None", ")", ":", "fields", "=", "NODES", "[", "(", "'%s-widget'", "%", "instance", ".", "data", "[", "'type'", "]", ")", "]", ".", "FIELDS", ...
find parameters in the given fields .
train
false
30,135
def dendrogram(Z, p=30, truncate_mode=None, color_threshold=None, get_leaves=True, orientation='top', labels=None, count_sort=False, distance_sort=False, show_leaf_counts=True, no_plot=False, no_labels=False, leaf_font_size=None, leaf_rotation=None, leaf_label_func=None, show_contracted=False, link_color_func=None, ax=None, above_threshold_color='b'): Z = np.asarray(Z, order='c') if (orientation not in ['top', 'left', 'bottom', 'right']): raise ValueError("orientation must be one of 'top', 'left', 'bottom', or 'right'") is_valid_linkage(Z, throw=True, name='Z') Zs = Z.shape n = (Zs[0] + 1) if (type(p) in (int, float)): p = int(p) else: raise TypeError('The second argument must be a number') if (truncate_mode not in ('lastp', 'mlab', 'mtica', 'level', 'none', None)): raise ValueError('Invalid truncation mode.') if ((truncate_mode == 'lastp') or (truncate_mode == 'mlab')): if ((p > n) or (p == 0)): p = n if (truncate_mode == 'mtica'): truncate_mode = 'level' if (truncate_mode == 'level'): if (p <= 0): p = np.inf if get_leaves: lvs = [] else: lvs = None icoord_list = [] dcoord_list = [] color_list = [] current_color = [0] currently_below_threshold = [False] ivl = [] if ((color_threshold is None) or (isinstance(color_threshold, string_types) and (color_threshold == 'default'))): color_threshold = (max(Z[:, 2]) * 0.7) R = {'icoord': icoord_list, 'dcoord': dcoord_list, 'ivl': ivl, 'leaves': lvs, 'color_list': color_list} contraction_marks = ([] if show_contracted else None) _dendrogram_calculate_info(Z=Z, p=p, truncate_mode=truncate_mode, color_threshold=color_threshold, get_leaves=get_leaves, orientation=orientation, labels=labels, count_sort=count_sort, distance_sort=distance_sort, show_leaf_counts=show_leaf_counts, i=((2 * n) - 2), iv=0.0, ivl=ivl, n=n, icoord_list=icoord_list, dcoord_list=dcoord_list, lvs=lvs, current_color=current_color, color_list=color_list, currently_below_threshold=currently_below_threshold, leaf_label_func=leaf_label_func, 
contraction_marks=contraction_marks, link_color_func=link_color_func, above_threshold_color=above_threshold_color) if (not no_plot): mh = max(Z[:, 2]) _plot_dendrogram(icoord_list, dcoord_list, ivl, p, n, mh, orientation, no_labels, color_list, leaf_font_size=leaf_font_size, leaf_rotation=leaf_rotation, contraction_marks=contraction_marks, ax=ax, above_threshold_color=above_threshold_color) return R
[ "def", "dendrogram", "(", "Z", ",", "p", "=", "30", ",", "truncate_mode", "=", "None", ",", "color_threshold", "=", "None", ",", "get_leaves", "=", "True", ",", "orientation", "=", "'top'", ",", "labels", "=", "None", ",", "count_sort", "=", "False", "...
draw a tree diagram of relationships within a matrix parameters data : pandas .
train
false
30,136
def _format_sign(is_negative, spec): if is_negative: return '-' elif (spec['sign'] in ' +'): return spec['sign'] else: return ''
[ "def", "_format_sign", "(", "is_negative", ",", "spec", ")", ":", "if", "is_negative", ":", "return", "'-'", "elif", "(", "spec", "[", "'sign'", "]", "in", "' +'", ")", ":", "return", "spec", "[", "'sign'", "]", "else", ":", "return", "''" ]
determine sign character .
train
false
30,137
def ping(host=None, port=None, db=None, password=None): server = _connect(host, port, db, password) try: return server.ping() except redis.ConnectionError: return False
[ "def", "ping", "(", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "try", ":", "return", "server...
send a ping to the redis server to test connectivity ; returns false if the connection fails . cli example: .
train
true
30,138
def _check_odd_rewrite(func, arg): return func(arg).func.is_Mul
[ "def", "_check_odd_rewrite", "(", "func", ",", "arg", ")", ":", "return", "func", "(", "arg", ")", ".", "func", ".", "is_Mul" ]
checks that the expr has been rewritten using f -> -f(x) arg : -x .
train
false
30,140
def describe_api_models(restApiId, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) models = _multi_call(conn.get_models, 'items', restApiId=restApiId) return {'models': [_convert_datetime_str(model) for model in models]} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "describe_api_models", "(", "restApiId", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "="...
get all models for a given api cli example: .
train
false
30,143
def delete_snapshot(kwargs=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The delete_snapshot function must be called with -f or --function.') if ((not kwargs) or ('name' not in kwargs)): log.error('A name must be specified when deleting a snapshot.') return False name = kwargs['name'] conn = get_conn() __utils__['cloud.fire_event']('event', 'delete snapshot', 'salt/cloud/snapshot/deleting', args={'name': name}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport']) try: result = conn.destroy_volume_snapshot(conn.ex_get_snapshot(name)) except ResourceNotFoundError as exc: log.error('Snapshot {0} could not be found.\nThe following exception was thrown by libcloud:\n{1}'.format(name, exc), exc_info_on_loglevel=logging.DEBUG) return False __utils__['cloud.fire_event']('event', 'deleted snapshot', 'salt/cloud/snapshot/deleted', args={'name': name}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport']) return result
[ "def", "delete_snapshot", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The delete_snapshot function must be called with -f or --function.'", ")", "if", "(", ...
delete a named disk snapshot , firing cloud events before and after the deletion .
train
true
30,144
def is_readable(path): if os.path.isdir(path): try: os.listdir(path) except (OSError, IOError): return False else: try: with _open(path, 'r') as fd: pass except (OSError, IOError): return False return True
[ "def", "is_readable", "(", "path", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "try", ":", "os", ".", "listdir", "(", "path", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "False", "else", ":", "tr...
check if a given path is readable by the current user .
train
false