id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
29,309
def _timestamp(pathname): try: s = _os_stat(pathname) except OSError: return None return long(s.st_mtime)
[ "def", "_timestamp", "(", "pathname", ")", ":", "try", ":", "s", "=", "_os_stat", "(", "pathname", ")", "except", "OSError", ":", "return", "None", "return", "long", "(", "s", ".", "st_mtime", ")" ]
return a formatted timestamp .
train
false
29,310
def set_virt_path(self, path, for_system=False): if (path is None): path = '' if for_system: if (path == ''): path = '<<inherit>>' self.virt_path = path
[ "def", "set_virt_path", "(", "self", ",", "path", ",", "for_system", "=", "False", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "''", "if", "for_system", ":", "if", "(", "path", "==", "''", ")", ":", "path", "=", "'<<inherit>>'...
virtual storage location suggestion .
train
false
29,311
def modifyBits(inputVal, maxChanges): changes = np.random.random_integers(0, maxChanges, 1)[0] if (changes == 0): return inputVal inputWidth = len(inputVal) whatToChange = np.random.random_integers(0, 41, changes) runningIndex = (-1) numModsDone = 0 for i in xrange(inputWidth): if (numModsDone >= changes): break if (inputVal[i] == 1): runningIndex += 1 if (runningIndex in whatToChange): if ((i != 0) and (inputVal[(i - 1)] == 0)): inputVal[(i - 1)] = 1 inputVal[i] = 0 return inputVal
[ "def", "modifyBits", "(", "inputVal", ",", "maxChanges", ")", ":", "changes", "=", "np", ".", "random", ".", "random_integers", "(", "0", ",", "maxChanges", ",", "1", ")", "[", "0", "]", "if", "(", "changes", "==", "0", ")", ":", "return", "inputVal"...
modifies up to maxchanges number of bits in the inputval .
train
true
29,312
def xvfb(registry, xml_parent, data): xwrapper = XML.SubElement(xml_parent, 'org.jenkinsci.plugins.xvfb.XvfbBuildWrapper') mapping = [('installation-name', 'installationName', 'default'), ('auto-display-name', 'autoDisplayName', False), ('display-name', 'displayName', ''), ('assigned-labels', 'assignedLabels', ''), ('parallel-build', 'parallelBuild', False), ('timeout', 'timeout', 0), ('screen', 'screen', '1024x768x24'), ('display-name-offset', 'displayNameOffset', 1), ('additional-options', 'additionalOptions', ''), ('debug', 'debug', False), ('shutdown-with-build', 'shutdownWithBuild', False)] convert_mapping_to_xml(xwrapper, data, mapping, fail_required=True)
[ "def", "xvfb", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "xwrapper", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'org.jenkinsci.plugins.xvfb.XvfbBuildWrapper'", ")", "mapping", "=", "[", "(", "'installation-name'", ",", "'installationN...
yaml: xvfb enable xvfb during the build .
train
false
29,315
@memoized def _block_types_possibly_scored(): return frozenset((category for (category, xblock_class) in XBlock.load_classes() if (getattr(xblock_class, 'has_score', False) or getattr(xblock_class, 'has_children', False))))
[ "@", "memoized", "def", "_block_types_possibly_scored", "(", ")", ":", "return", "frozenset", "(", "(", "category", "for", "(", "category", ",", "xblock_class", ")", "in", "XBlock", ".", "load_classes", "(", ")", "if", "(", "getattr", "(", "xblock_class", ",...
returns the block types that could have a score .
train
false
29,316
def rand_password(length=15): upper = random.choice(string.ascii_uppercase) ascii_char = string.ascii_letters digits = string.digits digit = random.choice(string.digits) puncs = '~!@#%^&*_=+' punc = random.choice(puncs) seed = ((ascii_char + digits) + puncs) pre = ((upper + digit) + punc) password = (pre + ''.join((random.choice(seed) for x in range((length - 3))))) return password
[ "def", "rand_password", "(", "length", "=", "15", ")", ":", "upper", "=", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", ")", "ascii_char", "=", "string", ".", "ascii_letters", "digits", "=", "string", ".", "digits", "digit", "=", "random"...
generate a random password .
train
false
29,317
def make_jagged_equity_info(num_assets, start_date, first_end, frequency, periods_between_ends, auto_close_delta): frame = pd.DataFrame({'symbol': [chr((ord('A') + i)) for i in range(num_assets)], 'start_date': start_date, 'end_date': pd.date_range(first_end, freq=(periods_between_ends * frequency), periods=num_assets), 'exchange': 'TEST', 'exchange_full': 'TEST FULL'}, index=range(num_assets)) if (auto_close_delta is not None): frame['auto_close_date'] = (frame['end_date'] + auto_close_delta) return frame
[ "def", "make_jagged_equity_info", "(", "num_assets", ",", "start_date", ",", "first_end", ",", "frequency", ",", "periods_between_ends", ",", "auto_close_delta", ")", ":", "frame", "=", "pd", ".", "DataFrame", "(", "{", "'symbol'", ":", "[", "chr", "(", "(", ...
create a dataframe representing assets that all begin at the same start date .
train
true
29,318
@vectorize(['float32(float32)'], target='cuda') def gpu_cos(x): return math.cos(x)
[ "@", "vectorize", "(", "[", "'float32(float32)'", "]", ",", "target", "=", "'cuda'", ")", "def", "gpu_cos", "(", "x", ")", ":", "return", "math", ".", "cos", "(", "x", ")" ]
a simple cuda ufunc to compute the elemwise cosine .
train
false
29,319
def memoize_traffic(**memoize_kwargs): def memoize_traffic_decorator(fn): def memoize_traffic_wrapper(cls, *args, **kwargs): method = '.'.join((cls.__name__, fn.__name__)) actual_memoize_decorator = memoize(method, **memoize_kwargs) actual_memoize_wrapper = actual_memoize_decorator(fn) return actual_memoize_wrapper(cls, *args, **kwargs) return memoize_traffic_wrapper return memoize_traffic_decorator
[ "def", "memoize_traffic", "(", "**", "memoize_kwargs", ")", ":", "def", "memoize_traffic_decorator", "(", "fn", ")", ":", "def", "memoize_traffic_wrapper", "(", "cls", ",", "*", "args", ",", "**", "kwargs", ")", ":", "method", "=", "'.'", ".", "join", "(",...
wrap the memoize decorator and automatically determine memoize key .
train
false
29,320
def _check_img_lib(): imageio = PIL = None try: import imageio except ImportError: try: import PIL.Image except ImportError: pass return (imageio, PIL)
[ "def", "_check_img_lib", "(", ")", ":", "imageio", "=", "PIL", "=", "None", "try", ":", "import", "imageio", "except", "ImportError", ":", "try", ":", "import", "PIL", ".", "Image", "except", "ImportError", ":", "pass", "return", "(", "imageio", ",", "PI...
utility to search for imageio or pil .
train
true
29,321
def task_webhook(fun): @wraps(fun) def _inner(*args, **kwargs): try: retval = fun(*args, **kwargs) except Exception as exc: response = {'status': 'failure', 'reason': safe_repr(exc)} else: response = {'status': 'success', 'retval': retval} return JsonResponse(response) return _inner
[ "def", "task_webhook", "(", "fun", ")", ":", "@", "wraps", "(", "fun", ")", "def", "_inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "retval", "=", "fun", "(", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as...
decorator turning a function into a task webhook .
train
true
29,324
def CreateUser(): testName = 'PyNetTestUser' try: win32net.NetUserDel(server, testName) print 'Warning - deleted user before creating it!' except win32net.error: pass d = {} d['name'] = testName d['password'] = 'deleteme' d['priv'] = win32netcon.USER_PRIV_USER d['comment'] = 'Delete me - created by Python test code' d['flags'] = (win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT) win32net.NetUserAdd(server, 1, d) try: try: win32net.NetUserChangePassword(server, testName, 'wrong', 'new') print 'ERROR: NetUserChangePassword worked with a wrong password!' except win32net.error: pass win32net.NetUserChangePassword(server, testName, 'deleteme', 'new') finally: win32net.NetUserDel(server, testName) print 'Created a user, changed their password, and deleted them!'
[ "def", "CreateUser", "(", ")", ":", "testName", "=", "'PyNetTestUser'", "try", ":", "win32net", ".", "NetUserDel", "(", "server", ",", "testName", ")", "print", "'Warning - deleted user before creating it!'", "except", "win32net", ".", "error", ":", "pass", "d", ...
creates a new test user .
train
false
29,325
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a fresh instance of the hash object .
train
false
29,326
def register_directive(name, directive): _directives[name] = directive
[ "def", "register_directive", "(", "name", ",", "directive", ")", ":", "_directives", "[", "name", "]", "=", "directive" ]
register a nonstandard application-defined directive function .
train
false
29,327
def exec_prompt(bus, prompt, callback): prompt_obj = bus_get_object(bus, prompt) prompt_iface = dbus.Interface(prompt_obj, (SS_PREFIX + 'Prompt')) prompt_iface.Prompt('', signature='s') def new_callback(dismissed, unlocked): if isinstance(unlocked, dbus.Array): unlocked = list(unlocked) callback(bool(dismissed), unlocked) prompt_iface.connect_to_signal('Completed', new_callback)
[ "def", "exec_prompt", "(", "bus", ",", "prompt", ",", "callback", ")", ":", "prompt_obj", "=", "bus_get_object", "(", "bus", ",", "prompt", ")", "prompt_iface", "=", "dbus", ".", "Interface", "(", "prompt_obj", ",", "(", "SS_PREFIX", "+", "'Prompt'", ")", ...
executes the given prompt .
train
false
29,328
def version_msg(): python_version = sys.version[:3] location = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) message = u'Cookiecutter %(version)s from {} (Python {})' return message.format(location, python_version)
[ "def", "version_msg", "(", ")", ":", "python_version", "=", "sys", ".", "version", "[", ":", "3", "]", "location", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file_...
returns the cookiecutter version .
train
true
29,329
def MakeMixture(metapmf, label='mix'): mix = Pmf(label=label) for (pmf, p1) in metapmf.Items(): for (x, p2) in pmf.Items(): mix.Incr(x, (p1 * p2)) return mix
[ "def", "MakeMixture", "(", "metapmf", ",", "label", "=", "'mix'", ")", ":", "mix", "=", "Pmf", "(", "label", "=", "label", ")", "for", "(", "pmf", ",", "p1", ")", "in", "metapmf", ".", "Items", "(", ")", ":", "for", "(", "x", ",", "p2", ")", ...
make a mixture distribution .
train
true
29,330
def MS_elements(predicate, expr, assumptions): return ask(predicate(expr.parent), assumptions)
[ "def", "MS_elements", "(", "predicate", ",", "expr", ",", "assumptions", ")", ":", "return", "ask", "(", "predicate", "(", "expr", ".", "parent", ")", ",", "assumptions", ")" ]
matrix slice elements .
train
false
29,332
def get_db_slave_ips(): nodes = file_io.read(constants.SLAVES_FILE_LOC).rstrip() nodes = nodes.split('\n') if (nodes[(-1)] == ''): nodes = nodes[:(-1)] return nodes
[ "def", "get_db_slave_ips", "(", ")", ":", "nodes", "=", "file_io", ".", "read", "(", "constants", ".", "SLAVES_FILE_LOC", ")", ".", "rstrip", "(", ")", "nodes", "=", "nodes", ".", "split", "(", "'\\n'", ")", "if", "(", "nodes", "[", "(", "-", "1", ...
returns the slave datastore ips .
train
false
29,333
def qualify(ref, resolvers, defns=Namespace.default): ns = None (p, n) = splitPrefix(ref) if (p is not None): if (not isinstance(resolvers, (list, tuple))): resolvers = (resolvers,) for r in resolvers: resolved = r.resolvePrefix(p) if (resolved[1] is not None): ns = resolved break if (ns is None): raise Exception(('prefix (%s) not resolved' % p)) else: ns = defns return (n, ns[1])
[ "def", "qualify", "(", "ref", ",", "resolvers", ",", "defns", "=", "Namespace", ".", "default", ")", ":", "ns", "=", "None", "(", "p", ",", "n", ")", "=", "splitPrefix", "(", "ref", ")", "if", "(", "p", "is", "not", "None", ")", ":", "if", "(",...
get a reference that is i{qualified} by namespace .
train
true
29,334
def parse_debug_object(response): response = nativestr(response) response = ('type:' + response) response = dict([kv.split(':') for kv in response.split()]) int_fields = ('refcount', 'serializedlength', 'lru', 'lru_seconds_idle') for field in int_fields: if (field in response): response[field] = int(response[field]) return response
[ "def", "parse_debug_object", "(", "response", ")", ":", "response", "=", "nativestr", "(", "response", ")", "response", "=", "(", "'type:'", "+", "response", ")", "response", "=", "dict", "(", "[", "kv", ".", "split", "(", "':'", ")", "for", "kv", "in"...
parse the results of rediss debug object command into a python dict .
train
true
29,335
def modify_replication_group(name, wait=600, security_groups=None, region=None, key=None, keyid=None, profile=None, **args): if security_groups: if (not isinstance(security_groups, list)): security_groups = [security_groups] sgs = __salt__['boto_secgroup.convert_to_group_ids'](groups=security_groups, region=region, key=key, keyid=keyid, profile=profile) if ('SecurityGroupIds' not in args): args['SecurityGroupIds'] = [] args['SecurityGroupIds'] += sgs args = dict([(k, v) for (k, v) in args.items() if (not k.startswith('_'))]) return _modify_resource(name, name_param='ReplicationGroupId', desc='replication group', res_type='replication_group', wait=wait, status_param='Status', region=region, key=key, keyid=keyid, profile=profile, **args)
[ "def", "modify_replication_group", "(", "name", ",", "wait", "=", "600", ",", "security_groups", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "**", "args", ")", ":", "i...
modify a replication group .
train
true
29,336
def convertXMLElement(geometryOutput, xmlElement): xmlElement.getXMLProcessor().createChildren(geometryOutput['shapes'], xmlElement)
[ "def", "convertXMLElement", "(", "geometryOutput", ",", "xmlElement", ")", ":", "xmlElement", ".", "getXMLProcessor", "(", ")", ".", "createChildren", "(", "geometryOutput", "[", "'shapes'", "]", ",", "xmlElement", ")" ]
convert the xml element to a path xml element .
train
false
29,337
def submit_export_ora2_data(request, course_key): task_type = 'export_ora2_data' task_class = export_ora2_data task_input = {} task_key = '' return submit_task(request, task_type, task_class, course_key, task_input, task_key)
[ "def", "submit_export_ora2_data", "(", "request", ",", "course_key", ")", ":", "task_type", "=", "'export_ora2_data'", "task_class", "=", "export_ora2_data", "task_input", "=", "{", "}", "task_key", "=", "''", "return", "submit_task", "(", "request", ",", "task_ty...
alreadyrunningerror is raised if an ora2 report is already being generated .
train
false
29,339
def get_managed_object_name(mo_ref): props = get_properties_of_managed_object(mo_ref, ['name']) return props.get('name')
[ "def", "get_managed_object_name", "(", "mo_ref", ")", ":", "props", "=", "get_properties_of_managed_object", "(", "mo_ref", ",", "[", "'name'", "]", ")", "return", "props", ".", "get", "(", "'name'", ")" ]
returns the name of a managed object .
train
false
29,340
def encode_certificate(result): cert_body = '-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n'.format('\n'.join(textwrap.wrap(base64.b64encode(result).decode('utf8'), 64))) signed_crt = open('/tmp/signed.crt', 'w') signed_crt.write(cert_body) signed_crt.close() return True
[ "def", "encode_certificate", "(", "result", ")", ":", "cert_body", "=", "'-----BEGIN CERTIFICATE-----\\n{0}\\n-----END CERTIFICATE-----\\n'", ".", "format", "(", "'\\n'", ".", "join", "(", "textwrap", ".", "wrap", "(", "base64", ".", "b64encode", "(", "result", ")",...
encode cert bytes to pem encoded cert file .
train
true
29,342
def greater(x, y): return tf.greater(x, y)
[ "def", "greater", "(", "x", ",", "y", ")", ":", "return", "tf", ".", "greater", "(", "x", ",", "y", ")" ]
element-wise truth value of .
train
false
29,343
@contextmanager def temporary_locale(temp_locale=None): orig_locale = locale.setlocale(locale.LC_ALL) if (temp_locale is not None): locale.setlocale(locale.LC_ALL, temp_locale) (yield) locale.setlocale(locale.LC_ALL, orig_locale)
[ "@", "contextmanager", "def", "temporary_locale", "(", "temp_locale", "=", "None", ")", ":", "orig_locale", "=", "locale", ".", "setlocale", "(", "locale", ".", "LC_ALL", ")", "if", "(", "temp_locale", "is", "not", "None", ")", ":", "locale", ".", "setloca...
enable code to run in a context with a temporary locale resets the locale back when exiting context .
train
true
29,344
def _nova_detach(nova_volume_manager, cinder_volume_manager, server_id, cinder_volume): try: nova_volume_manager.delete_server_volume(server_id=server_id, attachment_id=cinder_volume.id) except NovaNotFound: raise UnattachedVolume(cinder_volume.id) wait_for_volume_state(volume_manager=cinder_volume_manager, expected_volume=cinder_volume, desired_state=u'available', transient_states=(u'in-use', u'detaching'))
[ "def", "_nova_detach", "(", "nova_volume_manager", ",", "cinder_volume_manager", ",", "server_id", ",", "cinder_volume", ")", ":", "try", ":", "nova_volume_manager", ".", "delete_server_volume", "(", "server_id", "=", "server_id", ",", "attachment_id", "=", "cinder_vo...
detach a cinder volume from a nova host and block until the volume has detached .
train
false
29,345
def post_hook(config): cmd = config.post_hook if (config.verb == 'renew'): if (cmd and (cmd not in post_hook.eventually)): post_hook.eventually.append(cmd) elif cmd: logger.info('Running post-hook command: %s', cmd) _run_hook(cmd)
[ "def", "post_hook", "(", "config", ")", ":", "cmd", "=", "config", ".", "post_hook", "if", "(", "config", ".", "verb", "==", "'renew'", ")", ":", "if", "(", "cmd", "and", "(", "cmd", "not", "in", "post_hook", ".", "eventually", ")", ")", ":", "post...
run post hook if defined .
train
false
29,346
@handle_response_format @treeio_login_required def index_transactions(request, response_format='html'): if request.GET: query = _get_filter_query(Transaction, request.GET) else: query = Q() if ('massform' in request.POST): for key in request.POST: if ('mass-transaction' in key): try: transaction = Transaction.objects.get(pk=request.POST[key]) form = MassActionForm(request.user.profile, request.POST, instance=transaction) if (form.is_valid() and request.user.profile.has_permission(transaction, mode='w')): form.save() except: pass massform = MassActionForm(request.user.profile) transactions = Object.filter_by_request(request, Transaction.objects.filter(query), mode='r') filters = TransactionFilterForm(request.user.profile, 'title', request.GET) return render_to_response('finance/index_transactions', {'transactions': transactions, 'massform': massform, 'filters': filters}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "index_transactions", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "request", ".", "GET", ":", "query", "=", "_get_filter_query", "(", "Transaction", ",", "request", ".",...
index_transactions page: displays all transactions .
train
false
29,347
def _get_candidate_pos(version): return [i for (i, part) in enumerate(version) if (part in CANDIDATE_MARKERS)][0]
[ "def", "_get_candidate_pos", "(", "version", ")", ":", "return", "[", "i", "for", "(", "i", ",", "part", ")", "in", "enumerate", "(", "version", ")", "if", "(", "part", "in", "CANDIDATE_MARKERS", ")", "]", "[", "0", "]" ]
returns the position of the candidate marker .
train
false
29,348
def create_record(name, zone_id, type, data, profile): conn = _get_driver(profile=profile) record_type = _string_to_record_type(type) zone = conn.get_zone(zone_id) return conn.create_record(name, zone, record_type, data)
[ "def", "create_record", "(", "name", ",", "zone_id", ",", "type", ",", "data", ",", "profile", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "record_type", "=", "_string_to_record_type", "(", "type", ")", "zone", "=", "conn", ...
create a new record .
train
true
29,349
def am_following_group(context, data_dict): return _am_following(context, data_dict, ckan.logic.schema.default_follow_group_schema(), context['model'].UserFollowingGroup)
[ "def", "am_following_group", "(", "context", ",", "data_dict", ")", ":", "return", "_am_following", "(", "context", ",", "data_dict", ",", "ckan", ".", "logic", ".", "schema", ".", "default_follow_group_schema", "(", ")", ",", "context", "[", "'model'", "]", ...
return true if youre following the given group .
train
false
29,350
def _validate_db_int(**kwargs): max_int = ((2 ** 31) - 1) for (param_key, param_value) in kwargs.items(): if (param_value and (param_value > max_int)): msg = (_("'%(param)s' value out of range, must not exceed %(max)d.") % {'param': param_key, 'max': max_int}) raise exception.Invalid(msg)
[ "def", "_validate_db_int", "(", "**", "kwargs", ")", ":", "max_int", "=", "(", "(", "2", "**", "31", ")", "-", "1", ")", "for", "(", "param_key", ",", "param_value", ")", "in", "kwargs", ".", "items", "(", ")", ":", "if", "(", "param_value", "and",...
make sure that all arguments are less than or equal to 2 ** 31 - 1 .
train
false
29,351
def filter_otus(otus, prefs): new_otus_list = [] for i in otus: new_otus = [] for j in otus[i]: sample_seq = j.split('_') if (len(sample_seq) > 1): new_name = ''.join(sample_seq[:(-1)]) else: new_name = sample_seq[0] is_sample = False for sample_id in prefs: if (prefs[sample_id] == new_name): is_sample = True if is_sample: pass else: new_otus.append(('%s' % j)) if new_otus: new_otus_list.append((i, new_otus)) return new_otus_list
[ "def", "filter_otus", "(", "otus", ",", "prefs", ")", ":", "new_otus_list", "=", "[", "]", "for", "i", "in", "otus", ":", "new_otus", "=", "[", "]", "for", "j", "in", "otus", "[", "i", "]", ":", "sample_seq", "=", "j", ".", "split", "(", "'_'", ...
filters the otus file based on which samples should be removed and determines which sequences to remove .
train
false
29,352
def test_replay_load_template_name(monkeypatch, mocker, user_config_data, user_config_file): monkeypatch.chdir('tests/fake-repo-tmpl') mock_replay_load = mocker.patch('cookiecutter.main.load') mocker.patch('cookiecutter.main.generate_files') cookiecutter('.', replay=True, config_file=user_config_file) mock_replay_load.assert_called_once_with(user_config_data['replay_dir'], 'fake-repo-tmpl')
[ "def", "test_replay_load_template_name", "(", "monkeypatch", ",", "mocker", ",", "user_config_data", ",", "user_config_file", ")", ":", "monkeypatch", ".", "chdir", "(", "'tests/fake-repo-tmpl'", ")", "mock_replay_load", "=", "mocker", ".", "patch", "(", "'cookiecutte...
check that replay_load is called with a valid template_name that is not a relative path .
train
false
29,353
def get_logit_endog(true_params, exog, noise_level): N = exog.shape[0] Xdotparams = sp.dot(exog, true_params) noise = (noise_level * sp.randn(*Xdotparams.shape)) eXB = sp.column_stack((sp.ones(len(Xdotparams)), sp.exp(Xdotparams))) class_probabilities = (eXB / eXB.sum(1)[:, None]) cdf = class_probabilities.cumsum(axis=1) endog = sp.zeros(N) for i in range(N): endog[i] = sp.searchsorted(cdf[i, :], sp.rand()) return endog
[ "def", "get_logit_endog", "(", "true_params", ",", "exog", ",", "noise_level", ")", ":", "N", "=", "exog", ".", "shape", "[", "0", "]", "Xdotparams", "=", "sp", ".", "dot", "(", "exog", ",", "true_params", ")", "noise", "=", "(", "noise_level", "*", ...
gets an endogenous response that is consistent with the true_params .
train
false
29,354
def random_vector(size): return numpy.random.random(size)
[ "def", "random_vector", "(", "size", ")", ":", "return", "numpy", ".", "random", ".", "random", "(", "size", ")" ]
return array of random doubles in the half-open interval [0 .
train
false
29,355
def renewal_conf_files(config): return glob.glob(os.path.join(config.renewal_configs_dir, '*.conf'))
[ "def", "renewal_conf_files", "(", "config", ")", ":", "return", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "config", ".", "renewal_configs_dir", ",", "'*.conf'", ")", ")" ]
return /path/to/* .
train
false
29,356
def k_shell(G, k=None, core_number=None): def k_filter(v, k, c): return (c[v] == k) return _core_subgraph(G, k_filter, k, core_number)
[ "def", "k_shell", "(", "G", ",", "k", "=", "None", ",", "core_number", "=", "None", ")", ":", "def", "k_filter", "(", "v", ",", "k", ",", "c", ")", ":", "return", "(", "c", "[", "v", "]", "==", "k", ")", "return", "_core_subgraph", "(", "G", ...
return the k-shell of g .
train
false
29,357
@LocalContext def cpp(shellcode): arch = context.arch os = context.os code = (_include_header() + shellcode) cmd = ['cpp', '-C', '-nostdinc', '-undef', '-P', ('-I' + _incdir), '/dev/stdin'] return (_run(cmd, code).strip('\n').rstrip() + '\n')
[ "@", "LocalContext", "def", "cpp", "(", "shellcode", ")", ":", "arch", "=", "context", ".", "arch", "os", "=", "context", ".", "os", "code", "=", "(", "_include_header", "(", ")", "+", "shellcode", ")", "cmd", "=", "[", "'cpp'", ",", "'-C'", ",", "...
cpp -> str runs cpp over the given shellcode .
train
false
29,358
def idcounter(): global ICOUNT ICOUNT += 1 return str(ICOUNT)
[ "def", "idcounter", "(", ")", ":", "global", "ICOUNT", "ICOUNT", "+=", "1", "return", "str", "(", "ICOUNT", ")" ]
makes unique ids .
train
false
29,360
def prune_nones(mydict): return dict([(k, v) for (k, v) in mydict.items() if ((v != None) and (v != 'None'))])
[ "def", "prune_nones", "(", "mydict", ")", ":", "return", "dict", "(", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "mydict", ".", "items", "(", ")", "if", "(", "(", "v", "!=", "None", ")", "and", "(", "v", "!=", "'Non...
remove keys from mydict whose values are none :arg mydict: the dictionary to act on :rtype: dict .
train
false
29,361
def init_ool(): global ffi_ool, cffi_sin_ool, cffi_cos_ool, cffi_foo, vsSin, vdSin global vector_real, vector_imag if (ffi_ool is None): (ffi_ool, mod) = load_ool_module() cffi_sin_ool = mod.lib.sin cffi_cos_ool = mod.lib.cos cffi_foo = mod.lib.foo vsSin = mod.lib.vsSin vdSin = mod.lib.vdSin vector_real = mod.lib.vector_real vector_imag = mod.lib.vector_imag del mod
[ "def", "init_ool", "(", ")", ":", "global", "ffi_ool", ",", "cffi_sin_ool", ",", "cffi_cos_ool", ",", "cffi_foo", ",", "vsSin", ",", "vdSin", "global", "vector_real", ",", "vector_imag", "if", "(", "ffi_ool", "is", "None", ")", ":", "(", "ffi_ool", ",", ...
same as init() for ool mode .
train
false
29,362
@register(u'yank-nth-arg') def yank_nth_arg(event): n = (event.arg if event.arg_present else None) event.current_buffer.yank_nth_arg(n)
[ "@", "register", "(", "u'yank-nth-arg'", ")", "def", "yank_nth_arg", "(", "event", ")", ":", "n", "=", "(", "event", ".", "arg", "if", "event", ".", "arg_present", "else", "None", ")", "event", ".", "current_buffer", ".", "yank_nth_arg", "(", "n", ")" ]
insert the first argument of the previous command .
train
false
29,363
def remove_empty_rules(css): return re.sub('[^\\}\\{]+\\{\\}', '', css)
[ "def", "remove_empty_rules", "(", "css", ")", ":", "return", "re", ".", "sub", "(", "'[^\\\\}\\\\{]+\\\\{\\\\}'", ",", "''", ",", "css", ")" ]
remove empty rules .
train
false
29,364
def getConfigOption(opt): return CONFIG_OPTIONS[opt]
[ "def", "getConfigOption", "(", "opt", ")", ":", "return", "CONFIG_OPTIONS", "[", "opt", "]" ]
return the value of a single global configuration option .
train
false
29,367
def load_properties(page_info): import re if (not page_info.title): page_info.title = extract_title(page_info.source, page_info.name) if (page_info.title and (not (u'{% block title %}' in page_info.source))): page_info.source += u'\n{% block title %}{{ title }}{% endblock %}' if (u'<!-- no-breadcrumbs -->' in page_info.source): page_info.no_breadcrumbs = 1 if (u'<!-- show-sidebar -->' in page_info.source): page_info.show_sidebar = 1 if (u'<!-- no-header -->' in page_info.source): page_info.no_header = 1 elif ((not (u'{% block header %}' in page_info.source)) and (not (u'<h1' in page_info.source))): page_info.source += u'\n{% block header %}<h1>{{ title }}</h1>{% endblock %}' if (u'<!-- no-cache -->' in page_info.source): page_info.no_cache = 1 if (u'<!-- no-sitemap -->' in page_info.source): page_info.no_cache = 1
[ "def", "load_properties", "(", "page_info", ")", ":", "import", "re", "if", "(", "not", "page_info", ".", "title", ")", ":", "page_info", ".", "title", "=", "extract_title", "(", "page_info", ".", "source", ",", "page_info", ".", "name", ")", "if", "(", ...
load properties like no_cache .
train
false
29,371
def modify_autojump_sh(etc_dir, share_dir, dryrun=False): custom_install = (' \n# check custom install \nif [ -s %s/autojump.${shell} ]; then \n source %s/autojump.${shell} \nfi\n' % (share_dir, share_dir)) with open(os.path.join(etc_dir, 'autojump.sh'), 'a') as f: f.write(custom_install)
[ "def", "modify_autojump_sh", "(", "etc_dir", ",", "share_dir", ",", "dryrun", "=", "False", ")", ":", "custom_install", "=", "(", "' \\n# check custom install \\nif [ -s %s/autojump.${shell} ]; then \\n source %s/autojump.${shell} \\nfi\\n'", "%", ...
append custom installation path to autojump .
train
false
29,372
def fonseca(individual): f_1 = (1 - exp((- sum((((xi - (1 / sqrt(3))) ** 2) for xi in individual[:3]))))) f_2 = (1 - exp((- sum((((xi + (1 / sqrt(3))) ** 2) for xi in individual[:3]))))) return (f_1, f_2)
[ "def", "fonseca", "(", "individual", ")", ":", "f_1", "=", "(", "1", "-", "exp", "(", "(", "-", "sum", "(", "(", "(", "(", "xi", "-", "(", "1", "/", "sqrt", "(", "3", ")", ")", ")", "**", "2", ")", "for", "xi", "in", "individual", "[", ":...
fonseca and flemings multiobjective function .
train
false
29,373
def Weibull(name, alpha, beta): return rv(name, WeibullDistribution, (alpha, beta))
[ "def", "Weibull", "(", "name", ",", "alpha", ",", "beta", ")", ":", "return", "rv", "(", "name", ",", "WeibullDistribution", ",", "(", "alpha", ",", "beta", ")", ")" ]
create a continuous random variable with a weibull distribution .
train
false
29,374
def dmp_apply_pairs(f, g, h, args, u, K): if (not u): return dup_apply_pairs(f, g, h, args, K) (n, m, v) = (len(f), len(g), (u - 1)) if (n != m): if (n > m): g = (dmp_zeros((n - m), v, K) + g) else: f = (dmp_zeros((m - n), v, K) + f) result = [] for (a, b) in zip(f, g): result.append(dmp_apply_pairs(a, b, h, args, v, K)) return dmp_strip(result, u)
[ "def", "dmp_apply_pairs", "(", "f", ",", "g", ",", "h", ",", "args", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_apply_pairs", "(", "f", ",", "g", ",", "h", ",", "args", ",", "K", ")", "(", "n", ",", "m", ...
apply h to pairs of coefficients of f and g .
train
false
29,375
def initialize_settings(instance): provider = default_provider.get_provider(instance.__class__) if provider: provider.initialize(instance)
[ "def", "initialize_settings", "(", "instance", ")", ":", "provider", "=", "default_provider", ".", "get_provider", "(", "instance", ".", "__class__", ")", "if", "provider", ":", "provider", ".", "initialize", "(", "instance", ")" ]
this is usually done in widgets new .
train
false
29,377
@contextmanager def controlled_logging(request, logger): default_level = None from_logout = _is_from_logout(request) if from_logout: default_level = logger.getEffectiveLevel() logger.setLevel(ERROR) try: (yield) finally: if from_logout: logger.setLevel(default_level)
[ "@", "contextmanager", "def", "controlled_logging", "(", "request", ",", "logger", ")", ":", "default_level", "=", "None", "from_logout", "=", "_is_from_logout", "(", "request", ")", "if", "from_logout", ":", "default_level", "=", "logger", ".", "getEffectiveLevel...
control the logging by changing loggers level if the request is from logout .
train
false
29,378
def related_bug(bug, status_code=None): def decorator(f): @functools.wraps(f) def wrapper(self, *func_args, **func_kwargs): try: return f(self, *func_args, **func_kwargs) except Exception as exc: exc_status_code = getattr(exc, 'status_code', None) if ((status_code is None) or (status_code == exc_status_code)): LOG.error('Hints: This test was made for the bug %s. The failure could be related to https://launchpad.net/bugs/%s', bug, bug) raise exc return wrapper return decorator
[ "def", "related_bug", "(", "bug", ",", "status_code", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "func_args", ",", "**", "func_kwargs", ")", ...
a decorator useful to know solutions from launchpad bug reports .
train
false
29,379
def list_actions(name, location='\\'): pythoncom.CoInitialize() task_service = win32com.client.Dispatch('Schedule.Service') task_service.Connect() task_folder = task_service.GetFolder(location) task_definition = task_folder.GetTask(name).Definition actions = task_definition.Actions ret = [] for action in actions: ret.append(action.Id) return ret
[ "def", "list_actions", "(", "name", ",", "location", "=", "'\\\\'", ")", ":", "pythoncom", ".", "CoInitialize", "(", ")", "task_service", "=", "win32com", ".", "client", ".", "Dispatch", "(", "'Schedule.Service'", ")", "task_service", ".", "Connect", "(", ")...
list all actions that pertain to a task in the specified location .
train
false
29,380
def render_sum(children): if (len(children) == 1): return children[0] children_latex = [k.latex for k in children] latex = ''.join(children_latex) tall = any((k.tall for k in children)) return LatexRendered(latex, tall=tall)
[ "def", "render_sum", "(", "children", ")", ":", "if", "(", "len", "(", "children", ")", "==", "1", ")", ":", "return", "children", "[", "0", "]", "children_latex", "=", "[", "k", ".", "latex", "for", "k", "in", "children", "]", "latex", "=", "''", ...
concatenate elements .
train
false
29,381
def _default_revctrl(dirname=''): for finder in finders: for item in finder(dirname): (yield item)
[ "def", "_default_revctrl", "(", "dirname", "=", "''", ")", ":", "for", "finder", "in", "finders", ":", "for", "item", "in", "finder", "(", "dirname", ")", ":", "(", "yield", "item", ")" ]
primary svn_cvs entry point .
train
false
29,382
def get_postgres_connection(host, port, database=None): def connect_to_postgres(): try: return connect(host=host, user=u'postgres', port=port, database=database) except (InterfaceError, ProgrammingError) as e: Message.new(message_type=u'acceptance:integration:postgres_connect', exception=unicode(e.__class__), reason=unicode(e)).write() return False d = loop_until(reactor, connect_to_postgres) return d
[ "def", "get_postgres_connection", "(", "host", ",", "port", ",", "database", "=", "None", ")", ":", "def", "connect_to_postgres", "(", ")", ":", "try", ":", "return", "connect", "(", "host", "=", "host", ",", "user", "=", "u'postgres'", ",", "port", "=",...
returns a deferred which fires with a pg8000 connection when one has been created .
train
false
29,383
def formatdate(timeval=None, localtime=False, usegmt=False): if (timeval is None): timeval = time.time() if localtime: now = time.localtime(timeval) if (time.daylight and now[(-1)]): offset = time.altzone else: offset = time.timezone (hours, minutes) = divmod(abs(offset), 3600) if (offset > 0): sign = '-' else: sign = '+' zone = ('%s%02d%02d' % (sign, hours, (minutes // 60))) else: now = time.gmtime(timeval) if usegmt: zone = 'GMT' else: zone = '-0000' return ('%s, %02d %s %04d %02d:%02d:%02d %s' % (['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][now[6]], now[2], ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][(now[1] - 1)], now[0], now[3], now[4], now[5], zone))
[ "def", "formatdate", "(", "timeval", "=", "None", ",", "localtime", "=", "False", ",", "usegmt", "=", "False", ")", ":", "if", "(", "timeval", "is", "None", ")", ":", "timeval", "=", "time", ".", "time", "(", ")", "if", "localtime", ":", "now", "="...
returns time format preferred for internet standards .
train
true
29,384
def get_growth_data(year, quarter): if (ct._check_input(year, quarter) is True): ct._write_head() data = _get_growth_data(year, quarter, 1, pd.DataFrame()) if (data is not None): data['code'] = data['code'].map((lambda x: str(x).zfill(6))) return data
[ "def", "get_growth_data", "(", "year", ",", "quarter", ")", ":", "if", "(", "ct", ".", "_check_input", "(", "year", ",", "quarter", ")", "is", "True", ")", ":", "ct", ".", "_write_head", "(", ")", "data", "=", "_get_growth_data", "(", "year", ",", "q...
parameters year:int 年度 e .
train
false
29,386
def file_find(filename, preferred_path=None, raise_if_missing=True): search_paths = _get_search_paths(preferred_path, suffix=filename) for file_candidate in search_paths: if os.path.isfile(file_candidate): return file_candidate if raise_if_missing: raise MissingDataSource(search_paths) else: return False
[ "def", "file_find", "(", "filename", ",", "preferred_path", "=", "None", ",", "raise_if_missing", "=", "True", ")", ":", "search_paths", "=", "_get_search_paths", "(", "preferred_path", ",", "suffix", "=", "filename", ")", "for", "file_candidate", "in", "search_...
return the path to an existing file .
train
false
29,388
def _semantic_feasibility(self, G1_node, G2_node): if (self.node_match is not None): nm = self.node_match(self.G1.node[G1_node], self.G2.node[G2_node]) if (not nm): return False if (self.edge_match is not None): G1_adj = self.G1_adj G2_adj = self.G2_adj core_1 = self.core_1 edge_match = self.edge_match for neighbor in G1_adj[G1_node]: if (neighbor == G1_node): if (not edge_match(G1_adj[G1_node][G1_node], G2_adj[G2_node][G2_node])): return False elif (neighbor in core_1): if (not edge_match(G1_adj[G1_node][neighbor], G2_adj[G2_node][core_1[neighbor]])): return False return True
[ "def", "_semantic_feasibility", "(", "self", ",", "G1_node", ",", "G2_node", ")", ":", "if", "(", "self", ".", "node_match", "is", "not", "None", ")", ":", "nm", "=", "self", ".", "node_match", "(", "self", ".", "G1", ".", "node", "[", "G1_node", "]"...
returns true if mapping g1_node to g2_node is semantically feasible .
train
false
29,390
def get_writer_instance(writer_name): writer = REPORT_WRITERS_MAP[writer_name]() assert isinstance(writer, ReportWriter) return writer
[ "def", "get_writer_instance", "(", "writer_name", ")", ":", "writer", "=", "REPORT_WRITERS_MAP", "[", "writer_name", "]", "(", ")", "assert", "isinstance", "(", "writer", ",", "ReportWriter", ")", "return", "writer" ]
get a report writer instance by name :type writer_name: str .
train
false
29,391
def percentiles_summary(df, num_old, num_new, upsample, state): from dask.array.percentile import _percentile length = len(df) if (length == 0): return () random_state = np.random.RandomState(state) qs = sample_percentiles(num_old, num_new, length, upsample, random_state) data = df.values interpolation = 'linear' if (str(data.dtype) == 'category'): data = data.codes interpolation = 'nearest' vals = _percentile(data, qs, interpolation=interpolation) if ((interpolation == 'linear') and np.issubdtype(data.dtype, np.integer)): vals = np.round(vals).astype(data.dtype) vals_and_weights = percentiles_to_weights(qs, vals, length) return vals_and_weights
[ "def", "percentiles_summary", "(", "df", ",", "num_old", ",", "num_new", ",", "upsample", ",", "state", ")", ":", "from", "dask", ".", "array", ".", "percentile", "import", "_percentile", "length", "=", "len", "(", "df", ")", "if", "(", "length", "==", ...
summarize data using percentiles and derived weights .
train
false
29,393
def exit_on_signal(): def shutdown(signal, frame): logger.warn('Received signal %s: exiting', signal) sys.exit((128 + signal)) signal.signal(signal.SIGHUP, shutdown) signal.signal(signal.SIGINT, shutdown) signal.signal(signal.SIGTERM, shutdown)
[ "def", "exit_on_signal", "(", ")", ":", "def", "shutdown", "(", "signal", ",", "frame", ")", ":", "logger", ".", "warn", "(", "'Received signal %s: exiting'", ",", "signal", ")", "sys", ".", "exit", "(", "(", "128", "+", "signal", ")", ")", "signal", "...
install a signal handler for hup .
train
true
29,395
def _animate_evoked_topomap(evoked, ch_type='mag', times=None, frame_rate=None, butterfly=False, blit=True, show=True): return _topomap_animation(evoked, ch_type=ch_type, times=times, frame_rate=frame_rate, butterfly=butterfly, blit=blit, show=show)
[ "def", "_animate_evoked_topomap", "(", "evoked", ",", "ch_type", "=", "'mag'", ",", "times", "=", "None", ",", "frame_rate", "=", "None", ",", "butterfly", "=", "False", ",", "blit", "=", "True", ",", "show", "=", "True", ")", ":", "return", "_topomap_an...
make animation of evoked data as topomap timeseries .
train
false
29,396
def getBevelPath(begin, center, close, end, radius): beginComplex = begin.dropAxis() centerComplex = center.dropAxis() endComplex = end.dropAxis() beginComplexSegmentLength = abs((centerComplex - beginComplex)) endComplexSegmentLength = abs((centerComplex - endComplex)) minimumRadius = lineation.getMinimumRadius(beginComplexSegmentLength, endComplexSegmentLength, radius) if (minimumRadius <= close): return [center] beginBevel = (center + ((minimumRadius / beginComplexSegmentLength) * (begin - center))) endBevel = (center + ((minimumRadius / endComplexSegmentLength) * (end - center))) if (radius > 0.0): return [beginBevel, endBevel] midpointComplex = (0.5 * (beginBevel.dropAxis() + endBevel.dropAxis())) spikeComplex = ((centerComplex + centerComplex) - midpointComplex) return [beginBevel, Vector3(spikeComplex.real, spikeComplex.imag, center.z), endBevel]
[ "def", "getBevelPath", "(", "begin", ",", "center", ",", "close", ",", "end", ",", "radius", ")", ":", "beginComplex", "=", "begin", ".", "dropAxis", "(", ")", "centerComplex", "=", "center", ".", "dropAxis", "(", ")", "endComplex", "=", "end", ".", "d...
get bevel path .
train
false
29,397
def gnu_getopt(args, shortopts, longopts=[]): opts = [] prog_args = [] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) if shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get('POSIXLY_CORRECT'): all_options_first = True else: all_options_first = False while args: if (args[0] == '--'): prog_args += args[1:] break if (args[0][:2] == '--'): (opts, args) = do_longs(opts, args[0][2:], longopts, args[1:]) elif (args[0][:1] == '-'): (opts, args) = do_shorts(opts, args[0][1:], shortopts, args[1:]) elif all_options_first: prog_args += args break else: prog_args.append(args[0]) args = args[1:] return (opts, prog_args)
[ "def", "gnu_getopt", "(", "args", ",", "shortopts", ",", "longopts", "=", "[", "]", ")", ":", "opts", "=", "[", "]", "prog_args", "=", "[", "]", "if", "isinstance", "(", "longopts", ",", "str", ")", ":", "longopts", "=", "[", "longopts", "]", "else...
getopt -> opts .
train
true
29,398
def mat2quat(M): (Qxx, Qyx, Qzx, Qxy, Qyy, Qzy, Qxz, Qyz, Qzz) = M.flat K = (np.array([[((Qxx - Qyy) - Qzz), 0, 0, 0], [(Qyx + Qxy), ((Qyy - Qxx) - Qzz), 0, 0], [(Qzx + Qxz), (Qzy + Qyz), ((Qzz - Qxx) - Qyy), 0], [(Qyz - Qzy), (Qzx - Qxz), (Qxy - Qyx), ((Qxx + Qyy) + Qzz)]]) / 3.0) (vals, vecs) = np.linalg.eigh(K) q = vecs[([3, 0, 1, 2], np.argmax(vals))] if (q[0] < 0): q *= (-1) return q
[ "def", "mat2quat", "(", "M", ")", ":", "(", "Qxx", ",", "Qyx", ",", "Qzx", ",", "Qxy", ",", "Qyy", ",", "Qzy", ",", "Qxz", ",", "Qyz", ",", "Qzz", ")", "=", "M", ".", "flat", "K", "=", "(", "np", ".", "array", "(", "[", "[", "(", "(", "...
calculate quaternion corresponding to given rotation matrix parameters m : array-like 3x3 rotation matrix returns q : array closest quaternion to input matrix .
train
false
29,399
def ember_app(path=None): ember_app_folder = None fp = (path or 'index.html') for k in settings.EXTERNAL_EMBER_APPS.keys(): if request.path.strip('/').startswith(k): ember_app_folder = os.path.abspath(os.path.join(os.getcwd(), settings.EXTERNAL_EMBER_APPS[k]['path'])) break if (not ember_app_folder): raise HTTPError(http.NOT_FOUND) if (not os.path.abspath(os.path.join(ember_app_folder, fp)).startswith(ember_app_folder)): raise HTTPError(http.NOT_FOUND) if (not os.path.isfile(os.path.join(ember_app_folder, fp))): fp = 'index.html' return send_from_directory(ember_app_folder, fp)
[ "def", "ember_app", "(", "path", "=", "None", ")", ":", "ember_app_folder", "=", "None", "fp", "=", "(", "path", "or", "'index.html'", ")", "for", "k", "in", "settings", ".", "EXTERNAL_EMBER_APPS", ".", "keys", "(", ")", ":", "if", "request", ".", "pat...
serve the contents of the ember application .
train
false
29,400
@jit def simulate_linear_model(A, x0, v, ts_length): A = np.asarray(A) n = A.shape[0] x = np.empty((n, ts_length)) x[:, 0] = x0 for t in range((ts_length - 1)): for i in range(n): x[(i, (t + 1))] = v[(i, t)] for j in range(n): x[(i, (t + 1))] += (A[(i, j)] * x[(j, t)]) return x
[ "@", "jit", "def", "simulate_linear_model", "(", "A", ",", "x0", ",", "v", ",", "ts_length", ")", ":", "A", "=", "np", ".", "asarray", "(", "A", ")", "n", "=", "A", ".", "shape", "[", "0", "]", "x", "=", "np", ".", "empty", "(", "(", "n", "...
this is a separate function for simulating a vector linear system of the form x_{t+1} = a x_t + v_t given x_0 = x0 here x_t and v_t are both n x 1 and a is n x n .
train
true
29,401
def hostname(): if sabnzbd.WIN32: return os.environ.get('computername', 'unknown') try: return os.uname()[1] except: return 'unknown'
[ "def", "hostname", "(", ")", ":", "if", "sabnzbd", ".", "WIN32", ":", "return", "os", ".", "environ", ".", "get", "(", "'computername'", ",", "'unknown'", ")", "try", ":", "return", "os", ".", "uname", "(", ")", "[", "1", "]", "except", ":", "retur...
require the hostname to have a specific value .
train
false
29,402
def make_client(reactor, cluster): control_node = cluster.control_node.address certificates_path = cluster.certificates_path cluster_cert = certificates_path.child('cluster.crt') user_cert = certificates_path.child('user.crt') user_key = certificates_path.child('user.key') return FlockerClient(reactor, control_node, REST_API_PORT, cluster_cert, user_cert, user_key)
[ "def", "make_client", "(", "reactor", ",", "cluster", ")", ":", "control_node", "=", "cluster", ".", "control_node", ".", "address", "certificates_path", "=", "cluster", ".", "certificates_path", "cluster_cert", "=", "certificates_path", ".", "child", "(", "'clust...
create a :class:flockerclient object for accessing the given cluster .
train
false
29,405
def absolute_coordinate(coord_string): m = ABSOLUTE_RE.match(coord_string.upper()) if m: parts = m.groups() if all(parts[(-2):]): return ('$%s$%s:$%s$%s' % (parts[0], parts[1], parts[3], parts[4])) else: return ('$%s$%s' % (parts[0], parts[1])) else: return coord_string
[ "def", "absolute_coordinate", "(", "coord_string", ")", ":", "m", "=", "ABSOLUTE_RE", ".", "match", "(", "coord_string", ".", "upper", "(", ")", ")", "if", "m", ":", "parts", "=", "m", ".", "groups", "(", ")", "if", "all", "(", "parts", "[", "(", "...
convert a coordinate to an absolute coordinate string .
train
false
29,406
@pytest.fixture(autouse=True) def httpbin_after_test(httpbin, request): request.node._httpbin_log = httpbin.captured_log (yield) httpbin.after_test()
[ "@", "pytest", ".", "fixture", "(", "autouse", "=", "True", ")", "def", "httpbin_after_test", "(", "httpbin", ",", "request", ")", ":", "request", ".", "node", ".", "_httpbin_log", "=", "httpbin", ".", "captured_log", "(", "yield", ")", "httpbin", ".", "...
fixture to clean httpbin request list after each test .
train
false
29,407
@require_admin @api_handle_error_with_json @process_log_from_request def check_update_progress(request, process_log): return JsonResponse(_process_log_to_dict(process_log))
[ "@", "require_admin", "@", "api_handle_error_with_json", "@", "process_log_from_request", "def", "check_update_progress", "(", "request", ",", "process_log", ")", ":", "return", "JsonResponse", "(", "_process_log_to_dict", "(", "process_log", ")", ")" ]
api endpoint for getting progress data on downloads .
train
false
29,408
def rostest_name_from_path(pkg_dir, test_file): test_file_abs = os.path.abspath(test_file) if test_file_abs.startswith(pkg_dir): test_file = test_file_abs[len(pkg_dir):] if (test_file[0] == os.sep): test_file = test_file[1:] outname = test_file.replace(os.sep, '_') if ('.' in outname): outname = outname[:outname.rfind('.')] return outname
[ "def", "rostest_name_from_path", "(", "pkg_dir", ",", "test_file", ")", ":", "test_file_abs", "=", "os", ".", "path", ".", "abspath", "(", "test_file", ")", "if", "test_file_abs", ".", "startswith", "(", "pkg_dir", ")", ":", "test_file", "=", "test_file_abs", ...
derive name of rostest based on file name/path .
train
false
29,409
@subscriber(ResourceChanged, for_resources=('bucket',), for_actions=(ACTIONS.DELETE,)) def on_buckets_deleted(event): storage = event.request.registry.storage permission = event.request.registry.permission for change in event.impacted_records: bucket = change['old'] bucket_uri = instance_uri(event.request, 'bucket', id=bucket['id']) parent_pattern = (bucket_uri + '*') storage.delete_all(parent_id=parent_pattern, collection_id=None, with_deleted=False) storage.purge_deleted(parent_id=parent_pattern, collection_id=None) permission.delete_object_permissions(parent_pattern)
[ "@", "subscriber", "(", "ResourceChanged", ",", "for_resources", "=", "(", "'bucket'", ",", ")", ",", "for_actions", "=", "(", "ACTIONS", ".", "DELETE", ",", ")", ")", "def", "on_buckets_deleted", "(", "event", ")", ":", "storage", "=", "event", ".", "re...
some buckets were deleted .
train
false
29,411
def test_issue360(en_tokenizer): tokens = en_tokenizer(u'$45...............Asking') assert (len(tokens) > 2)
[ "def", "test_issue360", "(", "en_tokenizer", ")", ":", "tokens", "=", "en_tokenizer", "(", "u'$45...............Asking'", ")", "assert", "(", "len", "(", "tokens", ")", ">", "2", ")" ]
test tokenization of big ellipsis .
train
false
29,412
def funshion_vid_to_urls(vid): html = get_content('http://pv.funshion.com/v5/video/play/?id={vid}&cl=aphone&uc=5'.format(vid=vid)) return select_url_from_video_api(html)
[ "def", "funshion_vid_to_urls", "(", "vid", ")", ":", "html", "=", "get_content", "(", "'http://pv.funshion.com/v5/video/play/?id={vid}&cl=aphone&uc=5'", ".", "format", "(", "vid", "=", "vid", ")", ")", "return", "select_url_from_video_api", "(", "html", ")" ]
str->str select one resolution for single video download .
train
false
29,413
def unit_poly_verts(theta): (x0, y0, r) = ([0.5] * 3) verts = [(((r * np.cos(t)) + x0), ((r * np.sin(t)) + y0)) for t in theta] return verts
[ "def", "unit_poly_verts", "(", "theta", ")", ":", "(", "x0", ",", "y0", ",", "r", ")", "=", "(", "[", "0.5", "]", "*", "3", ")", "verts", "=", "[", "(", "(", "(", "r", "*", "np", ".", "cos", "(", "t", ")", ")", "+", "x0", ")", ",", "(",...
return vertices of polygon for subplot axes .
train
true
29,415
def toposorted(graph, parents): result = [] used = set() def use(v, top): if (id(v) in used): return for parent in parents(v): if (parent is top): raise ValueError('graph is cyclical', graph) use(parent, v) used.add(id(v)) result.append(v) for v in graph: use(v, v) return result
[ "def", "toposorted", "(", "graph", ",", "parents", ")", ":", "result", "=", "[", "]", "used", "=", "set", "(", ")", "def", "use", "(", "v", ",", "top", ")", ":", "if", "(", "id", "(", "v", ")", "in", "used", ")", ":", "return", "for", "parent...
returns vertices of a dag in topological order .
train
false
29,416
def does_tree_import(package, name, node): binding = find_binding(name, find_root(node), package) return bool(binding)
[ "def", "does_tree_import", "(", "package", ",", "name", ",", "node", ")", ":", "binding", "=", "find_binding", "(", "name", ",", "find_root", "(", "node", ")", ",", "package", ")", "return", "bool", "(", "binding", ")" ]
returns true if name is imported from package at the top level of the tree which node belongs to .
train
true
29,418
def _pop_colors_and_alpha(glyphclass, kwargs, prefix='', default_alpha=1.0): result = dict() color = kwargs.pop((prefix + 'color'), get_default_color()) for argname in ('fill_color', 'line_color'): if (argname not in glyphclass.properties()): continue result[argname] = kwargs.pop((prefix + argname), color) if ('text_color' in glyphclass.properties()): result['text_color'] = kwargs.pop((prefix + 'text_color'), 'black') alpha = kwargs.pop((prefix + 'alpha'), default_alpha) for argname in ('fill_alpha', 'line_alpha', 'text_alpha'): if (argname not in glyphclass.properties()): continue result[argname] = kwargs.pop((prefix + argname), alpha) return result
[ "def", "_pop_colors_and_alpha", "(", "glyphclass", ",", "kwargs", ",", "prefix", "=", "''", ",", "default_alpha", "=", "1.0", ")", ":", "result", "=", "dict", "(", ")", "color", "=", "kwargs", ".", "pop", "(", "(", "prefix", "+", "'color'", ")", ",", ...
given a kwargs dict .
train
true
29,419
def build_from_c_and_cpp_files(extensions): for extension in extensions: sources = [] for sfile in extension.sources: (path, ext) = os.path.splitext(sfile) if (ext in ('.pyx', '.py')): if (extension.language == 'c++'): ext = '.cpp' else: ext = '.c' sfile = (path + ext) sources.append(sfile) extension.sources = sources
[ "def", "build_from_c_and_cpp_files", "(", "extensions", ")", ":", "for", "extension", "in", "extensions", ":", "sources", "=", "[", "]", "for", "sfile", "in", "extension", ".", "sources", ":", "(", "path", ",", "ext", ")", "=", "os", ".", "path", ".", ...
modify the extensions to build from the .
train
true
29,420
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
29,422
def test_enum(): class TestEnum(enum.Enum, ): 'Test enum.' value1 = 'Value 1' value2 = 'Value 2' schema = vol.Schema(cv.enum(TestEnum)) with pytest.raises(vol.Invalid): schema('value3')
[ "def", "test_enum", "(", ")", ":", "class", "TestEnum", "(", "enum", ".", "Enum", ",", ")", ":", "value1", "=", "'Value 1'", "value2", "=", "'Value 2'", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "enum", "(", "TestEnum", ")", ")", "with", ...
test enum validator .
train
false
29,423
def only_once(fn): once = [fn] def go(*arg, **kw): if once: once_fn = once.pop() return once_fn(*arg, **kw) return go
[ "def", "only_once", "(", "fn", ")", ":", "once", "=", "[", "fn", "]", "def", "go", "(", "*", "arg", ",", "**", "kw", ")", ":", "if", "once", ":", "once_fn", "=", "once", ".", "pop", "(", ")", "return", "once_fn", "(", "*", "arg", ",", "**", ...
decorate the given function to be a no-op after it is called exactly once .
train
false
29,425
@assignment_tag(takes_context=True) def test_page_is_public(context, page): if (u'all_page_view_restriction_paths' not in context): context[u'all_page_view_restriction_paths'] = PageViewRestriction.objects.select_related(u'page').values_list(u'page__path', flat=True) is_private = any([page.path.startswith(restricted_path) for restricted_path in context[u'all_page_view_restriction_paths']]) return (not is_private)
[ "@", "assignment_tag", "(", "takes_context", "=", "True", ")", "def", "test_page_is_public", "(", "context", ",", "page", ")", ":", "if", "(", "u'all_page_view_restriction_paths'", "not", "in", "context", ")", ":", "context", "[", "u'all_page_view_restriction_paths'...
usage: {% test_page_is_public page as is_public %} sets is_public to true iff there are no page view restrictions in place on this page .
train
false
29,426
def _remove_tag_url(question_id): return reverse('questions.remove_tag', kwargs={'question_id': question_id})
[ "def", "_remove_tag_url", "(", "question_id", ")", ":", "return", "reverse", "(", "'questions.remove_tag'", ",", "kwargs", "=", "{", "'question_id'", ":", "question_id", "}", ")" ]
return url to remove_tag for question 2 .
train
false
29,427
def _rational_reconstruction_func_coeffs(hm, p, m, ring, k): h = ring.zero for (monom, coeff) in hm.iterterms(): if (k == 0): coeffh = _rational_function_reconstruction(coeff, p, m) if (not coeffh): return None else: coeffh = ring.domain.zero for (mon, c) in coeff.drop_to_ground(k).iterterms(): ch = _rational_function_reconstruction(c, p, m) if (not ch): return None coeffh[mon] = ch h[monom] = coeffh return h
[ "def", "_rational_reconstruction_func_coeffs", "(", "hm", ",", "p", ",", "m", ",", "ring", ",", "k", ")", ":", "h", "=", "ring", ".", "zero", "for", "(", "monom", ",", "coeff", ")", "in", "hm", ".", "iterterms", "(", ")", ":", "if", "(", "k", "==...
reconstruct every coefficient c_h of a polynomial h in mathbb z_p[t_1 .
train
false
29,428
def _make_uuid(): return uuid.uuid4().hex
[ "def", "_make_uuid", "(", ")", ":", "return", "uuid", ".", "uuid4", "(", ")", ".", "hex" ]
return a 32-character uuid .
train
false
29,429
def get_size_info(api, volume): backing_file = api._root_path.descendant(['unattached', _backing_file_name(volume)]) backing_file.restat() actual = (backing_file.statinfo.st_blocks * 512) reported = backing_file.getsize() return _SizeInfo(actual=actual, reported=reported)
[ "def", "get_size_info", "(", "api", ",", "volume", ")", ":", "backing_file", "=", "api", ".", "_root_path", ".", "descendant", "(", "[", "'unattached'", ",", "_backing_file_name", "(", "volume", ")", "]", ")", "backing_file", ".", "restat", "(", ")", "actu...
retrieve information about the size of the backing file for the given volume .
train
false
29,430
def check_options(parser, options): if (options.percent_aligned > 1.0): parser.error('Please check -p option: should be between 0.0(0%) and 1.0(100%)') if (options.querydb is None): parser.error('Please check -i option: must specify path to a FASTA file') try: f = open(options.querydb, 'r') f.close() except IOError: parser.error('Please check -i option: cannot read from query FASTA filepath') if (options.subjectdb is None): parser.error('Please check -d option: must specify path to a FASTA file') try: f = open(options.subjectdb, 'r') f.close() except IOError: parser.error('Please check -d option: cannot read from subject FASTA filepath') if (options.outputdir is None): parser.error('Please check -o option: must specify the output directory path')
[ "def", "check_options", "(", "parser", ",", "options", ")", ":", "if", "(", "options", ".", "percent_aligned", ">", "1.0", ")", ":", "parser", ".", "error", "(", "'Please check -p option: should be between 0.0(0%) and 1.0(100%)'", ")", "if", "(", "options", ".", ...
check to insure required options have been supplied .
train
false
29,431
def key_from_env(passphrase=None): from fabric.state import env, output if ('key' in env): if output.debug: sys.stderr.write(('Trying to honor in-memory key %r\n' % env.key)) for pkey_class in (ssh.rsakey.RSAKey, ssh.dsskey.DSSKey): if output.debug: sys.stderr.write(('Trying to load it as %s\n' % pkey_class)) try: return pkey_class.from_private_key(StringIO(env.key), passphrase) except Exception as e: if ('Private key file is encrypted' in e): raise else: pass
[ "def", "key_from_env", "(", "passphrase", "=", "None", ")", ":", "from", "fabric", ".", "state", "import", "env", ",", "output", "if", "(", "'key'", "in", "env", ")", ":", "if", "output", ".", "debug", ":", "sys", ".", "stderr", ".", "write", "(", ...
returns a paramiko-ready key from a text string of a private key .
train
false
29,433
def _source_urls(album, sources=SOURCES_ALL): for s in sources: urls = ART_FUNCS[s](album) for url in urls: (yield url)
[ "def", "_source_urls", "(", "album", ",", "sources", "=", "SOURCES_ALL", ")", ":", "for", "s", "in", "sources", ":", "urls", "=", "ART_FUNCS", "[", "s", "]", "(", "album", ")", "for", "url", "in", "urls", ":", "(", "yield", "url", ")" ]
generate possible source urls for an albums art .
train
false
29,434
def user_key(scancode): assert (scancode > 0) return (scancode << 32)
[ "def", "user_key", "(", "scancode", ")", ":", "assert", "(", "scancode", ">", "0", ")", "return", "(", "scancode", "<<", "32", ")" ]
return a key symbol for a key not supported by pyglet .
train
false
29,435
def valid_processor_options(processors=None): if (processors is None): processors = [dynamic_import(p) for p in (settings.THUMBNAIL_PROCESSORS + settings.THUMBNAIL_SOURCE_GENERATORS)] valid_options = set(['size', 'quality', 'subsampling']) for processor in processors: args = inspect.getargspec(processor)[0] valid_options.update(args[1:]) return list(valid_options)
[ "def", "valid_processor_options", "(", "processors", "=", "None", ")", ":", "if", "(", "processors", "is", "None", ")", ":", "processors", "=", "[", "dynamic_import", "(", "p", ")", "for", "p", "in", "(", "settings", ".", "THUMBNAIL_PROCESSORS", "+", "sett...
return a list of unique valid options for a list of image processors .
train
false