Dataset schema (column name: type, observed range):
- id_within_dataset: int64, 1 to 55.5k
- snippet: string, 19 to 14.2k characters
- tokens: list, 6 to 1.63k items
- nl: string, 6 to 352 characters
- split_within_dataset: string, 1 class
- is_duplicated: bool, 2 classes
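Each row below pairs a Python snippet with its token sequence, a one-line natural-language summary (nl), its split, and a duplication flag. A minimal sketch of consuming rows of this shape, assuming each row is available as a plain dict; the `rows` literal is a hypothetical stand-in for however the dataset is actually loaded:

# Minimal sketch: iterate rows shaped like the schema above. The `rows`
# literal is a hypothetical stand-in for the real loading mechanism.
rows = [
    {
        "id_within_dataset": 9749,
        "snippet": "def getMin(value): return min(value)",
        "tokens": ["def", "getMin", "(", "value", ")", ":",
                   "return", "min", "(", "value", ")"],
        "nl": "get the min .",
        "split_within_dataset": "train",
        "is_duplicated": False,
    },
]

for row in rows:
    # Skip rows flagged as duplicates, e.g. when building a training corpus.
    if row["is_duplicated"]:
        continue
    print(row["id_within_dataset"], row["nl"], len(row["tokens"]))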
9,740
def DEFINE_spaceseplist(name, default, help, flag_values=FLAGS, **args):
    parser = WhitespaceSeparatedListParser()
    serializer = ListSerializer(' ')
    DEFINE(parser, name, default, help, flag_values, serializer, **args)
[ "def", "DEFINE_spaceseplist", "(", "name", ",", "default", ",", "help", ",", "flag_values", "=", "FLAGS", ",", "**", "args", ")", ":", "parser", "=", "WhitespaceSeparatedListParser", "(", ")", "serializer", "=", "ListSerializer", "(", "' '", ")", "DEFINE", "(", "parser", ",", "name", ",", "default", ",", "help", ",", "flag_values", ",", "serializer", ",", "**", "args", ")" ]
registers a flag whose value is a whitespace-separated list of strings .
train
false
9,741
def _FinalElement(key): return key.path().element_list()[(-1)]
[ "def", "_FinalElement", "(", "key", ")", ":", "return", "key", ".", "path", "(", ")", ".", "element_list", "(", ")", "[", "(", "-", "1", ")", "]" ]
return final element of a key's path .
train
false
9,742
def check_resource_cleanup(rsrc, template_id, resource_data, engine_id, timeout, msg_queue):
    check_message = functools.partial(_check_for_message, msg_queue)
    rsrc.delete_convergence(template_id, resource_data, engine_id, timeout, check_message)
[ "def", "check_resource_cleanup", "(", "rsrc", ",", "template_id", ",", "resource_data", ",", "engine_id", ",", "timeout", ",", "msg_queue", ")", ":", "check_message", "=", "functools", ".", "partial", "(", "_check_for_message", ",", "msg_queue", ")", "rsrc", ".", "delete_convergence", "(", "template_id", ",", "resource_data", ",", "engine_id", ",", "timeout", ",", "check_message", ")" ]
delete the resource if appropriate .
train
false
9,743
@sopel.module.nickname_commands(u'load')
@sopel.module.priority(u'low')
@sopel.module.thread(False)
def f_load(bot, trigger):
    if (not trigger.admin):
        return
    name = trigger.group(2)
    path = u''
    if (not name):
        return bot.reply(u'Load what?')
    if (name in sys.modules):
        return bot.reply(u'Module already loaded, use reload')
    mods = sopel.loader.enumerate_modules(bot.config)
    if (name not in mods):
        return bot.reply((u'Module %s not found' % name))
    (path, type_) = mods[name]
    load_module(bot, name, path, type_)
[ "@", "sopel", ".", "module", ".", "nickname_commands", "(", "u'load'", ")", "@", "sopel", ".", "module", ".", "priority", "(", "u'low'", ")", "@", "sopel", ".", "module", ".", "thread", "(", "False", ")", "def", "f_load", "(", "bot", ",", "trigger", ")", ":", "if", "(", "not", "trigger", ".", "admin", ")", ":", "return", "name", "=", "trigger", ".", "group", "(", "2", ")", "path", "=", "u''", "if", "(", "not", "name", ")", ":", "return", "bot", ".", "reply", "(", "u'Load what?'", ")", "if", "(", "name", "in", "sys", ".", "modules", ")", ":", "return", "bot", ".", "reply", "(", "u'Module already loaded, use reload'", ")", "mods", "=", "sopel", ".", "loader", ".", "enumerate_modules", "(", "bot", ".", "config", ")", "if", "(", "name", "not", "in", "mods", ")", ":", "return", "bot", ".", "reply", "(", "(", "u'Module %s not found'", "%", "name", ")", ")", "(", "path", ",", "type_", ")", "=", "mods", "[", "name", "]", "load_module", "(", "bot", ",", "name", ",", "path", ",", "type_", ")" ]
loads a module .
train
false
9,744
def slice_2d(X, rows, cols): return [X[i][j] for j in cols for i in rows]
[ "def", "slice_2d", "(", "X", ",", "rows", ",", "cols", ")", ":", "return", "[", "X", "[", "i", "]", "[", "j", "]", "for", "j", "in", "cols", "for", "i", "in", "rows", "]" ]
slices a 2d list to a flat array .
train
false
9,745
def load_ndarray_transfer(name):
    assert (name in ['avicenna', 'harry', 'rita', 'sylvester', 'terry', 'ule'])
    fname = os.path.join(preprocess('${PYLEARN2_DATA_PATH}'), 'UTLC', 'filetensor', (name + '_transfer.ft'))
    transfer = load_filetensor(fname)
    return transfer
[ "def", "load_ndarray_transfer", "(", "name", ")", ":", "assert", "(", "name", "in", "[", "'avicenna'", ",", "'harry'", ",", "'rita'", ",", "'sylvester'", ",", "'terry'", ",", "'ule'", "]", ")", "fname", "=", "os", ".", "path", ".", "join", "(", "preprocess", "(", "'${PYLEARN2_DATA_PATH}'", ")", ",", "'UTLC'", ",", "'filetensor'", ",", "(", "name", "+", "'_transfer.ft'", ")", ")", "transfer", "=", "load_filetensor", "(", "fname", ")", "return", "transfer" ]
load the transfer labels for the training set of data set name .
train
false
9,746
def add_blob_owner(bucket_name, blob_name, user_email):
    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)
    blob.acl.reload()
    blob.acl.user(user_email).grant_owner()
    blob.acl.save()
    print 'Added user {} as an owner on blob {} in bucket {}.'.format(user_email, blob_name, bucket_name)
[ "def", "add_blob_owner", "(", "bucket_name", ",", "blob_name", ",", "user_email", ")", ":", "storage_client", "=", "storage", ".", "Client", "(", ")", "bucket", "=", "storage_client", ".", "bucket", "(", "bucket_name", ")", "blob", "=", "bucket", ".", "blob", "(", "blob_name", ")", "blob", ".", "acl", ".", "reload", "(", ")", "blob", ".", "acl", ".", "user", "(", "user_email", ")", ".", "grant_owner", "(", ")", "blob", ".", "acl", ".", "save", "(", ")", "print", "'Added user {} as an owner on blob {} in bucket {}.'", ".", "format", "(", "user_email", ",", "blob_name", ",", "bucket_name", ")" ]
adds a user as an owner on the given blob .
train
false
9,747
@world.absorb
def wait_for_xmodule():
    world.wait_for_js_variable_truthy('XModule')
    world.wait_for_js_variable_truthy('XBlock')
[ "@", "world", ".", "absorb", "def", "wait_for_xmodule", "(", ")", ":", "world", ".", "wait_for_js_variable_truthy", "(", "'XModule'", ")", "world", ".", "wait_for_js_variable_truthy", "(", "'XBlock'", ")" ]
wait until the xmodule javascript has loaded on the page .
train
false
9,748
def getMin(value): return min(value)
[ "def", "getMin", "(", "value", ")", ":", "return", "min", "(", "value", ")" ]
get the min .
train
false
9,749
def exp_re(DE, r, k):
    RE = S.Zero
    g = DE.atoms(Function).pop()
    mini = None
    for t in Add.make_args(DE):
        (coeff, d) = t.as_independent(g)
        if isinstance(d, Derivative):
            j = (len(d.args) - 1)
        else:
            j = 0
        if ((mini is None) or (j < mini)):
            mini = j
        RE += (coeff * r((k + j)))
    if mini:
        RE = RE.subs(k, (k - mini))
    return RE
[ "def", "exp_re", "(", "DE", ",", "r", ",", "k", ")", ":", "RE", "=", "S", ".", "Zero", "g", "=", "DE", ".", "atoms", "(", "Function", ")", ".", "pop", "(", ")", "mini", "=", "None", "for", "t", "in", "Add", ".", "make_args", "(", "DE", ")", ":", "(", "coeff", ",", "d", ")", "=", "t", ".", "as_independent", "(", "g", ")", "if", "isinstance", "(", "d", ",", "Derivative", ")", ":", "j", "=", "(", "len", "(", "d", ".", "args", ")", "-", "1", ")", "else", ":", "j", "=", "0", "if", "(", "(", "mini", "is", "None", ")", "or", "(", "j", "<", "mini", ")", ")", ":", "mini", "=", "j", "RE", "+=", "(", "coeff", "*", "r", "(", "(", "k", "+", "j", ")", ")", ")", "if", "mini", ":", "RE", "=", "RE", ".", "subs", "(", "k", ",", "(", "k", "-", "mini", ")", ")", "return", "RE" ]
converts a de with constant coefficients into a re .
train
false
9,751
@permission_required([('Apps', 'ModerateReview')])
def queue_moderated(request):
    queues_helper = ReviewersQueuesHelper(request)
    qs = queues_helper.get_moderated_queue()
    page = paginate(request, qs, per_page=20)
    flags = dict(ReviewFlag.FLAGS)
    reviews_formset = ReviewFlagFormSet((request.POST or None), queryset=page.object_list, request=request)
    if reviews_formset.is_valid():
        reviews_formset.save()
        return redirect(reverse('reviewers.apps.queue_moderated'))
    return render(request, 'reviewers/queue.html', context(request, reviews_formset=reviews_formset, tab='moderated', page=page, flags=flags))
[ "@", "permission_required", "(", "[", "(", "'Apps'", ",", "'ModerateReview'", ")", "]", ")", "def", "queue_moderated", "(", "request", ")", ":", "queues_helper", "=", "ReviewersQueuesHelper", "(", "request", ")", "qs", "=", "queues_helper", ".", "get_moderated_queue", "(", ")", "page", "=", "paginate", "(", "request", ",", "qs", ",", "per_page", "=", "20", ")", "flags", "=", "dict", "(", "ReviewFlag", ".", "FLAGS", ")", "reviews_formset", "=", "ReviewFlagFormSet", "(", "(", "request", ".", "POST", "or", "None", ")", ",", "queryset", "=", "page", ".", "object_list", ",", "request", "=", "request", ")", "if", "reviews_formset", ".", "is_valid", "(", ")", ":", "reviews_formset", ".", "save", "(", ")", "return", "redirect", "(", "reverse", "(", "'reviewers.apps.queue_moderated'", ")", ")", "return", "render", "(", "request", ",", "'reviewers/queue.html'", ",", "context", "(", "request", ",", "reviews_formset", "=", "reviews_formset", ",", "tab", "=", "'moderated'", ",", "page", "=", "page", ",", "flags", "=", "flags", ")", ")" ]
queue for reviewing app reviews .
train
false
9,752
def cat_convert(cat):
    if (cat and (cat.lower() != 'none')):
        cats = config.get_ordered_categories()
        raw_cats = config.get_categories()
        for ucat in cats:
            try:
                indexer = raw_cats[ucat['name']].newzbin()
                if (not isinstance(indexer, list)):
                    indexer = [indexer]
            except:
                indexer = []
            for name in indexer:
                if re.search(('^%s$' % wildcard_to_re(name)), cat, re.I):
                    if ('.' in name):
                        logging.debug('Convert group "%s" to user-cat "%s"', cat, ucat['name'])
                    else:
                        logging.debug('Convert index site category "%s" to user-cat "%s"', cat, ucat['name'])
                    return ucat['name']
        for ucat in cats:
            if (cat.lower() == ucat['name'].lower()):
                logging.debug('Convert index site category "%s" to user-cat "%s"', cat, ucat['name'])
                return ucat['name']
        for ucat in cats:
            if cat.lower().startswith(ucat['name'].lower()):
                logging.debug('Convert index site category "%s" to user-cat "%s"', cat, ucat['name'])
                return ucat['name']
    return None
[ "def", "cat_convert", "(", "cat", ")", ":", "if", "(", "cat", "and", "(", "cat", ".", "lower", "(", ")", "!=", "'none'", ")", ")", ":", "cats", "=", "config", ".", "get_ordered_categories", "(", ")", "raw_cats", "=", "config", ".", "get_categories", "(", ")", "for", "ucat", "in", "cats", ":", "try", ":", "indexer", "=", "raw_cats", "[", "ucat", "[", "'name'", "]", "]", ".", "newzbin", "(", ")", "if", "(", "not", "isinstance", "(", "indexer", ",", "list", ")", ")", ":", "indexer", "=", "[", "indexer", "]", "except", ":", "indexer", "=", "[", "]", "for", "name", "in", "indexer", ":", "if", "re", ".", "search", "(", "(", "'^%s$'", "%", "wildcard_to_re", "(", "name", ")", ")", ",", "cat", ",", "re", ".", "I", ")", ":", "if", "(", "'.'", "in", "name", ")", ":", "logging", ".", "debug", "(", "'Convert group \"%s\" to user-cat \"%s\"'", ",", "cat", ",", "ucat", "[", "'name'", "]", ")", "else", ":", "logging", ".", "debug", "(", "'Convert index site category \"%s\" to user-cat \"%s\"'", ",", "cat", ",", "ucat", "[", "'name'", "]", ")", "return", "ucat", "[", "'name'", "]", "for", "ucat", "in", "cats", ":", "if", "(", "cat", ".", "lower", "(", ")", "==", "ucat", "[", "'name'", "]", ".", "lower", "(", ")", ")", ":", "logging", ".", "debug", "(", "'Convert index site category \"%s\" to user-cat \"%s\"'", ",", "cat", ",", "ucat", "[", "'name'", "]", ")", "return", "ucat", "[", "'name'", "]", "for", "ucat", "in", "cats", ":", "if", "cat", ".", "lower", "(", ")", ".", "startswith", "(", "ucat", "[", "'name'", "]", ".", "lower", "(", ")", ")", ":", "logging", ".", "debug", "(", "'Convert index site category \"%s\" to user-cat \"%s\"'", ",", "cat", ",", "ucat", "[", "'name'", "]", ")", "return", "ucat", "[", "'name'", "]", "return", "None" ]
convert indexers category/group-name to user categories .
train
false
9,755
def send_claim_registered_email(claimer, unclaimed_user, node, throttle=(24 * 3600)):
    unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key)
    timestamp = unclaimed_record.get('last_sent')
    if (not throttle_period_expired(timestamp, throttle)):
        raise HTTPError(http.BAD_REQUEST, data=dict(message_long='User account can only be claimed with an existing user once every 24 hours'))
    verification_key = generate_verification_key(verification_type='claim')
    unclaimed_record['token'] = verification_key['token']
    unclaimed_record['expires'] = verification_key['expires']
    unclaimed_record['claimer_email'] = claimer.username
    unclaimed_user.save()
    referrer = User.load(unclaimed_record['referrer_id'])
    claim_url = web_url_for('claim_user_registered', uid=unclaimed_user._primary_key, pid=node._primary_key, token=unclaimed_record['token'], _external=True)
    mails.send_mail(referrer.username, mails.FORWARD_INVITE_REGISTERED, user=unclaimed_user, referrer=referrer, node=node, claim_url=claim_url, fullname=unclaimed_record['name'])
    unclaimed_record['last_sent'] = get_timestamp()
    unclaimed_user.save()
    mails.send_mail(claimer.username, mails.PENDING_VERIFICATION_REGISTERED, fullname=claimer.fullname, referrer=referrer, node=node)
[ "def", "send_claim_registered_email", "(", "claimer", ",", "unclaimed_user", ",", "node", ",", "throttle", "=", "(", "24", "*", "3600", ")", ")", ":", "unclaimed_record", "=", "unclaimed_user", ".", "get_unclaimed_record", "(", "node", ".", "_primary_key", ")", "timestamp", "=", "unclaimed_record", ".", "get", "(", "'last_sent'", ")", "if", "(", "not", "throttle_period_expired", "(", "timestamp", ",", "throttle", ")", ")", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ",", "data", "=", "dict", "(", "message_long", "=", "'User account can only be claimed with an existing user once every 24 hours'", ")", ")", "verification_key", "=", "generate_verification_key", "(", "verification_type", "=", "'claim'", ")", "unclaimed_record", "[", "'token'", "]", "=", "verification_key", "[", "'token'", "]", "unclaimed_record", "[", "'expires'", "]", "=", "verification_key", "[", "'expires'", "]", "unclaimed_record", "[", "'claimer_email'", "]", "=", "claimer", ".", "username", "unclaimed_user", ".", "save", "(", ")", "referrer", "=", "User", ".", "load", "(", "unclaimed_record", "[", "'referrer_id'", "]", ")", "claim_url", "=", "web_url_for", "(", "'claim_user_registered'", ",", "uid", "=", "unclaimed_user", ".", "_primary_key", ",", "pid", "=", "node", ".", "_primary_key", ",", "token", "=", "unclaimed_record", "[", "'token'", "]", ",", "_external", "=", "True", ")", "mails", ".", "send_mail", "(", "referrer", ".", "username", ",", "mails", ".", "FORWARD_INVITE_REGISTERED", ",", "user", "=", "unclaimed_user", ",", "referrer", "=", "referrer", ",", "node", "=", "node", ",", "claim_url", "=", "claim_url", ",", "fullname", "=", "unclaimed_record", "[", "'name'", "]", ")", "unclaimed_record", "[", "'last_sent'", "]", "=", "get_timestamp", "(", ")", "unclaimed_user", ".", "save", "(", ")", "mails", ".", "send_mail", "(", "claimer", ".", "username", ",", "mails", ".", "PENDING_VERIFICATION_REGISTERED", ",", "fullname", "=", "claimer", ".", "fullname", ",", "referrer", "=", "referrer", ",", "node", "=", "node", ")" ]
a registered user claiming the unclaimed user account as a contributor to a project .
train
false
9,756
def _makePipe():
    (r, w) = pipe()
    return (_FDHolder(r), _FDHolder(w))
[ "def", "_makePipe", "(", ")", ":", "(", "r", ",", "w", ")", "=", "pipe", "(", ")", "return", "(", "_FDHolder", "(", "r", ")", ",", "_FDHolder", "(", "w", ")", ")" ]
create a pipe .
train
false
9,757
@parse_data
@set_database
def get_content_items(ids=None, **kwargs):
    if ids:
        values = Item.select().where(Item.id.in_(ids))
    else:
        values = Item.select()
    return values
[ "@", "parse_data", "@", "set_database", "def", "get_content_items", "(", "ids", "=", "None", ",", "**", "kwargs", ")", ":", "if", "ids", ":", "values", "=", "Item", ".", "select", "(", ")", ".", "where", "(", "Item", ".", "id", ".", "in_", "(", "ids", ")", ")", "else", ":", "values", "=", "Item", ".", "select", "(", ")", "return", "values" ]
convenience function for returning multiple topic tree nodes for use in rendering content .
train
false
9,758
def allocate_ids_async(model, size, **kwargs): return datastore.AllocateIdsAsync(_coerce_to_key(model), size=size, **kwargs)
[ "def", "allocate_ids_async", "(", "model", ",", "size", ",", "**", "kwargs", ")", ":", "return", "datastore", ".", "AllocateIdsAsync", "(", "_coerce_to_key", "(", "model", ")", ",", "size", "=", "size", ",", "**", "kwargs", ")" ]
asynchronously allocates a range of ids .
train
false
9,759
def populate_entry_points(entry_points):
    for entry_point in entry_points:
        name = entry_point.name
        try:
            entry_point = entry_point.load()
        except Exception as e:
            warnings.warn(AstropyUserWarning(u'{type} error occurred in entry point {name}.'.format(type=type(e).__name__, name=name)))
        else:
            if (not inspect.isclass(entry_point)):
                warnings.warn(AstropyUserWarning(u'Modeling entry point {0} expected to be a Class.'.format(name)))
            elif issubclass(entry_point, Fitter):
                name = entry_point.__name__
                globals()[name] = entry_point
                __all__.append(name)
            else:
                warnings.warn(AstropyUserWarning(u'Modeling entry point {0} expected to extend astropy.modeling.Fitter'.format(name)))
[ "def", "populate_entry_points", "(", "entry_points", ")", ":", "for", "entry_point", "in", "entry_points", ":", "name", "=", "entry_point", ".", "name", "try", ":", "entry_point", "=", "entry_point", ".", "load", "(", ")", "except", "Exception", "as", "e", ":", "warnings", ".", "warn", "(", "AstropyUserWarning", "(", "u'{type} error occurred in entry point {name}.'", ".", "format", "(", "type", "=", "type", "(", "e", ")", ".", "__name__", ",", "name", "=", "name", ")", ")", ")", "else", ":", "if", "(", "not", "inspect", ".", "isclass", "(", "entry_point", ")", ")", ":", "warnings", ".", "warn", "(", "AstropyUserWarning", "(", "u'Modeling entry point {0} expected to be a Class.'", ".", "format", "(", "name", ")", ")", ")", "elif", "issubclass", "(", "entry_point", ",", "Fitter", ")", ":", "name", "=", "entry_point", ".", "__name__", "globals", "(", ")", "[", "name", "]", "=", "entry_point", "__all__", ".", "append", "(", "name", ")", "else", ":", "warnings", ".", "warn", "(", "AstropyUserWarning", "(", "u'Modeling entry point {0} expected to extend astropy.modeling.Fitter'", ".", "format", "(", "name", ")", ")", ")" ]
this injects entry points into the astropy.modeling.fitting namespace .
train
false
9,763
def _CompareTasksByEta(a, b):
    if (a.eta_usec() > b.eta_usec()):
        return 1
    if (a.eta_usec() < b.eta_usec()):
        return (-1)
    return 0
[ "def", "_CompareTasksByEta", "(", "a", ",", "b", ")", ":", "if", "(", "a", ".", "eta_usec", "(", ")", ">", "b", ".", "eta_usec", "(", ")", ")", ":", "return", "1", "if", "(", "a", ".", "eta_usec", "(", ")", "<", "b", ".", "eta_usec", "(", ")", ")", ":", "return", "(", "-", "1", ")", "return", "0" ]
python sort comparator for tasks by estimated time of arrival .
train
false
9,766
def mask_hash(hash, show=6, char=u'*'):
    masked = hash[:show]
    masked += (char * len(hash[show:]))
    return masked
[ "def", "mask_hash", "(", "hash", ",", "show", "=", "6", ",", "char", "=", "u'*'", ")", ":", "masked", "=", "hash", "[", ":", "show", "]", "masked", "+=", "(", "char", "*", "len", "(", "hash", "[", "show", ":", "]", ")", ")", "return", "masked" ]
returns the given hash with all but the first few characters masked .
train
true
9,767
def patch_get_utility(target='zope.component.getUtility'): return mock.patch(target, new_callable=_create_get_utility_mock)
[ "def", "patch_get_utility", "(", "target", "=", "'zope.component.getUtility'", ")", ":", "return", "mock", ".", "patch", "(", "target", ",", "new_callable", "=", "_create_get_utility_mock", ")" ]
patch zope .
train
false
9,769
def isSegmentCompletelyInAnIntersection(segment, xIntersections):
    for xIntersectionIndex in xrange(0, len(xIntersections), 2):
        surroundingXFirst = xIntersections[xIntersectionIndex]
        surroundingXSecond = xIntersections[(xIntersectionIndex + 1)]
        if euclidean.isSegmentCompletelyInX(segment, surroundingXFirst, surroundingXSecond):
            return True
    return False
[ "def", "isSegmentCompletelyInAnIntersection", "(", "segment", ",", "xIntersections", ")", ":", "for", "xIntersectionIndex", "in", "xrange", "(", "0", ",", "len", "(", "xIntersections", ")", ",", "2", ")", ":", "surroundingXFirst", "=", "xIntersections", "[", "xIntersectionIndex", "]", "surroundingXSecond", "=", "xIntersections", "[", "(", "xIntersectionIndex", "+", "1", ")", "]", "if", "euclidean", ".", "isSegmentCompletelyInX", "(", "segment", ",", "surroundingXFirst", ",", "surroundingXSecond", ")", ":", "return", "True", "return", "False" ]
add sparse endpoints from a segment .
train
false
9,773
def _chkconfig_add(name):
    cmd = '/sbin/chkconfig --add {0}'.format(name)
    if (__salt__['cmd.retcode'](cmd, python_shell=False) == 0):
        log.info('Added initscript "{0}" to chkconfig'.format(name))
        return True
    else:
        log.error('Unable to add initscript "{0}" to chkconfig'.format(name))
        return False
[ "def", "_chkconfig_add", "(", "name", ")", ":", "cmd", "=", "'/sbin/chkconfig --add {0}'", ".", "format", "(", "name", ")", "if", "(", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "==", "0", ")", ":", "log", ".", "info", "(", "'Added initscript \"{0}\" to chkconfig'", ".", "format", "(", "name", ")", ")", "return", "True", "else", ":", "log", ".", "error", "(", "'Unable to add initscript \"{0}\" to chkconfig'", ".", "format", "(", "name", ")", ")", "return", "False" ]
run chkconfig --add for a service whose script is installed in /etc/init.d .
train
true
9,775
def get_constr_constant(constraints):
    constants = [get_constant(c.expr) for c in constraints]
    return np.hstack(constants)
[ "def", "get_constr_constant", "(", "constraints", ")", ":", "constants", "=", "[", "get_constant", "(", "c", ".", "expr", ")", "for", "c", "in", "constraints", "]", "return", "np", ".", "hstack", "(", "constants", ")" ]
returns the constant term for the constraints matrix .
train
false
9,776
def parse_fp(source, module_name, lexer=None, parser=None, enable_cache=True):
    if (not module_name.endswith('_thrift')):
        raise ThriftParserError("ThriftPy can only generate module with '_thrift' suffix")
    if (enable_cache and (module_name in thrift_cache)):
        return thrift_cache[module_name]
    if (not hasattr(source, 'read')):
        raise ThriftParserError("Except `source` to be a file-like object witha method named 'read'")
    if (lexer is None):
        lexer = lex.lex()
    if (parser is None):
        parser = yacc.yacc(debug=False, write_tables=0)
    data = source.read()
    thrift = types.ModuleType(module_name)
    setattr(thrift, '__thrift_file__', None)
    thrift_stack.append(thrift)
    lexer.lineno = 1
    parser.parse(data)
    thrift_stack.pop()
    if enable_cache:
        thrift_cache[module_name] = thrift
    return thrift
[ "def", "parse_fp", "(", "source", ",", "module_name", ",", "lexer", "=", "None", ",", "parser", "=", "None", ",", "enable_cache", "=", "True", ")", ":", "if", "(", "not", "module_name", ".", "endswith", "(", "'_thrift'", ")", ")", ":", "raise", "ThriftParserError", "(", "\"ThriftPy can only generate module with '_thrift' suffix\"", ")", "if", "(", "enable_cache", "and", "(", "module_name", "in", "thrift_cache", ")", ")", ":", "return", "thrift_cache", "[", "module_name", "]", "if", "(", "not", "hasattr", "(", "source", ",", "'read'", ")", ")", ":", "raise", "ThriftParserError", "(", "\"Except `source` to be a file-like object witha method named 'read'\"", ")", "if", "(", "lexer", "is", "None", ")", ":", "lexer", "=", "lex", ".", "lex", "(", ")", "if", "(", "parser", "is", "None", ")", ":", "parser", "=", "yacc", ".", "yacc", "(", "debug", "=", "False", ",", "write_tables", "=", "0", ")", "data", "=", "source", ".", "read", "(", ")", "thrift", "=", "types", ".", "ModuleType", "(", "module_name", ")", "setattr", "(", "thrift", ",", "'__thrift_file__'", ",", "None", ")", "thrift_stack", ".", "append", "(", "thrift", ")", "lexer", ".", "lineno", "=", "1", "parser", ".", "parse", "(", "data", ")", "thrift_stack", ".", "pop", "(", ")", "if", "enable_cache", ":", "thrift_cache", "[", "module_name", "]", "=", "thrift", "return", "thrift" ]
parse a file-like object to thrift module object .
train
false
9,777
def save_tweets(filename, tweets):
    if (len(tweets) == 0):
        return
    try:
        archive = open(filename, 'w')
    except IOError as e:
        err(('Cannot save tweets: %s' % str(e)))
        return
    for k in sorted(tweets.keys()):
        try:
            archive.write(('%i %s\n' % (k, tweets[k].encode('utf-8'))))
        except Exception as ex:
            err(('archiving tweet %s failed due to %s' % (k, unicode(ex))))
    archive.close()
[ "def", "save_tweets", "(", "filename", ",", "tweets", ")", ":", "if", "(", "len", "(", "tweets", ")", "==", "0", ")", ":", "return", "try", ":", "archive", "=", "open", "(", "filename", ",", "'w'", ")", "except", "IOError", "as", "e", ":", "err", "(", "(", "'Cannot save tweets: %s'", "%", "str", "(", "e", ")", ")", ")", "return", "for", "k", "in", "sorted", "(", "tweets", ".", "keys", "(", ")", ")", ":", "try", ":", "archive", ".", "write", "(", "(", "'%i %s\\n'", "%", "(", "k", ",", "tweets", "[", "k", "]", ".", "encode", "(", "'utf-8'", ")", ")", ")", ")", "except", "Exception", "as", "ex", ":", "err", "(", "(", "'archiving tweet %s failed due to %s'", "%", "(", "k", ",", "unicode", "(", "ex", ")", ")", ")", ")", "archive", ".", "close", "(", ")" ]
save tweets from dict to file .
train
false
9,778
def autoscroll(sbar, first, last):
    (first, last) = (float(first), float(last))
    if ((first <= 0) and (last >= 1)):
        sbar.grid_remove()
    else:
        sbar.grid()
    sbar.set(first, last)
[ "def", "autoscroll", "(", "sbar", ",", "first", ",", "last", ")", ":", "(", "first", ",", "last", ")", "=", "(", "float", "(", "first", ")", ",", "float", "(", "last", ")", ")", "if", "(", "(", "first", "<=", "0", ")", "and", "(", "last", ">=", "1", ")", ")", ":", "sbar", ".", "grid_remove", "(", ")", "else", ":", "sbar", ".", "grid", "(", ")", "sbar", ".", "set", "(", "first", ",", "last", ")" ]
hide and show scrollbar as needed .
train
false
9,780
def g_connect(method):
    def wrapped(self, *args, **kwargs):
        if (not self.initialized):
            display.vvvv(('Initial connection to galaxy_server: %s' % self._api_server))
            server_version = self._get_server_api_version()
            if (server_version not in self.SUPPORTED_VERSIONS):
                raise AnsibleError(('Unsupported Galaxy server API version: %s' % server_version))
            self.baseurl = ('%s/api/%s' % (self._api_server, server_version))
            self.version = server_version
            display.vvvv(('Base API: %s' % self.baseurl))
            self.initialized = True
        return method(self, *args, **kwargs)
    return wrapped
[ "def", "g_connect", "(", "method", ")", ":", "def", "wrapped", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "self", ".", "initialized", ")", ":", "display", ".", "vvvv", "(", "(", "'Initial connection to galaxy_server: %s'", "%", "self", ".", "_api_server", ")", ")", "server_version", "=", "self", ".", "_get_server_api_version", "(", ")", "if", "(", "server_version", "not", "in", "self", ".", "SUPPORTED_VERSIONS", ")", ":", "raise", "AnsibleError", "(", "(", "'Unsupported Galaxy server API version: %s'", "%", "server_version", ")", ")", "self", ".", "baseurl", "=", "(", "'%s/api/%s'", "%", "(", "self", ".", "_api_server", ",", "server_version", ")", ")", "self", ".", "version", "=", "server_version", "display", ".", "vvvv", "(", "(", "'Base API: %s'", "%", "self", ".", "baseurl", ")", ")", "self", ".", "initialized", "=", "True", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped" ]
wrapper to lazily initialize connection info to galaxy .
train
false
9,785
def POST(k, n): return _XXX(k, n, _POST)
[ "def", "POST", "(", "k", ",", "n", ")", ":", "return", "_XXX", "(", "k", ",", "n", ",", "_POST", ")" ]
munging to turn a method name into a post-hook-method-name .
train
false
9,787
def make_assert(error):
    def openssl_assert(ok):
        """
        If *ok* is not True, retrieve the error from OpenSSL and raise it.
        """
        if (ok is not True):
            exception_from_error_queue(error)
    return openssl_assert
[ "def", "make_assert", "(", "error", ")", ":", "def", "openssl_assert", "(", "ok", ")", ":", "if", "(", "ok", "is", "not", "True", ")", ":", "exception_from_error_queue", "(", "error", ")", "return", "openssl_assert" ]
create an assert function that uses :func:exception_from_error_queue to raise an exception wrapped by *error* .
train
false
9,789
def ScaleData(data, old_min, old_max, new_min, new_max):
    def ScalePoint(x):
        if (x is None):
            return None
        return ((scale * x) + translate)
    if (old_min == old_max):
        scale = 1
    else:
        scale = ((new_max - new_min) / float((old_max - old_min)))
    translate = (new_min - (scale * old_min))
    return map(ScalePoint, data)
[ "def", "ScaleData", "(", "data", ",", "old_min", ",", "old_max", ",", "new_min", ",", "new_max", ")", ":", "def", "ScalePoint", "(", "x", ")", ":", "if", "(", "x", "is", "None", ")", ":", "return", "None", "return", "(", "(", "scale", "*", "x", ")", "+", "translate", ")", "if", "(", "old_min", "==", "old_max", ")", ":", "scale", "=", "1", "else", ":", "scale", "=", "(", "(", "new_max", "-", "new_min", ")", "/", "float", "(", "(", "old_max", "-", "old_min", ")", ")", ")", "translate", "=", "(", "new_min", "-", "(", "scale", "*", "old_min", ")", ")", "return", "map", "(", "ScalePoint", ",", "data", ")" ]
scale the input data so that the range old_min-old_max maps to new_min-new_max .
train
false
9,790
def dump_object(header, obj):
    result = (header + '\n')
    for key in obj.hash:
        if ((key == 'afe') or (key == 'hash')):
            continue
        result += ('%20s: %s\n' % (key, obj.hash[key]))
    return result
[ "def", "dump_object", "(", "header", ",", "obj", ")", ":", "result", "=", "(", "header", "+", "'\\n'", ")", "for", "key", "in", "obj", ".", "hash", ":", "if", "(", "(", "key", "==", "'afe'", ")", "or", "(", "key", "==", "'hash'", ")", ")", ":", "continue", "result", "+=", "(", "'%20s: %s\\n'", "%", "(", "key", ",", "obj", ".", "hash", "[", "key", "]", ")", ")", "return", "result" ]
dump an object's attributes and methods , kind of like dir() .
train
false
9,791
def create_theme(name, **extra_kwargs):
    kwargs = {'status': STATUS_PUBLIC, 'name': name, 'slug': slugify(name),
              'bayesian_rating': random.uniform(1, 5),
              'average_daily_users': random.randint(200, 2000),
              'weekly_downloads': random.randint(200, 2000),
              'created': datetime.now(), 'last_updated': datetime.now()}
    kwargs.update(extra_kwargs)
    theme = Addon.objects.create(type=ADDON_EXTENSION, **kwargs)
    generate_version(addon=theme)
    theme.update_version()
    theme.status = STATUS_PUBLIC
    theme.type = ADDON_PERSONA
    Persona.objects.create(addon=theme, popularity=theme.weekly_downloads, persona_id=0)
    theme.save()
    return theme
[ "def", "create_theme", "(", "name", ",", "**", "extra_kwargs", ")", ":", "kwargs", "=", "{", "'status'", ":", "STATUS_PUBLIC", ",", "'name'", ":", "name", ",", "'slug'", ":", "slugify", "(", "name", ")", ",", "'bayesian_rating'", ":", "random", ".", "uniform", "(", "1", ",", "5", ")", ",", "'average_daily_users'", ":", "random", ".", "randint", "(", "200", ",", "2000", ")", ",", "'weekly_downloads'", ":", "random", ".", "randint", "(", "200", ",", "2000", ")", ",", "'created'", ":", "datetime", ".", "now", "(", ")", ",", "'last_updated'", ":", "datetime", ".", "now", "(", ")", "}", "kwargs", ".", "update", "(", "extra_kwargs", ")", "theme", "=", "Addon", ".", "objects", ".", "create", "(", "type", "=", "ADDON_EXTENSION", ",", "**", "kwargs", ")", "generate_version", "(", "addon", "=", "theme", ")", "theme", ".", "update_version", "(", ")", "theme", ".", "status", "=", "STATUS_PUBLIC", "theme", ".", "type", "=", "ADDON_PERSONA", "Persona", ".", "objects", ".", "create", "(", "addon", "=", "theme", ",", "popularity", "=", "theme", ".", "weekly_downloads", ",", "persona_id", "=", "0", ")", "theme", ".", "save", "(", ")", "return", "theme" ]
create a theme with the given name .
train
false
9,792
@app.route('/scans/<int:scan_id>/exceptions/', methods=['POST'])
@requires_auth
def exception_creator(scan_id):
    scan_info = get_scan_info_from_id(scan_id)
    if (scan_info is None):
        abort(404, 'Scan not found')
    current_status = FakeStatus(None)
    current_status.set_running_plugin('phase', 'plugin')
    current_status.set_current_fuzzable_request('phase', 'http://www.w3af.org/')
    try:
        raise Exception('unittest')
    except Exception as exception:
        exec_info = sys.exc_info()
        enabled_plugins = ''
        scan_info.w3af_core.exception_handler.write_crash_file = (lambda x: x)
        scan_info.w3af_core.exception_handler.handle(current_status, exception, exec_info, enabled_plugins)
    return (jsonify({'code': 201}), 201)
[ "@", "app", ".", "route", "(", "'/scans/<int:scan_id>/exceptions/'", ",", "methods", "=", "[", "'POST'", "]", ")", "@", "requires_auth", "def", "exception_creator", "(", "scan_id", ")", ":", "scan_info", "=", "get_scan_info_from_id", "(", "scan_id", ")", "if", "(", "scan_info", "is", "None", ")", ":", "abort", "(", "404", ",", "'Scan not found'", ")", "current_status", "=", "FakeStatus", "(", "None", ")", "current_status", ".", "set_running_plugin", "(", "'phase'", ",", "'plugin'", ")", "current_status", ".", "set_current_fuzzable_request", "(", "'phase'", ",", "'http://www.w3af.org/'", ")", "try", ":", "raise", "Exception", "(", "'unittest'", ")", "except", "Exception", "as", "exception", ":", "exec_info", "=", "sys", ".", "exc_info", "(", ")", "enabled_plugins", "=", "''", "scan_info", ".", "w3af_core", ".", "exception_handler", ".", "write_crash_file", "=", "(", "lambda", "x", ":", "x", ")", "scan_info", ".", "w3af_core", ".", "exception_handler", ".", "handle", "(", "current_status", ",", "exception", ",", "exec_info", ",", "enabled_plugins", ")", "return", "(", "jsonify", "(", "{", "'code'", ":", "201", "}", ")", ",", "201", ")" ]
mostly for testing .
train
false
9,793
def plot_2_and_1(images):
    fig = plt.figure()
    ax = fig.add_subplot(1, 2, 1)
    ax.matshow(images[5], cmap=matplotlib.cm.binary)
    plt.xticks(np.array([]))
    plt.yticks(np.array([]))
    ax = fig.add_subplot(1, 2, 2)
    ax.matshow(images[3], cmap=matplotlib.cm.binary)
    plt.xticks(np.array([]))
    plt.yticks(np.array([]))
    plt.show()
[ "def", "plot_2_and_1", "(", "images", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "1", ",", "2", ",", "1", ")", "ax", ".", "matshow", "(", "images", "[", "5", "]", ",", "cmap", "=", "matplotlib", ".", "cm", ".", "binary", ")", "plt", ".", "xticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "yticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "ax", "=", "fig", ".", "add_subplot", "(", "1", ",", "2", ",", "2", ")", "ax", ".", "matshow", "(", "images", "[", "3", "]", ",", "cmap", "=", "matplotlib", ".", "cm", ".", "binary", ")", "plt", ".", "xticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "yticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "show", "(", ")" ]
plot a 2 and a 1 image from the mnist set .
train
false
9,796
def generate_random_edx_username():
    allowable_chars = (string.ascii_letters + string.digits)
    username = ''
    for _index in range(30):
        username = (username + random.SystemRandom().choice(allowable_chars))
    return username
[ "def", "generate_random_edx_username", "(", ")", ":", "allowable_chars", "=", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "username", "=", "''", "for", "_index", "in", "range", "(", "30", ")", ":", "username", "=", "(", "username", "+", "random", ".", "SystemRandom", "(", ")", ".", "choice", "(", "allowable_chars", ")", ")", "return", "username" ]
create a valid random edx user id .
train
false
9,798
def run_mcr_job(job):
    log('Running a compiled Matlab job.\n')
    os.chdir(job.expt_dir)
    if os.environ.has_key('MATLAB'):
        mcr_loc = os.environ['MATLAB']
    else:
        mcr_loc = MCR_LOCATION
    cmd = ('./run_%s.sh %s %s' % (job.name, mcr_loc, job_file_for(job)))
    log(("Executing command '%s'\n" % cmd))
    sh(cmd)
[ "def", "run_mcr_job", "(", "job", ")", ":", "log", "(", "'Running a compiled Matlab job.\\n'", ")", "os", ".", "chdir", "(", "job", ".", "expt_dir", ")", "if", "os", ".", "environ", ".", "has_key", "(", "'MATLAB'", ")", ":", "mcr_loc", "=", "os", ".", "environ", "[", "'MATLAB'", "]", "else", ":", "mcr_loc", "=", "MCR_LOCATION", "cmd", "=", "(", "'./run_%s.sh %s %s'", "%", "(", "job", ".", "name", ",", "mcr_loc", ",", "job_file_for", "(", "job", ")", ")", ")", "log", "(", "(", "\"Executing command '%s'\\n\"", "%", "cmd", ")", ")", "sh", "(", "cmd", ")" ]
run a compiled matlab job .
train
false
9,799
def iseia(r, valid_types=(E6, E12, E24)):
    if ((not isinstance(r, numbers.Number)) or (r < 0) or math.isnan(r) or math.isinf(r)):
        return False
    if (r == 0):
        return True
    while (r < 100):
        r = (r * 10)
    while (r >= 1000):
        r = (r / 10)
    if (abs((r - round(r))) > 0.01):
        return False
    r = int(round(r))
    for type_list in valid_types:
        if (r in type_list):
            return True
        if ((int((r / 10.0)) in type_list) and ((r % 10) == 0)):
            return True
    return False
[ "def", "iseia", "(", "r", ",", "valid_types", "=", "(", "E6", ",", "E12", ",", "E24", ")", ")", ":", "if", "(", "(", "not", "isinstance", "(", "r", ",", "numbers", ".", "Number", ")", ")", "or", "(", "r", "<", "0", ")", "or", "math", ".", "isnan", "(", "r", ")", "or", "math", ".", "isinf", "(", "r", ")", ")", ":", "return", "False", "if", "(", "r", "==", "0", ")", ":", "return", "True", "while", "(", "r", "<", "100", ")", ":", "r", "=", "(", "r", "*", "10", ")", "while", "(", "r", ">=", "1000", ")", ":", "r", "=", "(", "r", "/", "10", ")", "if", "(", "abs", "(", "(", "r", "-", "round", "(", "r", ")", ")", ")", ">", "0.01", ")", ":", "return", "False", "r", "=", "int", "(", "round", "(", "r", ")", ")", "for", "type_list", "in", "valid_types", ":", "if", "(", "r", "in", "type_list", ")", ":", "return", "True", "if", "(", "(", "int", "(", "(", "r", "/", "10.0", ")", ")", "in", "type_list", ")", "and", "(", "(", "r", "%", "10", ")", "==", "0", ")", ")", ":", "return", "True", "return", "False" ]
check if a component is a valid eia value .
train
false
9,800
def char_from_number(number):
    base = 26
    rval = ''
    if (number == 0):
        rval = 'A'
    while (number != 0):
        remainder = (number % base)
        new_char = chr((ord('A') + remainder))
        rval = (new_char + rval)
        number //= base
    return rval
[ "def", "char_from_number", "(", "number", ")", ":", "base", "=", "26", "rval", "=", "''", "if", "(", "number", "==", "0", ")", ":", "rval", "=", "'A'", "while", "(", "number", "!=", "0", ")", ":", "remainder", "=", "(", "number", "%", "base", ")", "new_char", "=", "chr", "(", "(", "ord", "(", "'A'", ")", "+", "remainder", ")", ")", "rval", "=", "(", "new_char", "+", "rval", ")", "number", "//=", "base", "return", "rval" ]
converts number to string by rendering it in base 26 using capital letters as digits .
train
false
9,803
def tquery(query, con=None, cur=None):
    res = sql.execute(query, con=con, cur=cur).fetchall()
    if (res is None):
        return None
    else:
        return list(res)
[ "def", "tquery", "(", "query", ",", "con", "=", "None", ",", "cur", "=", "None", ")", ":", "res", "=", "sql", ".", "execute", "(", "query", ",", "con", "=", "con", ",", "cur", "=", "cur", ")", ".", "fetchall", "(", ")", "if", "(", "res", "is", "None", ")", ":", "return", "None", "else", ":", "return", "list", "(", "res", ")" ]
replacement for the removed sql.tquery .
train
false
9,804
def sob(unicode, encoding):
    if (encoding is None):
        return unicode
    else:
        return unicode.encode(encoding)
[ "def", "sob", "(", "unicode", ",", "encoding", ")", ":", "if", "(", "encoding", "is", "None", ")", ":", "return", "unicode", "else", ":", "return", "unicode", ".", "encode", "(", "encoding", ")" ]
returns either the given unicode string or its encoding .
train
false
9,805
def getInsetSeparateLoopsFromLoops(loops, radius, thresholdRatio=0.9):
    if (radius == 0.0):
        return loops
    isInset = (radius > 0)
    insetSeparateLoops = []
    arounds = getAroundsFromLoops(loops, abs(radius), thresholdRatio)
    for around in arounds:
        if (isInset == euclidean.getIsInFilledRegion(loops, around[0])):
            if isInset:
                around.reverse()
            insetSeparateLoops.append(around)
    return insetSeparateLoops
[ "def", "getInsetSeparateLoopsFromLoops", "(", "loops", ",", "radius", ",", "thresholdRatio", "=", "0.9", ")", ":", "if", "(", "radius", "==", "0.0", ")", ":", "return", "loops", "isInset", "=", "(", "radius", ">", "0", ")", "insetSeparateLoops", "=", "[", "]", "arounds", "=", "getAroundsFromLoops", "(", "loops", ",", "abs", "(", "radius", ")", ",", "thresholdRatio", ")", "for", "around", "in", "arounds", ":", "if", "(", "isInset", "==", "euclidean", ".", "getIsInFilledRegion", "(", "loops", ",", "around", "[", "0", "]", ")", ")", ":", "if", "isInset", ":", "around", ".", "reverse", "(", ")", "insetSeparateLoops", ".", "append", "(", "around", ")", "return", "insetSeparateLoops" ]
get the separate inset loops .
train
false
9,807
def transpose_inplace(x, **kwargs):
    dims = list(range((x.ndim - 1), (-1), (-1)))
    return elemwise.DimShuffle(x.broadcastable, dims, inplace=True)(x)
[ "def", "transpose_inplace", "(", "x", ",", "**", "kwargs", ")", ":", "dims", "=", "list", "(", "range", "(", "(", "x", ".", "ndim", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ")", "return", "elemwise", ".", "DimShuffle", "(", "x", ".", "broadcastable", ",", "dims", ",", "inplace", "=", "True", ")", "(", "x", ")" ]
perform a transpose on a tensor without copying the underlying storage .
train
false
9,808
def getCylindrical(azimuthDegrees, radius=1.0, z=0.0): return getCylindricalByRadians(math.radians(azimuthDegrees), radius, z)
[ "def", "getCylindrical", "(", "azimuthDegrees", ",", "radius", "=", "1.0", ",", "z", "=", "0.0", ")", ":", "return", "getCylindricalByRadians", "(", "math", ".", "radians", "(", "azimuthDegrees", ")", ",", "radius", ",", "z", ")" ]
get the cylindrical vector3 by degrees .
train
false
9,811
def convoltuion_shape(img_height, img_width, filter_shape, stride, padding):
    height = ((((img_height + (2 * padding[0])) - filter_shape[0]) / float(stride[0])) + 1)
    width = ((((img_width + (2 * padding[1])) - filter_shape[1]) / float(stride[1])) + 1)
    assert ((height % 1) == 0)
    assert ((width % 1) == 0)
    return (int(height), int(width))
[ "def", "convoltuion_shape", "(", "img_height", ",", "img_width", ",", "filter_shape", ",", "stride", ",", "padding", ")", ":", "height", "=", "(", "(", "(", "(", "img_height", "+", "(", "2", "*", "padding", "[", "0", "]", ")", ")", "-", "filter_shape", "[", "0", "]", ")", "/", "float", "(", "stride", "[", "0", "]", ")", ")", "+", "1", ")", "width", "=", "(", "(", "(", "(", "img_width", "+", "(", "2", "*", "padding", "[", "1", "]", ")", ")", "-", "filter_shape", "[", "1", "]", ")", "/", "float", "(", "stride", "[", "1", "]", ")", ")", "+", "1", ")", "assert", "(", "(", "height", "%", "1", ")", "==", "0", ")", "assert", "(", "(", "width", "%", "1", ")", "==", "0", ")", "return", "(", "int", "(", "height", ")", ",", "int", "(", "width", ")", ")" ]
calculate output shape for convolution layer .
train
false
9,812
def create_youtube_string(module):
    youtube_ids = [module.youtube_id_0_75, module.youtube_id_1_0, module.youtube_id_1_25, module.youtube_id_1_5]
    youtube_speeds = ['0.75', '1.00', '1.25', '1.50']
    return ','.join([':'.join(pair) for pair in zip(youtube_speeds, youtube_ids) if pair[1]])
[ "def", "create_youtube_string", "(", "module", ")", ":", "youtube_ids", "=", "[", "module", ".", "youtube_id_0_75", ",", "module", ".", "youtube_id_1_0", ",", "module", ".", "youtube_id_1_25", ",", "module", ".", "youtube_id_1_5", "]", "youtube_speeds", "=", "[", "'0.75'", ",", "'1.00'", ",", "'1.25'", ",", "'1.50'", "]", "return", "','", ".", "join", "(", "[", "':'", ".", "join", "(", "pair", ")", "for", "pair", "in", "zip", "(", "youtube_speeds", ",", "youtube_ids", ")", "if", "pair", "[", "1", "]", "]", ")" ]
create a string of youtube ids from modules metadata attributes .
train
false
9,813
def is_true(val): return ((val is True) or (val in ['True', 'true', 'T', 't']))
[ "def", "is_true", "(", "val", ")", ":", "return", "(", "(", "val", "is", "True", ")", "or", "(", "val", "in", "[", "'True'", ",", "'true'", ",", "'T'", ",", "'t'", "]", ")", ")" ]
returns true if input is a boolean and true or is a string and looks like a true value .
train
false
9,815
def safe_range(*args):
    rng = range(*args)
    if (len(rng) > MAX_RANGE):
        raise OverflowError(('range too big, maximum size for range is %d' % MAX_RANGE))
    return rng
[ "def", "safe_range", "(", "*", "args", ")", ":", "rng", "=", "range", "(", "*", "args", ")", "if", "(", "len", "(", "rng", ")", ">", "MAX_RANGE", ")", ":", "raise", "OverflowError", "(", "(", "'range too big, maximum size for range is %d'", "%", "MAX_RANGE", ")", ")", "return", "rng" ]
a range that can't generate ranges with a length of more than max_range items .
train
true
9,817
def _dict_diff(a, b):
    if (set(a.keys()) - set(b.keys())):
        LOG.debug('metadata diff -- master has extra keys: %(keys)s', {'keys': ' '.join((set(a.keys()) - set(b.keys())))})
        return True
    for key in a:
        if (str(a[key]) != str(b[key])):
            LOG.debug('metadata diff -- value differs for key %(key)s: master "%(master_value)s" vs slave "%(slave_value)s"', {'key': key, 'master_value': a[key], 'slave_value': b[key]})
            return True
    return False
[ "def", "_dict_diff", "(", "a", ",", "b", ")", ":", "if", "(", "set", "(", "a", ".", "keys", "(", ")", ")", "-", "set", "(", "b", ".", "keys", "(", ")", ")", ")", ":", "LOG", ".", "debug", "(", "'metadata diff -- master has extra keys: %(keys)s'", ",", "{", "'keys'", ":", "' '", ".", "join", "(", "(", "set", "(", "a", ".", "keys", "(", ")", ")", "-", "set", "(", "b", ".", "keys", "(", ")", ")", ")", ")", "}", ")", "return", "True", "for", "key", "in", "a", ":", "if", "(", "str", "(", "a", "[", "key", "]", ")", "!=", "str", "(", "b", "[", "key", "]", ")", ")", ":", "LOG", ".", "debug", "(", "'metadata diff -- value differs for key %(key)s: master \"%(master_value)s\" vs slave \"%(slave_value)s\"'", ",", "{", "'key'", ":", "key", ",", "'master_value'", ":", "a", "[", "key", "]", ",", "'slave_value'", ":", "b", "[", "key", "]", "}", ")", "return", "True", "return", "False" ]
a one-way dictionary diff .
train
false
9,818
def initializer_mock(request, cls):
    _patch = patch.object(cls, '__init__', return_value=None)
    request.addfinalizer(_patch.stop)
    return _patch.start()
[ "def", "initializer_mock", "(", "request", ",", "cls", ")", ":", "_patch", "=", "patch", ".", "object", "(", "cls", ",", "'__init__'", ",", "return_value", "=", "None", ")", "request", ".", "addfinalizer", "(", "_patch", ".", "stop", ")", "return", "_patch", ".", "start", "(", ")" ]
return a mock for the __init__ method on *cls* where the patch is reversed after pytest uses it .
train
false
9,819
def RequestMock(response='', headers=None):
    res = mock.Mock()
    res.read.return_value = response
    res.contents = response
    res.text = response
    res.iter_lines.side_effect = (lambda chunk_size=1: response.split('\n').__iter__())
    res.iter_content.side_effect = (lambda chunk_size=1: (response,).__iter__())

    def lines():
        return [(l + '\n') for l in response.split('\n')[:(-1)]]
    res.readlines.side_effect = lines
    res.iter_lines.side_effect = (lambda: lines().__iter__())
    res.headers = (headers or {})
    res.headers['content-length'] = len(response)
    return res
[ "def", "RequestMock", "(", "response", "=", "''", ",", "headers", "=", "None", ")", ":", "res", "=", "mock", ".", "Mock", "(", ")", "res", ".", "read", ".", "return_value", "=", "response", "res", ".", "contents", "=", "response", "res", ".", "text", "=", "response", "res", ".", "iter_lines", ".", "side_effect", "=", "(", "lambda", "chunk_size", "=", "1", ":", "response", ".", "split", "(", "'\\n'", ")", ".", "__iter__", "(", ")", ")", "res", ".", "iter_content", ".", "side_effect", "=", "(", "lambda", "chunk_size", "=", "1", ":", "(", "response", ",", ")", ".", "__iter__", "(", ")", ")", "def", "lines", "(", ")", ":", "return", "[", "(", "l", "+", "'\\n'", ")", "for", "l", "in", "response", ".", "split", "(", "'\\n'", ")", "[", ":", "(", "-", "1", ")", "]", "]", "res", ".", "readlines", ".", "side_effect", "=", "lines", "res", ".", "iter_lines", ".", "side_effect", "=", "(", "lambda", ":", "lines", "(", ")", ".", "__iter__", "(", ")", ")", "res", ".", "headers", "=", "(", "headers", "or", "{", "}", ")", "res", ".", "headers", "[", "'content-length'", "]", "=", "len", "(", "response", ")", "return", "res" ]
mocks the request objects of urllib2 and requests modules .
train
false
9,820
def getPointMinimum(firstPoint, secondPoint): return Vector3(min(firstPoint.x, secondPoint.x), min(firstPoint.y, secondPoint.y), min(firstPoint.z, secondPoint.z))
[ "def", "getPointMinimum", "(", "firstPoint", ",", "secondPoint", ")", ":", "return", "Vector3", "(", "min", "(", "firstPoint", ".", "x", ",", "secondPoint", ".", "x", ")", ",", "min", "(", "firstPoint", ".", "y", ",", "secondPoint", ".", "y", ")", ",", "min", "(", "firstPoint", ".", "z", ",", "secondPoint", ".", "z", ")", ")" ]
get a point with each component the minimum of the respective components of a pair of vector3s .
train
false
9,821
def get_filetype_icon(fname):
    ext = osp.splitext(fname)[1]
    if ext.startswith('.'):
        ext = ext[1:]
    return get_icon(('%s.png' % ext), ima.icon('FileIcon'))
[ "def", "get_filetype_icon", "(", "fname", ")", ":", "ext", "=", "osp", ".", "splitext", "(", "fname", ")", "[", "1", "]", "if", "ext", ".", "startswith", "(", "'.'", ")", ":", "ext", "=", "ext", "[", "1", ":", "]", "return", "get_icon", "(", "(", "'%s.png'", "%", "ext", ")", ",", "ima", ".", "icon", "(", "'FileIcon'", ")", ")" ]
return file type icon .
train
true
9,822
def RecurrenceOperators(base, generator):
    ring = RecurrenceOperatorAlgebra(base, generator)
    return (ring, ring.shift_operator)
[ "def", "RecurrenceOperators", "(", "base", ",", "generator", ")", ":", "ring", "=", "RecurrenceOperatorAlgebra", "(", "base", ",", "generator", ")", "return", "(", "ring", ",", "ring", ".", "shift_operator", ")" ]
returns an algebra of recurrence operators and the shift operator .
train
false
9,825
def setup_authentication(config):
    config.include('pyramid_multiauth')
    settings = config.get_settings()
    policies = aslist(settings['multiauth.policies'])
    if ('basicauth' in policies):
        config.include('kinto.core.authentication')

    def on_policy_selected(event):
        authn_type = event.policy_name.lower()
        event.request.authn_type = authn_type
        event.request.selected_userid = event.userid
        logger.bind(uid=event.userid, authn_type=authn_type)
    config.add_subscriber(on_policy_selected, MultiAuthPolicySelected)
[ "def", "setup_authentication", "(", "config", ")", ":", "config", ".", "include", "(", "'pyramid_multiauth'", ")", "settings", "=", "config", ".", "get_settings", "(", ")", "policies", "=", "aslist", "(", "settings", "[", "'multiauth.policies'", "]", ")", "if", "(", "'basicauth'", "in", "policies", ")", ":", "config", ".", "include", "(", "'kinto.core.authentication'", ")", "def", "on_policy_selected", "(", "event", ")", ":", "authn_type", "=", "event", ".", "policy_name", ".", "lower", "(", ")", "event", ".", "request", ".", "authn_type", "=", "authn_type", "event", ".", "request", ".", "selected_userid", "=", "event", ".", "userid", "logger", ".", "bind", "(", "uid", "=", "event", ".", "userid", ",", "authn_type", "=", "authn_type", ")", "config", ".", "add_subscriber", "(", "on_policy_selected", ",", "MultiAuthPolicySelected", ")" ]
register non-default auth methods .
train
false
9,827
def b64e(s): return base64.b64encode(s)
[ "def", "b64e", "(", "s", ")", ":", "return", "base64", ".", "b64encode", "(", "s", ")" ]
b64e(s) -> str: base64 encodes a string .
train
false
9,828
def cuda_set_device(dev_id): pass
[ "def", "cuda_set_device", "(", "dev_id", ")", ":", "pass" ]
selects the cuda device with the given id .
train
false
9,830
def _GivePropertiesFromGeneralToSpecific(handler_list):
    for (i, j) in itertools.combinations(xrange(len(handler_list)), 2):
        if handler_list[j].MatchesAll(handler_list[i]):
            if isinstance(handler_list[i], SimpleHandler):
                handler_list[i] = handler_list[i].CreateOverlappedHandler()
            handler_list[i].AddMatchingHandler(handler_list[j])
[ "def", "_GivePropertiesFromGeneralToSpecific", "(", "handler_list", ")", ":", "for", "(", "i", ",", "j", ")", "in", "itertools", ".", "combinations", "(", "xrange", "(", "len", "(", "handler_list", ")", ")", ",", "2", ")", ":", "if", "handler_list", "[", "j", "]", ".", "MatchesAll", "(", "handler_list", "[", "i", "]", ")", ":", "if", "isinstance", "(", "handler_list", "[", "i", "]", ",", "SimpleHandler", ")", ":", "handler_list", "[", "i", "]", "=", "handler_list", "[", "i", "]", ".", "CreateOverlappedHandler", "(", ")", "handler_list", "[", "i", "]", ".", "AddMatchingHandler", "(", "handler_list", "[", "j", "]", ")" ]
makes sure that handlers have all properties of more general ones .
train
false
9,831
def _get_deployment_flavor(flavor=None):
    if (not flavor):
        flavor = CONF.paste_deploy.flavor
    return ('' if (not flavor) else ('-' + flavor))
[ "def", "_get_deployment_flavor", "(", "flavor", "=", "None", ")", ":", "if", "(", "not", "flavor", ")", ":", "flavor", "=", "CONF", ".", "paste_deploy", ".", "flavor", "return", "(", "''", "if", "(", "not", "flavor", ")", "else", "(", "'-'", "+", "flavor", ")", ")" ]
retrieve the paste_deploy flavor .
train
false
9,832
def intensity_range(image, range_values='image', clip_negative=False):
    if (range_values == 'dtype'):
        range_values = image.dtype.type
    if (range_values == 'image'):
        i_min = np.min(image)
        i_max = np.max(image)
    elif (range_values in DTYPE_RANGE):
        (i_min, i_max) = DTYPE_RANGE[range_values]
        if clip_negative:
            i_min = 0
    else:
        (i_min, i_max) = range_values
    return (i_min, i_max)
[ "def", "intensity_range", "(", "image", ",", "range_values", "=", "'image'", ",", "clip_negative", "=", "False", ")", ":", "if", "(", "range_values", "==", "'dtype'", ")", ":", "range_values", "=", "image", ".", "dtype", ".", "type", "if", "(", "range_values", "==", "'image'", ")", ":", "i_min", "=", "np", ".", "min", "(", "image", ")", "i_max", "=", "np", ".", "max", "(", "image", ")", "elif", "(", "range_values", "in", "DTYPE_RANGE", ")", ":", "(", "i_min", ",", "i_max", ")", "=", "DTYPE_RANGE", "[", "range_values", "]", "if", "clip_negative", ":", "i_min", "=", "0", "else", ":", "(", "i_min", ",", "i_max", ")", "=", "range_values", "return", "(", "i_min", ",", "i_max", ")" ]
return image intensity range based on desired value type .
train
false
9,833
def get_diff_for_otu_maps(otu_map1, otu_map2):
    otus1 = set(otu_map1.keys())
    otus2 = set(otu_map2.keys())
    ids1 = set([x for otu in otus1 for x in otu_map1[otu]])
    ids2 = set([x for otu in otus2 for x in otu_map2[otu]])
    return ((ids1 - ids2), (ids2 - ids1))
[ "def", "get_diff_for_otu_maps", "(", "otu_map1", ",", "otu_map2", ")", ":", "otus1", "=", "set", "(", "otu_map1", ".", "keys", "(", ")", ")", "otus2", "=", "set", "(", "otu_map2", ".", "keys", "(", ")", ")", "ids1", "=", "set", "(", "[", "x", "for", "otu", "in", "otus1", "for", "x", "in", "otu_map1", "[", "otu", "]", "]", ")", "ids2", "=", "set", "(", "[", "x", "for", "otu", "in", "otus2", "for", "x", "in", "otu_map2", "[", "otu", "]", "]", ")", "return", "(", "(", "ids1", "-", "ids2", ")", ",", "(", "ids2", "-", "ids1", ")", ")" ]
return reads in two otu_maps that are not shared .
train
false
9,834
def check_known_inconsistencies(bill_data, bond_data):
    inconsistent_dates = bill_data.index.sym_diff(bond_data.index)
    known_inconsistencies = [pd.Timestamp('2006-09-04', tz='UTC'), pd.Timestamp('2010-02-15', tz='UTC'), pd.Timestamp('2013-07-25', tz='UTC')]
    unexpected_inconsistences = inconsistent_dates.drop(known_inconsistencies)
    if len(unexpected_inconsistences):
        in_bills = bill_data.index.difference(bond_data.index).difference(known_inconsistencies)
        in_bonds = bond_data.index.difference(bill_data.index).difference(known_inconsistencies)
        raise ValueError('Inconsistent dates for Canadian treasury bills vs bonds. \nDates with bills but not bonds: {in_bills}.\nDates with bonds but not bills: {in_bonds}.'.format(in_bills=in_bills, in_bonds=in_bonds))
[ "def", "check_known_inconsistencies", "(", "bill_data", ",", "bond_data", ")", ":", "inconsistent_dates", "=", "bill_data", ".", "index", ".", "sym_diff", "(", "bond_data", ".", "index", ")", "known_inconsistencies", "=", "[", "pd", ".", "Timestamp", "(", "'2006-09-04'", ",", "tz", "=", "'UTC'", ")", ",", "pd", ".", "Timestamp", "(", "'2010-02-15'", ",", "tz", "=", "'UTC'", ")", ",", "pd", ".", "Timestamp", "(", "'2013-07-25'", ",", "tz", "=", "'UTC'", ")", "]", "unexpected_inconsistences", "=", "inconsistent_dates", ".", "drop", "(", "known_inconsistencies", ")", "if", "len", "(", "unexpected_inconsistences", ")", ":", "in_bills", "=", "bill_data", ".", "index", ".", "difference", "(", "bond_data", ".", "index", ")", ".", "difference", "(", "known_inconsistencies", ")", "in_bonds", "=", "bond_data", ".", "index", ".", "difference", "(", "bill_data", ".", "index", ")", ".", "difference", "(", "known_inconsistencies", ")", "raise", "ValueError", "(", "'Inconsistent dates for Canadian treasury bills vs bonds. \\nDates with bills but not bonds: {in_bills}.\\nDates with bonds but not bills: {in_bonds}.'", ".", "format", "(", "in_bills", "=", "in_bills", ",", "in_bonds", "=", "in_bonds", ")", ")" ]
there are a couple of quirks in the data provided by bank of canada ; check that no inconsistent dates beyond the known exceptions have appeared between the bill and bond series .
train
true
9,835
def kpsewhich(filename): try: find_cmd('kpsewhich') proc = subprocess.Popen(['kpsewhich', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, stderr) = proc.communicate() return stdout.strip().decode('utf8', 'replace') except FindCmdError: pass
[ "def", "kpsewhich", "(", "filename", ")", ":", "try", ":", "find_cmd", "(", "'kpsewhich'", ")", "proc", "=", "subprocess", ".", "Popen", "(", "[", "'kpsewhich'", ",", "filename", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "stdout", ",", "stderr", ")", "=", "proc", ".", "communicate", "(", ")", "return", "stdout", ".", "strip", "(", ")", ".", "decode", "(", "'utf8'", ",", "'replace'", ")", "except", "FindCmdError", ":", "pass" ]
invoke kpsewhich command with an argument filename .
train
false
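a usage sketch for kpsewhich, assuming a tex distribution is installed; the filename 'article.cls' is just an example input.

path = kpsewhich('article.cls')
# path is the resolved filename, '' if tex does not know the file,
# or None if the kpsewhich binary itself is missing (FindCmdError)
print(path)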
9,836
def test_withall(tmpmod): with tmpmod.mkdir('xontrib').join('spameggs.py').open('w') as x: x.write("\n__all__ = 'spam', '_foobar'\nspam = 1\neggs = 2\n_foobar = 3\n") ctx = xontrib_context('spameggs') assert (ctx == {'spam': 1, '_foobar': 3})
[ "def", "test_withall", "(", "tmpmod", ")", ":", "with", "tmpmod", ".", "mkdir", "(", "'xontrib'", ")", ".", "join", "(", "'spameggs.py'", ")", ".", "open", "(", "'w'", ")", "as", "x", ":", "x", ".", "write", "(", "\"\\n__all__ = 'spam', '_foobar'\\nspam = 1\\neggs = 2\\n_foobar = 3\\n\"", ")", "ctx", "=", "xontrib_context", "(", "'spameggs'", ")", "assert", "(", "ctx", "==", "{", "'spam'", ":", "1", ",", "'_foobar'", ":", "3", "}", ")" ]
tests what gets exported from a module with __all__ .
train
false
9,837
def test_approve_addons_approve_files(use_case, mozilla_user): (addon, file1, file2, review_type) = use_case approve_addons.approve_files([(file1, review_type), (file2, review_type)]) assert (file1.reload().status == amo.STATUS_PUBLIC) assert (file2.reload().status == amo.STATUS_PUBLIC) logs = AddonLog.objects.filter(addon=addon) assert (len(logs) == 2) (file1_log, file2_log) = logs assert (file1_log.activity_log.details['comments'] == u'bulk approval') assert (file1_log.activity_log.user == mozilla_user) assert (file2_log.activity_log.details['comments'] == u'bulk approval') assert (file2_log.activity_log.user == mozilla_user) assert (not ReviewerScore.objects.all())
[ "def", "test_approve_addons_approve_files", "(", "use_case", ",", "mozilla_user", ")", ":", "(", "addon", ",", "file1", ",", "file2", ",", "review_type", ")", "=", "use_case", "approve_addons", ".", "approve_files", "(", "[", "(", "file1", ",", "review_type", ")", ",", "(", "file2", ",", "review_type", ")", "]", ")", "assert", "(", "file1", ".", "reload", "(", ")", ".", "status", "==", "amo", ".", "STATUS_PUBLIC", ")", "assert", "(", "file2", ".", "reload", "(", ")", ".", "status", "==", "amo", ".", "STATUS_PUBLIC", ")", "logs", "=", "AddonLog", ".", "objects", ".", "filter", "(", "addon", "=", "addon", ")", "assert", "(", "len", "(", "logs", ")", "==", "2", ")", "(", "file1_log", ",", "file2_log", ")", "=", "logs", "assert", "(", "file1_log", ".", "activity_log", ".", "details", "[", "'comments'", "]", "==", "u'bulk approval'", ")", "assert", "(", "file1_log", ".", "activity_log", ".", "user", "==", "mozilla_user", ")", "assert", "(", "file2_log", ".", "activity_log", ".", "details", "[", "'comments'", "]", "==", "u'bulk approval'", ")", "assert", "(", "file2_log", ".", "activity_log", ".", "user", "==", "mozilla_user", ")", "assert", "(", "not", "ReviewerScore", ".", "objects", ".", "all", "(", ")", ")" ]
files are approved using the correct review type .
train
false
9,841
def _get_participants(msg, excluded_emails=[]): participants = ((msg.to_addr + msg.cc_addr) + msg.bcc_addr) return sorted(list(set([email.lower() for (_, email) in participants if (email not in excluded_emails)])))
[ "def", "_get_participants", "(", "msg", ",", "excluded_emails", "=", "[", "]", ")", ":", "participants", "=", "(", "(", "msg", ".", "to_addr", "+", "msg", ".", "cc_addr", ")", "+", "msg", ".", "bcc_addr", ")", "return", "sorted", "(", "list", "(", "set", "(", "[", "email", ".", "lower", "(", ")", "for", "(", "_", ",", "email", ")", "in", "participants", "if", "(", "email", "not", "in", "excluded_emails", ")", "]", ")", ")", ")" ]
returns an alphabetically sorted list of email addresses that msg was sent to .
train
false
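a minimal sketch for _get_participants using a stub message object; note the exclusion check runs against the address as given, before lowercasing.

from types import SimpleNamespace

msg = SimpleNamespace(to_addr=[('Alice', 'Alice@example.com')],
                      cc_addr=[('Bob', 'bob@example.com')],
                      bcc_addr=[])
print(_get_participants(msg, excluded_emails=['bob@example.com']))
# ['alice@example.com']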
9,843
def filter_form_field_choices(field, predicate, invert=False): if (not callable(predicate)): allowed_values = set(predicate) def predicate(pair): return (pair[0] in allowed_values) if invert: choices = [pair for pair in field.choices if (not predicate(pair))] else: choices = [pair for pair in field.choices if predicate(pair)] field.choices = field.widget.choices = choices
[ "def", "filter_form_field_choices", "(", "field", ",", "predicate", ",", "invert", "=", "False", ")", ":", "if", "(", "not", "callable", "(", "predicate", ")", ")", ":", "allowed_values", "=", "set", "(", "predicate", ")", "def", "predicate", "(", "pair", ")", ":", "return", "(", "pair", "[", "0", "]", "in", "allowed_values", ")", "if", "invert", ":", "choices", "=", "[", "pair", "for", "pair", "in", "field", ".", "choices", "if", "(", "not", "predicate", "(", "pair", ")", ")", "]", "else", ":", "choices", "=", "[", "pair", "for", "pair", "in", "field", ".", "choices", "if", "predicate", "(", "pair", ")", "]", "field", ".", "choices", "=", "field", ".", "widget", ".", "choices", "=", "choices" ]
filter choices of a form field and its widget by predicate .
train
false
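a usage sketch for filter_form_field_choices with a hypothetical django ChoiceField; the predicate may be a callable over (value, label) pairs or any iterable of allowed values.

from django import forms

field = forms.ChoiceField(choices=[('a', 'A'), ('b', 'B'), ('c', 'C')])
filter_form_field_choices(field, ['a', 'c'])           # keep only 'a' and 'c'
filter_form_field_choices(field, ['c'], invert=True)   # then drop 'c'
# field.choices == [('a', 'A')]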
9,844
@testing.requires_testing_data def test_mixed_stc(): N = 90 T = 2 S = 3 data = rng.randn(N, T) vertno = (S * [np.arange((N // S))]) assert_raises(ValueError, MixedSourceEstimate, data=data, vertices=[np.arange(N)]) stc = MixedSourceEstimate(data, vertno, 0, 1) vol = read_source_spaces(fname_vsrc) assert_raises(ValueError, stc.plot_surface, src=vol)
[ "@", "testing", ".", "requires_testing_data", "def", "test_mixed_stc", "(", ")", ":", "N", "=", "90", "T", "=", "2", "S", "=", "3", "data", "=", "rng", ".", "randn", "(", "N", ",", "T", ")", "vertno", "=", "(", "S", "*", "[", "np", ".", "arange", "(", "(", "N", "//", "S", ")", ")", "]", ")", "assert_raises", "(", "ValueError", ",", "MixedSourceEstimate", ",", "data", "=", "data", ",", "vertices", "=", "[", "np", ".", "arange", "(", "N", ")", "]", ")", "stc", "=", "MixedSourceEstimate", "(", "data", ",", "vertno", ",", "0", ",", "1", ")", "vol", "=", "read_source_spaces", "(", "fname_vsrc", ")", "assert_raises", "(", "ValueError", ",", "stc", ".", "plot_surface", ",", "src", "=", "vol", ")" ]
test source estimate from mixed source space .
train
false
9,845
def unpack(source): (payload, symtab, radix, count) = _filterargs(source) if (count != len(symtab)): raise UnpackingError('Malformed p.a.c.k.e.r. symtab.') try: unbase = Unbaser(radix) except TypeError: raise UnpackingError('Unknown p.a.c.k.e.r. encoding.') def lookup(match): 'Look up symbols in the synthetic symtab.' word = match.group(0) return (symtab[unbase(word)] or word) source = re.sub('\\b\\w+\\b', lookup, payload) return _replacestrings(source)
[ "def", "unpack", "(", "source", ")", ":", "(", "payload", ",", "symtab", ",", "radix", ",", "count", ")", "=", "_filterargs", "(", "source", ")", "if", "(", "count", "!=", "len", "(", "symtab", ")", ")", ":", "raise", "UnpackingError", "(", "'Malformed p.a.c.k.e.r. symtab.'", ")", "try", ":", "unbase", "=", "Unbaser", "(", "radix", ")", "except", "TypeError", ":", "raise", "UnpackingError", "(", "'Unknown p.a.c.k.e.r. encoding.'", ")", "def", "lookup", "(", "match", ")", ":", "word", "=", "match", ".", "group", "(", "0", ")", "return", "(", "symtab", "[", "unbase", "(", "word", ")", "]", "or", "word", ")", "source", "=", "re", ".", "sub", "(", "'\\\\b\\\\w+\\\\b'", ",", "lookup", ",", "payload", ")", "return", "_replacestrings", "(", "source", ")" ]
unpack p.a.c.k.e.r. packed javascript source .
train
false
9,846
def _clone(requestedVersion): assert (not os.path.exists(VERSIONSDIR)), 'use `git fetch` not `git clone`' print(_translate('Downloading the PsychoPy Library from Github (may take a while)')) cmd = ('git clone -o github https://github.com/psychopy/versions ' + VER_SUBDIR) print(cmd) subprocess.check_output(cmd.split(), cwd=USERDIR) return _checkout(requestedVersion)
[ "def", "_clone", "(", "requestedVersion", ")", ":", "assert", "(", "not", "os", ".", "path", ".", "exists", "(", "VERSIONSDIR", ")", ")", ",", "'use `git fetch` not `git clone`'", "print", "(", "_translate", "(", "'Downloading the PsychoPy Library from Github (may take a while)'", ")", ")", "cmd", "=", "(", "'git clone -o github https://github.com/psychopy/versions '", "+", "VER_SUBDIR", ")", "print", "(", "cmd", ")", "subprocess", ".", "check_output", "(", "cmd", ".", "split", "(", ")", ",", "cwd", "=", "USERDIR", ")", "return", "_checkout", "(", "requestedVersion", ")" ]
download (clone) all versions , then check out the requested version .
train
false
9,849
def decode_wait_status(sts): if os.WIFEXITED(sts): es = (os.WEXITSTATUS(sts) & 65535) msg = ('exit status %s' % es) return (es, msg) elif os.WIFSIGNALED(sts): sig = os.WTERMSIG(sts) msg = ('terminated by %s' % signame(sig)) if hasattr(os, 'WCOREDUMP'): iscore = os.WCOREDUMP(sts) else: iscore = (sts & 128) if iscore: msg += ' (core dumped)' return ((-1), msg) else: msg = ('unknown termination cause 0x%04x' % sts) return ((-1), msg)
[ "def", "decode_wait_status", "(", "sts", ")", ":", "if", "os", ".", "WIFEXITED", "(", "sts", ")", ":", "es", "=", "(", "os", ".", "WEXITSTATUS", "(", "sts", ")", "&", "65535", ")", "msg", "=", "(", "'exit status %s'", "%", "es", ")", "return", "(", "es", ",", "msg", ")", "elif", "os", ".", "WIFSIGNALED", "(", "sts", ")", ":", "sig", "=", "os", ".", "WTERMSIG", "(", "sts", ")", "msg", "=", "(", "'terminated by %s'", "%", "signame", "(", "sig", ")", ")", "if", "hasattr", "(", "os", ",", "'WCOREDUMP'", ")", ":", "iscore", "=", "os", ".", "WCOREDUMP", "(", "sts", ")", "else", ":", "iscore", "=", "(", "sts", "&", "128", ")", "if", "iscore", ":", "msg", "+=", "' (core dumped)'", "return", "(", "(", "-", "1", ")", ",", "msg", ")", "else", ":", "msg", "=", "(", "'unknown termination cause 0x%04x'", "%", "sts", ")", "return", "(", "(", "-", "1", ")", ",", "msg", ")" ]
decode the status returned by wait() or waitpid() .
train
false
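a worked example for decode_wait_status on a posix system, where os.system returns a raw 16-bit wait status (this encoding is platform-specific, so the sketch assumes posix).

import os

sts = os.system('exit 3')        # posix only; sts encodes the child's exit status
print(decode_wait_status(sts))   # (3, 'exit status 3')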
9,850
@open_file(1, mode='wb') def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL): pickle.dump(G, path, protocol)
[ "@", "open_file", "(", "1", ",", "mode", "=", "'wb'", ")", "def", "write_gpickle", "(", "G", ",", "path", ",", "protocol", "=", "pickle", ".", "HIGHEST_PROTOCOL", ")", ":", "pickle", ".", "dump", "(", "G", ",", "path", ",", "protocol", ")" ]
write graph in python pickle format .
train
false
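a usage sketch for write_gpickle, assuming networkx before 3.0 (where the gpickle helpers live); the @open_file decorator lets path be either a filename or an open binary file handle.

import networkx as nx

G = nx.path_graph(4)
write_gpickle(G, 'path_graph.gpickle')
H = nx.read_gpickle('path_graph.gpickle')  # round-trips the graph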
9,851
def module_exists(module_name): try: __import__(module_name) except ImportError: return False else: return True
[ "def", "module_exists", "(", "module_name", ")", ":", "try", ":", "__import__", "(", "module_name", ")", "except", "ImportError", ":", "return", "False", "else", ":", "return", "True" ]
check to see if a module is installed or not .
train
false
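a minimal usage sketch for module_exists; note that __import__ actually imports the module as a side effect when it is present.

if module_exists('json'):
    import json
    print(json.dumps({'ok': True}))
else:
    print('json is not installed')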
9,852
def ReverseBitsInt64(v): v = (((v >> 1) & 6148914691236517205) | ((v & 6148914691236517205) << 1)) v = (((v >> 2) & 3689348814741910323) | ((v & 3689348814741910323) << 2)) v = (((v >> 4) & 1085102592571150095) | ((v & 1085102592571150095) << 4)) v = (((v >> 8) & 71777214294589695) | ((v & 71777214294589695) << 8)) v = (((v >> 16) & 281470681808895) | ((v & 281470681808895) << 16)) v = int(((v >> 32) | ((v << 32) & 18446744073709551615L))) return v
[ "def", "ReverseBitsInt64", "(", "v", ")", ":", "v", "=", "(", "(", "(", "v", ">>", "1", ")", "&", "6148914691236517205", ")", "|", "(", "(", "v", "&", "6148914691236517205", ")", "<<", "1", ")", ")", "v", "=", "(", "(", "(", "v", ">>", "2", ")", "&", "3689348814741910323", ")", "|", "(", "(", "v", "&", "3689348814741910323", ")", "<<", "2", ")", ")", "v", "=", "(", "(", "(", "v", ">>", "4", ")", "&", "1085102592571150095", ")", "|", "(", "(", "v", "&", "1085102592571150095", ")", "<<", "4", ")", ")", "v", "=", "(", "(", "(", "v", ">>", "8", ")", "&", "71777214294589695", ")", "|", "(", "(", "v", "&", "71777214294589695", ")", "<<", "8", ")", ")", "v", "=", "(", "(", "(", "v", ">>", "16", ")", "&", "281470681808895", ")", "|", "(", "(", "v", "&", "281470681808895", ")", "<<", "16", ")", ")", "v", "=", "int", "(", "(", "(", "v", ">>", "32", ")", "|", "(", "(", "v", "<<", "32", ")", "&", "18446744073709551615", "L", ")", ")", ")", "return", "v" ]
reverse the bits of a 64-bit integer .
train
false
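a worked example for ReverseBitsInt64 (python 2, given the long literal in the snippet): reversing the 64-bit pattern of 1 moves the single set bit from position 0 to position 63, and the swap network is its own inverse.

print(ReverseBitsInt64(1))        # 9223372036854775808 == 2 ** 63
print(ReverseBitsInt64(2 ** 63))  # 1: applying it twice restores the input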
9,853
def flatten_dictionary(input, sep='.', prefix=None): for (name, value) in sorted(input.items()): fullname = sep.join(filter(None, [prefix, name])) if isinstance(value, dict): for result in flatten_dictionary(value, sep, fullname): (yield result) else: (yield (fullname, value))
[ "def", "flatten_dictionary", "(", "input", ",", "sep", "=", "'.'", ",", "prefix", "=", "None", ")", ":", "for", "(", "name", ",", "value", ")", "in", "sorted", "(", "input", ".", "items", "(", ")", ")", ":", "fullname", "=", "sep", ".", "join", "(", "filter", "(", "None", ",", "[", "prefix", ",", "name", "]", ")", ")", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "for", "result", "in", "flatten_dictionary", "(", "value", ",", "sep", ",", "fullname", ")", ":", "(", "yield", "result", ")", "else", ":", "(", "yield", "(", "fullname", ",", "value", ")", ")" ]
produces an iterator of pairs where the first value is the joined key name and the second value is the value associated with the lowest-level key .
train
true
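a worked example for flatten_dictionary; nested keys are joined with the separator, and the generator yields pairs in sorted key order.

nested = {'db': {'host': 'localhost', 'port': 5432}, 'debug': True}
print(dict(flatten_dictionary(nested)))
# {'db.host': 'localhost', 'db.port': 5432, 'debug': True}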
9,857
def app_has_custom(app, attr): return mro_lookup(app.__class__, attr, stop={Celery, object}, monkey_patched=[__name__])
[ "def", "app_has_custom", "(", "app", ",", "attr", ")", ":", "return", "mro_lookup", "(", "app", ".", "__class__", ",", "attr", ",", "stop", "=", "{", "Celery", ",", "object", "}", ",", "monkey_patched", "=", "[", "__name__", "]", ")" ]
return true if app has customized method attr .
train
false
9,858
def hostgroup_update(groupid, name=None, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'hostgroup.update' params = {'groupid': groupid} if name: params['name'] = name params = _params_extend(params, **connection_args) ret = _query(method, params, conn_args['url'], conn_args['auth']) return ret['result']['groupids'] else: raise KeyError except KeyError: return ret
[ "def", "hostgroup_update", "(", "groupid", ",", "name", "=", "None", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'hostgroup.update'", "params", "=", "{", "'groupid'", ":", "groupid", "}", "if", "name", ":", "params", "[", "'name'", "]", "=", "name", "params", "=", "_params_extend", "(", "params", ",", "**", "connection_args", ")", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'groupids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
update an existing host group .
train
true
9,859
def test_multi_constructor_obj(): try: load('a: !obj:decimal.Decimal { 1 }') except TypeError as e: assert (str(e) == 'Received non string object (1) as key in mapping.') pass except Exception as e: error_msg = ('Got the unexpected error: %s' % e) reraise_as(ValueError(error_msg))
[ "def", "test_multi_constructor_obj", "(", ")", ":", "try", ":", "load", "(", "'a: !obj:decimal.Decimal { 1 }'", ")", "except", "TypeError", "as", "e", ":", "assert", "(", "str", "(", "e", ")", "==", "'Received non string object (1) as key in mapping.'", ")", "pass", "except", "Exception", "as", "e", ":", "error_msg", "=", "(", "'Got the unexpected error: %s'", "%", "e", ")", "reraise_as", "(", "ValueError", "(", "error_msg", ")", ")" ]
tests whether multi_constructor_obj throws an exception when a key in the mapping is not a string .
train
false
9,860
def parse_language(speaker, emote): emote = _RE_REF_LANG.sub('\\1', emote) errors = [] mapping = {} for (imatch, say_match) in enumerate(reversed(list(_RE_LANGUAGE.finditer(emote)))): (langname, saytext) = say_match.groups() (istart, iend) = (say_match.start(), say_match.end()) key = ('##%i' % imatch) emote = ((emote[:istart] + ('{%s}' % key)) + emote[iend:]) mapping[key] = (langname, saytext) if errors: raise LanguageError('\n'.join(errors)) return (emote, mapping)
[ "def", "parse_language", "(", "speaker", ",", "emote", ")", ":", "emote", "=", "_RE_REF_LANG", ".", "sub", "(", "'\\\\1'", ",", "emote", ")", "errors", "=", "[", "]", "mapping", "=", "{", "}", "for", "(", "imatch", ",", "say_match", ")", "in", "enumerate", "(", "reversed", "(", "list", "(", "_RE_LANGUAGE", ".", "finditer", "(", "emote", ")", ")", ")", ")", ":", "(", "langname", ",", "saytext", ")", "=", "say_match", ".", "groups", "(", ")", "(", "istart", ",", "iend", ")", "=", "(", "say_match", ".", "start", "(", ")", ",", "say_match", ".", "end", "(", ")", ")", "key", "=", "(", "'##%i'", "%", "imatch", ")", "emote", "=", "(", "(", "emote", "[", ":", "istart", "]", "+", "(", "'{%s}'", "%", "key", ")", ")", "+", "emote", "[", "iend", ":", "]", ")", "mapping", "[", "key", "]", "=", "(", "langname", ",", "saytext", ")", "if", "errors", ":", "raise", "LanguageError", "(", "'\\n'", ".", "join", "(", "errors", ")", ")", "return", "(", "emote", ",", "mapping", ")" ]
parse the emote for language .
train
false
9,861
def label2rgb(label, image=None, colors=None, alpha=0.3, bg_label=(-1), bg_color=(0, 0, 0), image_alpha=1, kind='overlay'): if (kind == 'overlay'): return _label2rgb_overlay(label, image, colors, alpha, bg_label, bg_color, image_alpha) else: return _label2rgb_avg(label, image, bg_label, bg_color)
[ "def", "label2rgb", "(", "label", ",", "image", "=", "None", ",", "colors", "=", "None", ",", "alpha", "=", "0.3", ",", "bg_label", "=", "(", "-", "1", ")", ",", "bg_color", "=", "(", "0", ",", "0", ",", "0", ")", ",", "image_alpha", "=", "1", ",", "kind", "=", "'overlay'", ")", ":", "if", "(", "kind", "==", "'overlay'", ")", ":", "return", "_label2rgb_overlay", "(", "label", ",", "image", ",", "colors", ",", "alpha", ",", "bg_label", ",", "bg_color", ",", "image_alpha", ")", "else", ":", "return", "_label2rgb_avg", "(", "label", ",", "image", ",", "bg_label", ",", "bg_color", ")" ]
return an rgb image where color-coded labels are painted over the image .
train
false
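a usage sketch for label2rgb, assuming it mirrors skimage.color.label2rgb (the import path is an assumption); with no image it simply color-codes the label regions, and with the bg_label=-1 default shown above, label 0 is a regular region rather than background.

import numpy as np
from skimage.color import label2rgb  # assumed import path

labels = np.array([[0, 0, 1], [1, 2, 2]])
rgb = label2rgb(labels)  # (2, 3, 3) float rgb image, one color per label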
9,862
def _get_server_type(doc): if (not doc.get('ok')): return SERVER_TYPE.Unknown if doc.get('isreplicaset'): return SERVER_TYPE.RSGhost elif doc.get('setName'): if doc.get('hidden'): return SERVER_TYPE.RSOther elif doc.get('ismaster'): return SERVER_TYPE.RSPrimary elif doc.get('secondary'): return SERVER_TYPE.RSSecondary elif doc.get('arbiterOnly'): return SERVER_TYPE.RSArbiter else: return SERVER_TYPE.RSOther elif (doc.get('msg') == 'isdbgrid'): return SERVER_TYPE.Mongos else: return SERVER_TYPE.Standalone
[ "def", "_get_server_type", "(", "doc", ")", ":", "if", "(", "not", "doc", ".", "get", "(", "'ok'", ")", ")", ":", "return", "SERVER_TYPE", ".", "Unknown", "if", "doc", ".", "get", "(", "'isreplicaset'", ")", ":", "return", "SERVER_TYPE", ".", "RSGhost", "elif", "doc", ".", "get", "(", "'setName'", ")", ":", "if", "doc", ".", "get", "(", "'hidden'", ")", ":", "return", "SERVER_TYPE", ".", "RSOther", "elif", "doc", ".", "get", "(", "'ismaster'", ")", ":", "return", "SERVER_TYPE", ".", "RSPrimary", "elif", "doc", ".", "get", "(", "'secondary'", ")", ":", "return", "SERVER_TYPE", ".", "RSSecondary", "elif", "doc", ".", "get", "(", "'arbiterOnly'", ")", ":", "return", "SERVER_TYPE", ".", "RSArbiter", "else", ":", "return", "SERVER_TYPE", ".", "RSOther", "elif", "(", "doc", ".", "get", "(", "'msg'", ")", "==", "'isdbgrid'", ")", ":", "return", "SERVER_TYPE", ".", "Mongos", "else", ":", "return", "SERVER_TYPE", ".", "Standalone" ]
determine the server type from an ismaster response .
train
true
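a worked example for _get_server_type with hand-built ismaster response documents.

print(_get_server_type({'ok': 1, 'setName': 'rs0', 'ismaster': True}))
# SERVER_TYPE.RSPrimary
print(_get_server_type({'ok': 1, 'msg': 'isdbgrid'}))  # SERVER_TYPE.Mongos
print(_get_server_type({'ok': 0}))                     # SERVER_TYPE.Unknown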
9,863
def wotan2penntreebank(token, tag): for (k, v) in wotan.items(): if tag.startswith(k): for (a, b) in v: if (a in tag): return (token, b) return (token, tag)
[ "def", "wotan2penntreebank", "(", "token", ",", "tag", ")", ":", "for", "(", "k", ",", "v", ")", "in", "wotan", ".", "items", "(", ")", ":", "if", "tag", ".", "startswith", "(", "k", ")", ":", "for", "(", "a", ",", "b", ")", "in", "v", ":", "if", "(", "a", "in", "tag", ")", ":", "return", "(", "token", ",", "b", ")", "return", "(", "token", ",", "tag", ")" ]
converts a wotan tag to a penn treebank ii tag .
train
false
9,864
@pytest.mark.django_db def test_cross_sell_plugin_type(): shop = get_default_shop() supplier = get_default_supplier() product = create_product('test-sku', shop=shop, supplier=supplier, stock_behavior=StockBehavior.UNSTOCKED) context = get_jinja_context(product=product) type_counts = ((ProductCrossSellType.RELATED, 1), (ProductCrossSellType.RECOMMENDED, 2), (ProductCrossSellType.BOUGHT_WITH, 3)) for (type, count) in type_counts: _create_cross_sell_products(product, shop, supplier, type, count) assert (ProductCrossSell.objects.filter(product1=product, type=type).count() == count) for (type, count) in type_counts: assert (len(list(product_helpers.get_product_cross_sells(context, product, type, count))) == count)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_cross_sell_plugin_type", "(", ")", ":", "shop", "=", "get_default_shop", "(", ")", "supplier", "=", "get_default_supplier", "(", ")", "product", "=", "create_product", "(", "'test-sku'", ",", "shop", "=", "shop", ",", "supplier", "=", "supplier", ",", "stock_behavior", "=", "StockBehavior", ".", "UNSTOCKED", ")", "context", "=", "get_jinja_context", "(", "product", "=", "product", ")", "type_counts", "=", "(", "(", "ProductCrossSellType", ".", "RELATED", ",", "1", ")", ",", "(", "ProductCrossSellType", ".", "RECOMMENDED", ",", "2", ")", ",", "(", "ProductCrossSellType", ".", "BOUGHT_WITH", ",", "3", ")", ")", "for", "(", "type", ",", "count", ")", "in", "type_counts", ":", "_create_cross_sell_products", "(", "product", ",", "shop", ",", "supplier", ",", "type", ",", "count", ")", "assert", "(", "ProductCrossSell", ".", "objects", ".", "filter", "(", "product1", "=", "product", ",", "type", "=", "type", ")", ".", "count", "(", ")", "==", "count", ")", "for", "(", "type", ",", "count", ")", "in", "type_counts", ":", "assert", "(", "len", "(", "list", "(", "product_helpers", ".", "get_product_cross_sells", "(", "context", ",", "product", ",", "type", ",", "count", ")", ")", ")", "==", "count", ")" ]
test that template helper returns correct number of cross sells when shop contains multiple relation types .
train
false
9,866
def plot_images_together(images): fig = plt.figure() images = [image[:, 3:25] for image in images] image = np.concatenate(images, axis=1) ax = fig.add_subplot(1, 1, 1) ax.matshow(image, cmap=matplotlib.cm.binary) plt.xticks(np.array([])) plt.yticks(np.array([])) plt.show()
[ "def", "plot_images_together", "(", "images", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "images", "=", "[", "image", "[", ":", ",", "3", ":", "25", "]", "for", "image", "in", "images", "]", "image", "=", "np", ".", "concatenate", "(", "images", ",", "axis", "=", "1", ")", "ax", "=", "fig", ".", "add_subplot", "(", "1", ",", "1", ",", "1", ")", "ax", ".", "matshow", "(", "image", ",", "cmap", "=", "matplotlib", ".", "cm", ".", "binary", ")", "plt", ".", "xticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "yticks", "(", "np", ".", "array", "(", "[", "]", ")", ")", "plt", ".", "show", "(", ")" ]
plot a single image containing all six mnist images .
train
false
9,867
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
9,868
def fourier_transform(f, x, k, **hints): return FourierTransform(f, x, k).doit(**hints)
[ "def", "fourier_transform", "(", "f", ",", "x", ",", "k", ",", "**", "hints", ")", ":", "return", "FourierTransform", "(", "f", ",", "x", ",", "k", ")", ".", "doit", "(", "**", "hints", ")" ]
compute the unitary , ordinary-frequency fourier transform of f .
train
false
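a usage sketch for fourier_transform, assuming the sympy convention with the 2*pi factor in the exponent.

from sympy import exp, symbols

x, k = symbols('x k')
print(fourier_transform(exp(-x**2), x, k))  # sqrt(pi)*exp(-pi**2*k**2)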
9,871
def _PutSecret(io_loop, secret): _GetSecretsManager().PutSecret(secret, sys.stdin.read()) io_loop.stop()
[ "def", "_PutSecret", "(", "io_loop", ",", "secret", ")", ":", "_GetSecretsManager", "(", ")", ".", "PutSecret", "(", "secret", ",", "sys", ".", "stdin", ".", "read", "(", ")", ")", "io_loop", ".", "stop", "(", ")" ]
reads the new secret from stdin and writes to secrets subdir .
train
false
9,875
def get_request_or_stub(): request = crum.get_current_request() if (request is None): log.warning('Could not retrieve the current request. A stub request will be created instead using settings.SITE_NAME. This should be used *only* in test cases, never in production!') full_url = 'http://{site_name}'.format(site_name=settings.SITE_NAME) parsed_url = urlparse(full_url) return RequestFactory(SERVER_NAME=parsed_url.hostname, SERVER_PORT=(parsed_url.port or 80)).get('/') else: return request
[ "def", "get_request_or_stub", "(", ")", ":", "request", "=", "crum", ".", "get_current_request", "(", ")", "if", "(", "request", "is", "None", ")", ":", "log", ".", "warning", "(", "'Could not retrieve the current request. A stub request will be created instead using settings.SITE_NAME. This should be used *only* in test cases, never in production!'", ")", "full_url", "=", "'http://{site_name}'", ".", "format", "(", "site_name", "=", "settings", ".", "SITE_NAME", ")", "parsed_url", "=", "urlparse", "(", "full_url", ")", "return", "RequestFactory", "(", "SERVER_NAME", "=", "parsed_url", ".", "hostname", ",", "SERVER_PORT", "=", "(", "parsed_url", ".", "port", "or", "80", ")", ")", ".", "get", "(", "'/'", ")", "else", ":", "return", "request" ]
return the current request or a stub request .
train
false
9,876
def p_command_def_bad_arg(p): p[0] = 'BAD ARGUMENT IN DEF STATEMENT'
[ "def", "p_command_def_bad_arg", "(", "p", ")", ":", "p", "[", "0", "]", "=", "'BAD ARGUMENT IN DEF STATEMENT'" ]
command : def id lparen error rparen equals expr .
train
false
9,878
def reject_spurious_dots(*items): for list in items: for tok in list: if ((tok == '.') and (type(tok) == HySymbol)): raise LexException('Malformed dotted list', tok.start_line, tok.start_column)
[ "def", "reject_spurious_dots", "(", "*", "items", ")", ":", "for", "list", "in", "items", ":", "for", "tok", "in", "list", ":", "if", "(", "(", "tok", "==", "'.'", ")", "and", "(", "type", "(", "tok", ")", "==", "HySymbol", ")", ")", ":", "raise", "LexException", "(", "'Malformed dotted list'", ",", "tok", ".", "start_line", ",", "tok", ".", "start_column", ")" ]
reject the spurious dots from items .
train
false
9,880
def json_view(f): @wraps(f) def _wrapped(req, *a, **kw): try: ret = f(req, *a, **kw) blob = json.dumps(ret) return http.HttpResponse(blob, content_type=JSON) except http.Http404 as e: blob = json.dumps({'success': False, 'error': 404, 'message': str(e)}) return http.HttpResponseNotFound(blob, content_type=JSON) except PermissionDenied as e: blob = json.dumps({'success': False, 'error': 403, 'message': str(e)}) return http.HttpResponseForbidden(blob, content_type=JSON) except Exception as e: blob = json.dumps({'success': False, 'error': 500, 'message': str(e)}) return http.HttpResponseServerError(blob, content_type=JSON) return _wrapped
[ "def", "json_view", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_wrapped", "(", "req", ",", "*", "a", ",", "**", "kw", ")", ":", "try", ":", "ret", "=", "f", "(", "req", ",", "*", "a", ",", "**", "kw", ")", "blob", "=", "json", ".", "dumps", "(", "ret", ")", "return", "http", ".", "HttpResponse", "(", "blob", ",", "content_type", "=", "JSON", ")", "except", "http", ".", "Http404", "as", "e", ":", "blob", "=", "json", ".", "dumps", "(", "{", "'success'", ":", "False", ",", "'error'", ":", "404", ",", "'message'", ":", "str", "(", "e", ")", "}", ")", "return", "http", ".", "HttpResponseNotFound", "(", "blob", ",", "content_type", "=", "JSON", ")", "except", "PermissionDenied", "as", "e", ":", "blob", "=", "json", ".", "dumps", "(", "{", "'success'", ":", "False", ",", "'error'", ":", "403", ",", "'message'", ":", "str", "(", "e", ")", "}", ")", "return", "http", ".", "HttpResponseForbidden", "(", "blob", ",", "content_type", "=", "JSON", ")", "except", "Exception", "as", "e", ":", "blob", "=", "json", ".", "dumps", "(", "{", "'success'", ":", "False", ",", "'error'", ":", "500", ",", "'message'", ":", "str", "(", "e", ")", "}", ")", "return", "http", ".", "HttpResponseServerError", "(", "blob", ",", "content_type", "=", "JSON", ")", "return", "_wrapped" ]
decorator that serializes a view 's return value into a json blob , converting http404 , permissiondenied and other exceptions into 404 / 403 / 500 json error responses .
train
false
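a usage sketch for the json_view decorator with a hypothetical django view; the returned dict is serialized, and a raised Http404 becomes a 404 json error response.

from django import http

@json_view
def document_info(request, doc_id):
    if doc_id != '42':
        raise http.Http404('no such document')
    return {'success': True, 'id': doc_id}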
9,881
def write_proj(fname, projs): check_fname(fname, 'projection', ('-proj.fif', '-proj.fif.gz')) fid = io.write.start_file(fname) io.proj._write_proj(fid, projs) io.write.end_file(fid)
[ "def", "write_proj", "(", "fname", ",", "projs", ")", ":", "check_fname", "(", "fname", ",", "'projection'", ",", "(", "'-proj.fif'", ",", "'-proj.fif.gz'", ")", ")", "fid", "=", "io", ".", "write", ".", "start_file", "(", "fname", ")", "io", ".", "proj", ".", "_write_proj", "(", "fid", ",", "projs", ")", "io", ".", "write", ".", "end_file", "(", "fid", ")" ]
write projections to a fif file .
train
false
9,882
def text_date_synonym(name): def getter(self): return getattr(self, name) def setter(self, value): if isinstance(value, basestring): try: setattr(self, name, datetime.strptime(value, u'%Y-%m-%d')) except ValueError: setattr(self, name, None) else: setattr(self, name, value) return synonym(name, descriptor=property(getter, setter))
[ "def", "text_date_synonym", "(", "name", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "name", ")", "def", "setter", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "try", ":", "setattr", "(", "self", ",", "name", ",", "datetime", ".", "strptime", "(", "value", ",", "u'%Y-%m-%d'", ")", ")", "except", "ValueError", ":", "setattr", "(", "self", ",", "name", ",", "None", ")", "else", ":", "setattr", "(", "self", ",", "name", ",", "value", ")", "return", "synonym", "(", "name", ",", "descriptor", "=", "property", "(", "getter", ",", "setter", ")", ")" ]
converts y-m-d date strings into datetime objects .
train
false
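a declarative sqlalchemy sketch for text_date_synonym with a hypothetical Event model; it assumes the host module already imports synonym and datetime as the snippet does. assigning a 'Y-m-d' string parses it to a datetime, and an unparsable string is silently stored as None.

from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Event(Base):
    __tablename__ = 'events'
    id = Column(Integer, primary_key=True)
    _starts = Column('starts', DateTime)
    starts = text_date_synonym('_starts')

e = Event()
e.starts = u'2020-01-15'   # parsed to datetime(2020, 1, 15, 0, 0)
e.starts = u'not a date'   # unparsable -> stored as None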
9,883
@gof.local_optimizer([T.AllocEmpty]) def local_alloc_empty_to_zeros(node): if isinstance(node.op, T.AllocEmpty): return [T.zeros(node.inputs, dtype=node.outputs[0].dtype)]
[ "@", "gof", ".", "local_optimizer", "(", "[", "T", ".", "AllocEmpty", "]", ")", "def", "local_alloc_empty_to_zeros", "(", "node", ")", ":", "if", "isinstance", "(", "node", ".", "op", ",", "T", ".", "AllocEmpty", ")", ":", "return", "[", "T", ".", "zeros", "(", "node", ".", "inputs", ",", "dtype", "=", "node", ".", "outputs", "[", "0", "]", ".", "dtype", ")", "]" ]
this converts allocempty to an alloc of zeros .
train
false
9,885
@_get_client def image_member_update(client, memb_id, values): return client.image_member_update(memb_id=memb_id, values=values)
[ "@", "_get_client", "def", "image_member_update", "(", "client", ",", "memb_id", ",", "values", ")", ":", "return", "client", ".", "image_member_update", "(", "memb_id", "=", "memb_id", ",", "values", "=", "values", ")" ]
update an imagemember object .
train
false
9,886
def handle_data_class_factory(endog, exog): if data_util._is_using_ndarray_type(endog, exog): klass = ModelData elif data_util._is_using_pandas(endog, exog): klass = PandasData elif data_util._is_using_patsy(endog, exog): klass = PatsyData elif data_util._is_using_ndarray(endog, exog): klass = ModelData else: raise ValueError(('unrecognized data structures: %s / %s' % (type(endog), type(exog)))) return klass
[ "def", "handle_data_class_factory", "(", "endog", ",", "exog", ")", ":", "if", "data_util", ".", "_is_using_ndarray_type", "(", "endog", ",", "exog", ")", ":", "klass", "=", "ModelData", "elif", "data_util", ".", "_is_using_pandas", "(", "endog", ",", "exog", ")", ":", "klass", "=", "PandasData", "elif", "data_util", ".", "_is_using_patsy", "(", "endog", ",", "exog", ")", ":", "klass", "=", "PatsyData", "elif", "data_util", ".", "_is_using_ndarray", "(", "endog", ",", "exog", ")", ":", "klass", "=", "ModelData", "else", ":", "raise", "ValueError", "(", "(", "'unrecognized data structures: %s / %s'", "%", "(", "type", "(", "endog", ")", ",", "type", "(", "exog", ")", ")", ")", ")", "return", "klass" ]
given inputs , determine the appropriate model data class to use .
train
false
9,890
def from_array_like(ary, stream=0, gpu_data=None): if (ary.ndim == 0): ary = ary.reshape(1) return DeviceNDArray(ary.shape, ary.strides, ary.dtype, writeback=ary, stream=stream, gpu_data=gpu_data)
[ "def", "from_array_like", "(", "ary", ",", "stream", "=", "0", ",", "gpu_data", "=", "None", ")", ":", "if", "(", "ary", ".", "ndim", "==", "0", ")", ":", "ary", "=", "ary", ".", "reshape", "(", "1", ")", "return", "DeviceNDArray", "(", "ary", ".", "shape", ",", "ary", ".", "strides", ",", "ary", ".", "dtype", ",", "writeback", "=", "ary", ",", "stream", "=", "stream", ",", "gpu_data", "=", "gpu_data", ")" ]
create a devicendarray object that is like ary .
train
false
9,891
def bench_scikit_tree_classifier(X, Y): from sklearn.tree import DecisionTreeClassifier gc.collect() tstart = datetime.now() clf = DecisionTreeClassifier() clf.fit(X, Y).predict(X) delta = (datetime.now() - tstart) scikit_classifier_results.append((delta.seconds + (delta.microseconds / mu_second)))
[ "def", "bench_scikit_tree_classifier", "(", "X", ",", "Y", ")", ":", "from", "sklearn", ".", "tree", "import", "DecisionTreeClassifier", "gc", ".", "collect", "(", ")", "tstart", "=", "datetime", ".", "now", "(", ")", "clf", "=", "DecisionTreeClassifier", "(", ")", "clf", ".", "fit", "(", "X", ",", "Y", ")", ".", "predict", "(", "X", ")", "delta", "=", "(", "datetime", ".", "now", "(", ")", "-", "tstart", ")", "scikit_classifier_results", ".", "append", "(", "(", "delta", ".", "seconds", "+", "(", "delta", ".", "microseconds", "/", "mu_second", ")", ")", ")" ]
benchmark with scikit-learn decision tree classifier .
train
false
9,892
def SetLevel(level): global _Level _Level = level
[ "def", "SetLevel", "(", "level", ")", ":", "global", "_Level", "_Level", "=", "level" ]
set the current indentation level .
train
false
9,893
def vb_get_box(): vb_get_manager() vbox = _virtualboxManager.vbox return vbox
[ "def", "vb_get_box", "(", ")", ":", "vb_get_manager", "(", ")", "vbox", "=", "_virtualboxManager", ".", "vbox", "return", "vbox" ]
get the virtualbox object ; needed for certain operations in the sdk .
train
false
9,895
def prewitt_v(image, mask=None): assert_nD(image, 2) image = img_as_float(image) result = convolve(image, VPREWITT_WEIGHTS) return _mask_filter_result(result, mask)
[ "def", "prewitt_v", "(", "image", ",", "mask", "=", "None", ")", ":", "assert_nD", "(", "image", ",", "2", ")", "image", "=", "img_as_float", "(", "image", ")", "result", "=", "convolve", "(", "image", ",", "VPREWITT_WEIGHTS", ")", "return", "_mask_filter_result", "(", "result", ",", "mask", ")" ]
find the vertical edges of an image using the prewitt transform .
train
false
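a usage sketch for prewitt_v, assuming it mirrors skimage.filters.prewitt_v (the import path is an assumption); a step along the columns produces a strong vertical-edge response.

import numpy as np
from skimage.filters import prewitt_v  # assumed import path

image = np.zeros((8, 8))
image[:, 4:] = 1                   # vertical edge at column 4
edges = prewitt_v(image)
print(np.abs(edges).max() > 0)     # True: the edge is detected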