Column summary (from the dataset viewer header: type, then observed min/max of values or lengths):

    id_within_dataset     int64     values 1 to 55.5k
    snippet               string    lengths 19 to 14.2k
    tokens                list      lengths 6 to 1.63k
    nl                    string    lengths 6 to 352
    split_within_dataset  string    1 distinct value
    is_duplicated         bool      2 classes
5,524
def rfc822date(timeinfo=None, local=1):
    if (not timeinfo):
        if local:
            timeinfo = time.localtime()
        else:
            timeinfo = time.gmtime()
    if local:
        if timeinfo[8]:
            tz = (- time.altzone)
        else:
            tz = (- time.timezone)
        (tzhr, tzmin) = divmod(abs(tz), 3600)
        if tz:
            tzhr *= int((abs(tz) // tz))
        (tzmin, tzsec) = divmod(tzmin, 60)
    else:
        (tzhr, tzmin) = (0, 0)
    return ('%s, %02d %s %04d %02d:%02d:%02d %+03d%02d' % (
        ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timeinfo[6]],
        timeinfo[2],
        ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][(timeinfo[1] - 1)],
        timeinfo[0], timeinfo[3], timeinfo[4], timeinfo[5], tzhr, tzmin))
[ "def", "rfc822date", "(", "timeinfo", "=", "None", ",", "local", "=", "1", ")", ":", "if", "(", "not", "timeinfo", ")", ":", "if", "local", ":", "timeinfo", "=", "time", ".", "localtime", "(", ")", "else", ":", "timeinfo", "=", "time", ".", "gmtime", "(", ")", "if", "local", ":", "if", "timeinfo", "[", "8", "]", ":", "tz", "=", "(", "-", "time", ".", "altzone", ")", "else", ":", "tz", "=", "(", "-", "time", ".", "timezone", ")", "(", "tzhr", ",", "tzmin", ")", "=", "divmod", "(", "abs", "(", "tz", ")", ",", "3600", ")", "if", "tz", ":", "tzhr", "*=", "int", "(", "(", "abs", "(", "tz", ")", "//", "tz", ")", ")", "(", "tzmin", ",", "tzsec", ")", "=", "divmod", "(", "tzmin", ",", "60", ")", "else", ":", "(", "tzhr", ",", "tzmin", ")", "=", "(", "0", ",", "0", ")", "return", "(", "'%s, %02d %s %04d %02d:%02d:%02d %+03d%02d'", "%", "(", "[", "'Mon'", ",", "'Tue'", ",", "'Wed'", ",", "'Thu'", ",", "'Fri'", ",", "'Sat'", ",", "'Sun'", "]", "[", "timeinfo", "[", "6", "]", "]", ",", "timeinfo", "[", "2", "]", ",", "[", "'Jan'", ",", "'Feb'", ",", "'Mar'", ",", "'Apr'", ",", "'May'", ",", "'Jun'", ",", "'Jul'", ",", "'Aug'", ",", "'Sep'", ",", "'Oct'", ",", "'Nov'", ",", "'Dec'", "]", "[", "(", "timeinfo", "[", "1", "]", "-", "1", ")", "]", ",", "timeinfo", "[", "0", "]", ",", "timeinfo", "[", "3", "]", ",", "timeinfo", "[", "4", "]", ",", "timeinfo", "[", "5", "]", ",", "tzhr", ",", "tzmin", ")", ")" ]
format an rfc-2822 compliant date string .
train
false
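A quick usage sketch for the snippet above (illustrative, not part of the dataset; assumes the module's own `import time`):

import time
# Format a fixed UTC struct_time; local=0 takes the GMT branch (+0000 offset).
stamp = time.struct_time((2001, 9, 9, 1, 46, 40, 6, 252, 0))
print(rfc822date(stamp, local=0))  # 'Sun, 09 Sep 2001 01:46:40 +0000'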
5,525
@contextmanager
def json_writer(response, fields, name=None, bom=False):
    if hasattr(response, u'headers'):
        response.headers['Content-Type'] = 'application/json; charset=utf-8'
        if name:
            response.headers['Content-disposition'] = 'attachment; filename="{name}.json"'.format(name=encode_rfc2231(name))
    if bom:
        response.write(UTF8_BOM)
    response.write(('{\n "fields": %s,\n "records": [' % json.dumps(fields, ensure_ascii=False, separators=(u',', u':'))))
    (yield JSONWriter(response, [f['id'] for f in fields]))
    response.write('\n]}\n')
[ "@", "contextmanager", "def", "json_writer", "(", "response", ",", "fields", ",", "name", "=", "None", ",", "bom", "=", "False", ")", ":", "if", "hasattr", "(", "response", ",", "u'headers'", ")", ":", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'application/json; charset=utf-8'", "if", "name", ":", "response", ".", "headers", "[", "'Content-disposition'", "]", "=", "'attachment; filename=\"{name}.json\"'", ".", "format", "(", "name", "=", "encode_rfc2231", "(", "name", ")", ")", "if", "bom", ":", "response", ".", "write", "(", "UTF8_BOM", ")", "response", ".", "write", "(", "(", "'{\\n \"fields\": %s,\\n \"records\": ['", "%", "json", ".", "dumps", "(", "fields", ",", "ensure_ascii", "=", "False", ",", "separators", "=", "(", "u','", ",", "u':'", ")", ")", ")", ")", "(", "yield", "JSONWriter", "(", "response", ",", "[", "f", "[", "'id'", "]", "for", "f", "in", "fields", "]", ")", ")", "response", ".", "write", "(", "'\\n]}\\n'", ")" ]
context manager for writing utf-8 json data to response .
train
false
5,526
def p_iteration_statement_2(t):
    pass
[ "def", "p_iteration_statement_2", "(", "t", ")", ":", "pass" ]
iteration_statement : for lparen expression_opt semi expression_opt semi expression_opt rparen statement .
train
false
5,527
def create_temp_dir():
    complete_dir = get_temp_dir()
    if (not os.path.exists(complete_dir)):
        try:
            os.makedirs(complete_dir)
        except OSError as ose:
            if (ose.errno != errno.EEXIST):
                raise
    os.chmod(complete_dir, stat.S_IRWXU)
    return complete_dir
[ "def", "create_temp_dir", "(", ")", ":", "complete_dir", "=", "get_temp_dir", "(", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "complete_dir", ")", ")", ":", "try", ":", "os", ".", "makedirs", "(", "complete_dir", ")", "except", "OSError", "as", "ose", ":", "if", "(", "ose", ".", "errno", "!=", "errno", ".", "EEXIST", ")", ":", "raise", "os", ".", "chmod", "(", "complete_dir", ",", "stat", ".", "S_IRWXU", ")", "return", "complete_dir" ]
create the temp directory for w3af to work inside .
train
false
5,530
def _get_sigma(sigma, nobs):
    if (sigma is None):
        return (None, None)
    sigma = np.asarray(sigma).squeeze()
    if (sigma.ndim == 0):
        sigma = np.repeat(sigma, nobs)
    if (sigma.ndim == 1):
        if (sigma.shape != (nobs,)):
            raise ValueError(('Sigma must be a scalar, 1d of length %s or a 2d array of shape %s x %s' % (nobs, nobs, nobs)))
        cholsigmainv = (1 / np.sqrt(sigma))
    else:
        if (sigma.shape != (nobs, nobs)):
            raise ValueError(('Sigma must be a scalar, 1d of length %s or a 2d array of shape %s x %s' % (nobs, nobs, nobs)))
        cholsigmainv = np.linalg.cholesky(np.linalg.pinv(sigma)).T
    return (sigma, cholsigmainv)
[ "def", "_get_sigma", "(", "sigma", ",", "nobs", ")", ":", "if", "(", "sigma", "is", "None", ")", ":", "return", "(", "None", ",", "None", ")", "sigma", "=", "np", ".", "asarray", "(", "sigma", ")", ".", "squeeze", "(", ")", "if", "(", "sigma", ".", "ndim", "==", "0", ")", ":", "sigma", "=", "np", ".", "repeat", "(", "sigma", ",", "nobs", ")", "if", "(", "sigma", ".", "ndim", "==", "1", ")", ":", "if", "(", "sigma", ".", "shape", "!=", "(", "nobs", ",", ")", ")", ":", "raise", "ValueError", "(", "(", "'Sigma must be a scalar, 1d of length %s or a 2d array of shape %s x %s'", "%", "(", "nobs", ",", "nobs", ",", "nobs", ")", ")", ")", "cholsigmainv", "=", "(", "1", "/", "np", ".", "sqrt", "(", "sigma", ")", ")", "else", ":", "if", "(", "sigma", ".", "shape", "!=", "(", "nobs", ",", "nobs", ")", ")", ":", "raise", "ValueError", "(", "(", "'Sigma must be a scalar, 1d of length %s or a 2d array of shape %s x %s'", "%", "(", "nobs", ",", "nobs", ",", "nobs", ")", ")", ")", "cholsigmainv", "=", "np", ".", "linalg", ".", "cholesky", "(", "np", ".", "linalg", ".", "pinv", "(", "sigma", ")", ")", ".", "T", "return", "(", "sigma", ",", "cholsigmainv", ")" ]
returns sigma for gls and the inverse of its cholesky decomposition .
train
false
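Illustrative call for the snippet above (assumes numpy as np, as in its statsmodels module): a scalar sigma is broadcast to length nobs, and the Cholesky inverse reduces to elementwise 1/sqrt(sigma).

import numpy as np
sigma, cholsigmainv = _get_sigma(2.0, nobs=3)
print(sigma)         # [2. 2. 2.]
print(cholsigmainv)  # [0.7071... 0.7071... 0.7071...]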
5,532
def convert_mysql_timestamp(timestamp):
    if ((not PY2) and isinstance(timestamp, (bytes, bytearray))):
        timestamp = timestamp.decode('ascii')
    if (timestamp[4] == '-'):
        return convert_datetime(timestamp)
    timestamp += ('0' * (14 - len(timestamp)))
    (year, month, day, hour, minute, second) = (
        int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]),
        int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14]))
    try:
        return datetime.datetime(year, month, day, hour, minute, second)
    except ValueError:
        return None
[ "def", "convert_mysql_timestamp", "(", "timestamp", ")", ":", "if", "(", "(", "not", "PY2", ")", "and", "isinstance", "(", "timestamp", ",", "(", "bytes", ",", "bytearray", ")", ")", ")", ":", "timestamp", "=", "timestamp", ".", "decode", "(", "'ascii'", ")", "if", "(", "timestamp", "[", "4", "]", "==", "'-'", ")", ":", "return", "convert_datetime", "(", "timestamp", ")", "timestamp", "+=", "(", "'0'", "*", "(", "14", "-", "len", "(", "timestamp", ")", ")", ")", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", "=", "(", "int", "(", "timestamp", "[", ":", "4", "]", ")", ",", "int", "(", "timestamp", "[", "4", ":", "6", "]", ")", ",", "int", "(", "timestamp", "[", "6", ":", "8", "]", ")", ",", "int", "(", "timestamp", "[", "8", ":", "10", "]", ")", ",", "int", "(", "timestamp", "[", "10", ":", "12", "]", ")", ",", "int", "(", "timestamp", "[", "12", ":", "14", "]", ")", ")", "try", ":", "return", "datetime", ".", "datetime", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", "except", "ValueError", ":", "return", "None" ]
convert a mysql timestamp to a timestamp object .
train
false
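Usage sketch (illustrative; assumes the module's `import datetime` and its PY2 flag):

# Compact 14-digit timestamps are split positionally; ISO-style strings are
# delegated to convert_datetime() via the '-' check at index 4.
print(convert_mysql_timestamp('20240131235959'))
# datetime.datetime(2024, 1, 31, 23, 59, 59)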
5,533
def matthews_corrcoef(y_true, y_pred, sample_weight=None):
    (y_type, y_true, y_pred) = _check_targets(y_true, y_pred)
    if (y_type != 'binary'):
        raise ValueError(('%s is not supported' % y_type))
    lb = LabelEncoder()
    lb.fit(np.hstack([y_true, y_pred]))
    y_true = lb.transform(y_true)
    y_pred = lb.transform(y_pred)
    mean_yt = np.average(y_true, weights=sample_weight)
    mean_yp = np.average(y_pred, weights=sample_weight)
    y_true_u_cent = (y_true - mean_yt)
    y_pred_u_cent = (y_pred - mean_yp)
    cov_ytyp = np.average((y_true_u_cent * y_pred_u_cent), weights=sample_weight)
    var_yt = np.average((y_true_u_cent ** 2), weights=sample_weight)
    var_yp = np.average((y_pred_u_cent ** 2), weights=sample_weight)
    mcc = (cov_ytyp / np.sqrt((var_yt * var_yp)))
    if np.isnan(mcc):
        return 0.0
    else:
        return mcc
[ "def", "matthews_corrcoef", "(", "y_true", ",", "y_pred", ",", "sample_weight", "=", "None", ")", ":", "(", "y_type", ",", "y_true", ",", "y_pred", ")", "=", "_check_targets", "(", "y_true", ",", "y_pred", ")", "if", "(", "y_type", "!=", "'binary'", ")", ":", "raise", "ValueError", "(", "(", "'%s is not supported'", "%", "y_type", ")", ")", "lb", "=", "LabelEncoder", "(", ")", "lb", ".", "fit", "(", "np", ".", "hstack", "(", "[", "y_true", ",", "y_pred", "]", ")", ")", "y_true", "=", "lb", ".", "transform", "(", "y_true", ")", "y_pred", "=", "lb", ".", "transform", "(", "y_pred", ")", "mean_yt", "=", "np", ".", "average", "(", "y_true", ",", "weights", "=", "sample_weight", ")", "mean_yp", "=", "np", ".", "average", "(", "y_pred", ",", "weights", "=", "sample_weight", ")", "y_true_u_cent", "=", "(", "y_true", "-", "mean_yt", ")", "y_pred_u_cent", "=", "(", "y_pred", "-", "mean_yp", ")", "cov_ytyp", "=", "np", ".", "average", "(", "(", "y_true_u_cent", "*", "y_pred_u_cent", ")", ",", "weights", "=", "sample_weight", ")", "var_yt", "=", "np", ".", "average", "(", "(", "y_true_u_cent", "**", "2", ")", ",", "weights", "=", "sample_weight", ")", "var_yp", "=", "np", ".", "average", "(", "(", "y_pred_u_cent", "**", "2", ")", ",", "weights", "=", "sample_weight", ")", "mcc", "=", "(", "cov_ytyp", "/", "np", ".", "sqrt", "(", "(", "var_yt", "*", "var_yp", ")", ")", ")", "if", "np", ".", "isnan", "(", "mcc", ")", ":", "return", "0.0", "else", ":", "return", "mcc" ]
compute the matthews correlation coefficient for binary classes . the matthews correlation coefficient is used in machine learning as a measure of the quality of binary classifications .
train
false
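Worked example for the snippet above (mirrors the example in the scikit-learn docs; assumes the function and its helpers are importable):

y_true = [+1, +1, +1, -1]
y_pred = [+1, -1, +1, +1]
print(matthews_corrcoef(y_true, y_pred))  # -0.3333..., i.e. cov/sqrt(var_yt*var_yp) = -1/3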
5,536
def _generateExtraMetricSpecs(options):
    global _metricSpecSchema
    results = []
    for metric in options['metrics']:
        for propertyName in _metricSpecSchema['properties'].keys():
            _getPropertyValue(_metricSpecSchema, propertyName, metric)
        (specString, label) = _generateMetricSpecString(
            field=metric['field'], metric=metric['metric'], params=metric['params'],
            inferenceElement=metric['inferenceElement'], returnLabel=True)
        if metric['logged']:
            options['loggedMetrics'].append(label)
        results.append(specString)
    return results
[ "def", "_generateExtraMetricSpecs", "(", "options", ")", ":", "global", "_metricSpecSchema", "results", "=", "[", "]", "for", "metric", "in", "options", "[", "'metrics'", "]", ":", "for", "propertyName", "in", "_metricSpecSchema", "[", "'properties'", "]", ".", "keys", "(", ")", ":", "_getPropertyValue", "(", "_metricSpecSchema", ",", "propertyName", ",", "metric", ")", "(", "specString", ",", "label", ")", "=", "_generateMetricSpecString", "(", "field", "=", "metric", "[", "'field'", "]", ",", "metric", "=", "metric", "[", "'metric'", "]", ",", "params", "=", "metric", "[", "'params'", "]", ",", "inferenceElement", "=", "metric", "[", "'inferenceElement'", "]", ",", "returnLabel", "=", "True", ")", "if", "metric", "[", "'logged'", "]", ":", "options", "[", "'loggedMetrics'", "]", ".", "append", "(", "label", ")", "results", ".", "append", "(", "specString", ")", "return", "results" ]
generates the non-default metrics specified by the expgenerator params .
train
true
5,537
def adapt_llvm_version(llvmir):
    llvmir = rename_register(llvmir)
    return add_metadata_type(llvmir)
[ "def", "adapt_llvm_version", "(", "llvmir", ")", ":", "llvmir", "=", "rename_register", "(", "llvmir", ")", "return", "add_metadata_type", "(", "llvmir", ")" ]
adapt the llvm ir to match the syntax required by hlc .
train
false
5,538
def get_toolbar_item_for_plugins():
    global TOOLBAR_ITEMS_PLUGINS
    return TOOLBAR_ITEMS_PLUGINS
[ "def", "get_toolbar_item_for_plugins", "(", ")", ":", "global", "TOOLBAR_ITEMS_PLUGINS", "return", "TOOLBAR_ITEMS_PLUGINS" ]
returns the toolbar actions set by plugins .
train
false
5,539
def require_cwd_to_be_oppia(allow_deploy_dir=False):
    is_oppia_dir = os.getcwd().endswith('oppia')
    current_dirname = os.path.basename(os.path.normpath(os.getcwd()))
    is_deploy_dir = (current_dirname.startswith('deploy-') and os.path.isdir(os.path.join(os.getcwd(), '..', 'oppia')))
    if (is_oppia_dir or (allow_deploy_dir and is_deploy_dir)):
        return
    raise Exception('Please run this script from the oppia/ directory.')
[ "def", "require_cwd_to_be_oppia", "(", "allow_deploy_dir", "=", "False", ")", ":", "is_oppia_dir", "=", "os", ".", "getcwd", "(", ")", ".", "endswith", "(", "'oppia'", ")", "current_dirname", "=", "os", ".", "path", ".", "basename", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "getcwd", "(", ")", ")", ")", "is_deploy_dir", "=", "(", "current_dirname", ".", "startswith", "(", "'deploy-'", ")", "and", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'..'", ",", "'oppia'", ")", ")", ")", "if", "(", "is_oppia_dir", "or", "(", "allow_deploy_dir", "and", "is_deploy_dir", ")", ")", ":", "return", "raise", "Exception", "(", "'Please run this script from the oppia/ directory.'", ")" ]
ensures that the current working directory ends in oppia .
train
false
5,540
def customize_mpl():
    print('Setting custom matplotlib visual style')
    rcParams['figure.figsize'] = (10, 6)
    rcParams['figure.dpi'] = 150
    rcParams['axes.color_cycle'] = dark2_colors
    rcParams['lines.linewidth'] = 2
    rcParams['axes.grid'] = True
    rcParams['axes.facecolor'] = '#eeeeee'
    rcParams['font.size'] = 14
    rcParams['patch.edgecolor'] = 'none'
[ "def", "customize_mpl", "(", ")", ":", "print", "(", "'Setting custom matplotlib visual style'", ")", "rcParams", "[", "'figure.figsize'", "]", "=", "(", "10", ",", "6", ")", "rcParams", "[", "'figure.dpi'", "]", "=", "150", "rcParams", "[", "'axes.color_cycle'", "]", "=", "dark2_colors", "rcParams", "[", "'lines.linewidth'", "]", "=", "2", "rcParams", "[", "'axes.grid'", "]", "=", "True", "rcParams", "[", "'axes.facecolor'", "]", "=", "'#eeeeee'", "rcParams", "[", "'font.size'", "]", "=", "14", "rcParams", "[", "'patch.edgecolor'", "]", "=", "'none'" ]
tweak matplotlib visual style .
train
false
5,541
def tile_key(layer, coord, format, key_prefix):
    name = layer.name()
    tile = ('%(zoom)d/%(column)d/%(row)d' % coord.__dict__)
    key = str(('%(key_prefix)s/%(name)s/%(tile)s.%(format)s' % locals()))
    return key
[ "def", "tile_key", "(", "layer", ",", "coord", ",", "format", ",", "key_prefix", ")", ":", "name", "=", "layer", ".", "name", "(", ")", "tile", "=", "(", "'%(zoom)d/%(column)d/%(row)d'", "%", "coord", ".", "__dict__", ")", "key", "=", "str", "(", "(", "'%(key_prefix)s/%(name)s/%(tile)s.%(format)s'", "%", "locals", "(", ")", ")", ")", "return", "key" ]
return a tile key string .
train
false
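Illustrative call with minimal stand-ins (the real layer/coord objects come from the caching library; these stubs are hypothetical):

class _Layer(object):
    def name(self):
        return 'osm'

class _Coord(object):
    def __init__(self, zoom, column, row):
        (self.zoom, self.column, self.row) = (zoom, column, row)

print(tile_key(_Layer(), _Coord(12, 656, 1582), 'png', 'tiles'))
# 'tiles/osm/12/656/1582.png'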
5,542
def mmap_readwrite(f, sz=0, close=True):
    return _mmap_do(f, sz, mmap.MAP_SHARED, (mmap.PROT_READ | mmap.PROT_WRITE), close)
[ "def", "mmap_readwrite", "(", "f", ",", "sz", "=", "0", ",", "close", "=", "True", ")", ":", "return", "_mmap_do", "(", "f", ",", "sz", ",", "mmap", ".", "MAP_SHARED", ",", "(", "mmap", ".", "PROT_READ", "|", "mmap", ".", "PROT_WRITE", ")", ",", "close", ")" ]
create a read-write memory mapped region on file f .
train
false
5,543
def BOOL(value):
    if (value in (u'1', u'0')):
        return bool(int(value))
    raise ValueError((u'%r is not 0 or 1' % value))
[ "def", "BOOL", "(", "value", ")", ":", "if", "(", "value", "in", "(", "u'1'", ",", "u'0'", ")", ")", ":", "return", "bool", "(", "int", "(", "value", ")", ")", "raise", "ValueError", "(", "(", "u'%r is not 0 or 1'", "%", "value", ")", ")" ]
convert the values 0 and 1 into booleans .
train
false
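Illustrative behaviour of the validator above: only the strings '1' and '0' are accepted, everything else raises.

assert BOOL(u'1') is True
assert BOOL(u'0') is False
# BOOL(u'yes') -> ValueError: u'yes' is not 0 or 1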
5,545
def test_existing_path_FileLinks_repr():
    td = mkdtemp()
    tf1 = NamedTemporaryFile(dir=td)
    tf2 = NamedTemporaryFile(dir=td)
    fl = display.FileLinks(td)
    actual = repr(fl)
    actual = actual.split('\n')
    actual.sort()
    expected = [('%s/' % td), (' %s' % split(tf1.name)[1]), (' %s' % split(tf2.name)[1])]
    expected.sort()
    nt.assert_equal(actual, expected)
[ "def", "test_existing_path_FileLinks_repr", "(", ")", ":", "td", "=", "mkdtemp", "(", ")", "tf1", "=", "NamedTemporaryFile", "(", "dir", "=", "td", ")", "tf2", "=", "NamedTemporaryFile", "(", "dir", "=", "td", ")", "fl", "=", "display", ".", "FileLinks", "(", "td", ")", "actual", "=", "repr", "(", "fl", ")", "actual", "=", "actual", ".", "split", "(", "'\\n'", ")", "actual", ".", "sort", "(", ")", "expected", "=", "[", "(", "'%s/'", "%", "td", ")", ",", "(", "' %s'", "%", "split", "(", "tf1", ".", "name", ")", "[", "1", "]", ")", ",", "(", "' %s'", "%", "split", "(", "tf2", ".", "name", ")", "[", "1", "]", ")", "]", "expected", ".", "sort", "(", ")", "nt", ".", "assert_equal", "(", "actual", ",", "expected", ")" ]
filelinks: calling repr() functions as expected on existing directory .
train
false
5,546
def gps_noise_rng(radius):
    noise = gauss(0, (radius / 3.0))
    noise = min(max((- radius), noise), radius)
    return noise
[ "def", "gps_noise_rng", "(", "radius", ")", ":", "noise", "=", "gauss", "(", "0", ",", "(", "radius", "/", "3.0", ")", ")", "noise", "=", "min", "(", "max", "(", "(", "-", "radius", ")", ",", "noise", ")", ",", "radius", ")", "return", "noise" ]
simulates gps noise .
train
false
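Sketch for the snippet above (assumes `from random import gauss` as in its module): sigma is radius/3, so about 99.7% of raw draws already fall inside the clip window, and the min/max clamp handles the rest.

from random import gauss
samples = [gps_noise_rng(5.0) for _ in range(1000)]
assert all((-5.0 <= s <= 5.0) for s in samples)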
5,547
def RegisterNamedPath(name, path):
    keyStr = (BuildDefaultPythonKey() + '\\PythonPath')
    if name:
        keyStr = ((keyStr + '\\') + name)
    win32api.RegSetValue(GetRootKey(), keyStr, win32con.REG_SZ, path)
[ "def", "RegisterNamedPath", "(", "name", ",", "path", ")", ":", "keyStr", "=", "(", "BuildDefaultPythonKey", "(", ")", "+", "'\\\\PythonPath'", ")", "if", "name", ":", "keyStr", "=", "(", "(", "keyStr", "+", "'\\\\'", ")", "+", "name", ")", "win32api", ".", "RegSetValue", "(", "GetRootKey", "(", ")", ",", "keyStr", ",", "win32con", ".", "REG_SZ", ",", "path", ")" ]
register a named path - ie. a named entry under the PythonPath registry key .
train
false
5,548
def register_webapi_capabilities(capabilities_id, caps):
    if (not capabilities_id):
        raise ValueError(u'The capabilities_id attribute must not be None')
    if (capabilities_id in _registered_capabilities):
        raise KeyError((u'"%s" is already a registered set of capabilities' % capabilities_id))
    if (capabilities_id in _capabilities_defaults):
        raise KeyError((u'"%s" is reserved for the default set of capabilities' % capabilities_id))
    _registered_capabilities[capabilities_id] = caps
[ "def", "register_webapi_capabilities", "(", "capabilities_id", ",", "caps", ")", ":", "if", "(", "not", "capabilities_id", ")", ":", "raise", "ValueError", "(", "u'The capabilities_id attribute must not be None'", ")", "if", "(", "capabilities_id", "in", "_registered_capabilities", ")", ":", "raise", "KeyError", "(", "(", "u'\"%s\" is already a registered set of capabilities'", "%", "capabilities_id", ")", ")", "if", "(", "capabilities_id", "in", "_capabilities_defaults", ")", ":", "raise", "KeyError", "(", "(", "u'\"%s\" is reserved for the default set of capabilities'", "%", "capabilities_id", ")", ")", "_registered_capabilities", "[", "capabilities_id", "]", "=", "caps" ]
registers a set of web api capabilities .
train
false
5,549
def encode_feedparser_dict(d):
    if (isinstance(d, feedparser.FeedParserDict) or isinstance(d, dict)):
        j = {}
        for k in d.keys():
            j[k] = encode_feedparser_dict(d[k])
        return j
    elif isinstance(d, list):
        l = []
        for k in d:
            l.append(encode_feedparser_dict(k))
        return l
    else:
        return d
[ "def", "encode_feedparser_dict", "(", "d", ")", ":", "if", "(", "isinstance", "(", "d", ",", "feedparser", ".", "FeedParserDict", ")", "or", "isinstance", "(", "d", ",", "dict", ")", ")", ":", "j", "=", "{", "}", "for", "k", "in", "d", ".", "keys", "(", ")", ":", "j", "[", "k", "]", "=", "encode_feedparser_dict", "(", "d", "[", "k", "]", ")", "return", "j", "elif", "isinstance", "(", "d", ",", "list", ")", ":", "l", "=", "[", "]", "for", "k", "in", "d", ":", "l", ".", "append", "(", "encode_feedparser_dict", "(", "k", ")", ")", "return", "l", "else", ":", "return", "d" ]
helper function to get rid of feedparser bs with a deep copy .
train
false
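Because the helper also recurses through plain dicts and lists, it can be exercised without a real parse result (assumes the module's `import feedparser`, so the isinstance check resolves):

nested = {'title': 'post', 'tags': [{'term': 'python'}]}
print(encode_feedparser_dict(nested))
# {'title': 'post', 'tags': [{'term': 'python'}]}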
5,551
def refund_seat(course_enrollment, request_user):
    course_key_str = unicode(course_enrollment.course_id)
    unenrolled_user = course_enrollment.user
    try:
        refund_ids = ecommerce_api_client((request_user or unenrolled_user)).refunds.post(
            {u'course_id': course_key_str, u'username': unenrolled_user.username})
    except HttpClientError as exc:
        if ((exc.response.status_code == 403) and (request_user != unenrolled_user)):
            log.warning(u'User [%s] was not authorized to initiate a refund for user [%s] upon unenrollment from course [%s]', request_user.id, unenrolled_user.id, course_key_str)
            return []
        else:
            raise exc
    if refund_ids:
        log.info(u'Refund successfully opened for user [%s], course [%s]: %r', unenrolled_user.id, course_key_str, refund_ids)
        if (course_enrollment.mode != u'verified'):
            log.info(u'Skipping refund email notification for non-verified mode for user [%s], course [%s], mode: [%s]', course_enrollment.user.id, course_enrollment.course_id, course_enrollment.mode)
        else:
            try:
                send_refund_notification(course_enrollment, refund_ids)
            except:
                log.warning(u'Could not send email notification for refund.', exc_info=True)
    else:
        log.debug(u'No refund opened for user [%s], course [%s]', unenrolled_user.id, course_key_str)
    return refund_ids
[ "def", "refund_seat", "(", "course_enrollment", ",", "request_user", ")", ":", "course_key_str", "=", "unicode", "(", "course_enrollment", ".", "course_id", ")", "unenrolled_user", "=", "course_enrollment", ".", "user", "try", ":", "refund_ids", "=", "ecommerce_api_client", "(", "(", "request_user", "or", "unenrolled_user", ")", ")", ".", "refunds", ".", "post", "(", "{", "u'course_id'", ":", "course_key_str", ",", "u'username'", ":", "unenrolled_user", ".", "username", "}", ")", "except", "HttpClientError", "as", "exc", ":", "if", "(", "(", "exc", ".", "response", ".", "status_code", "==", "403", ")", "and", "(", "request_user", "!=", "unenrolled_user", ")", ")", ":", "log", ".", "warning", "(", "u'User [%s] was not authorized to initiate a refund for user [%s] upon unenrollment from course [%s]'", ",", "request_user", ".", "id", ",", "unenrolled_user", ".", "id", ",", "course_key_str", ")", "return", "[", "]", "else", ":", "raise", "exc", "if", "refund_ids", ":", "log", ".", "info", "(", "u'Refund successfully opened for user [%s], course [%s]: %r'", ",", "unenrolled_user", ".", "id", ",", "course_key_str", ",", "refund_ids", ")", "if", "(", "course_enrollment", ".", "mode", "!=", "u'verified'", ")", ":", "log", ".", "info", "(", "u'Skipping refund email notification for non-verified mode for user [%s], course [%s], mode: [%s]'", ",", "course_enrollment", ".", "user", ".", "id", ",", "course_enrollment", ".", "course_id", ",", "course_enrollment", ".", "mode", ")", "else", ":", "try", ":", "send_refund_notification", "(", "course_enrollment", ",", "refund_ids", ")", "except", ":", "log", ".", "warning", "(", "u'Could not send email notification for refund.'", ",", "exc_info", "=", "True", ")", "else", ":", "log", ".", "debug", "(", "u'No refund opened for user [%s], course [%s]'", ",", "unenrolled_user", ".", "id", ",", "course_key_str", ")", "return", "refund_ids" ]
attempt to initiate a refund for any orders associated with the seat being unenrolled .
train
false
5,552
def _check_pyface_backend():
    try:
        from traits.trait_base import ETSConfig
    except ImportError:
        return (None, 2)
    backend = ETSConfig.toolkit
    if (backend == 'qt4'):
        status = 0
    else:
        status = 1
    return (backend, status)
[ "def", "_check_pyface_backend", "(", ")", ":", "try", ":", "from", "traits", ".", "trait_base", "import", "ETSConfig", "except", "ImportError", ":", "return", "(", "None", ",", "2", ")", "backend", "=", "ETSConfig", ".", "toolkit", "if", "(", "backend", "==", "'qt4'", ")", ":", "status", "=", "0", "else", ":", "status", "=", "1", "return", "(", "backend", ",", "status", ")" ]
check the currently selected pyface backend .
train
false
5,554
def save_promo_imgs(obj, img_content):
    tmp_dst = os.path.join(settings.TMP_PATH, 'promo_imgs', uuid.uuid4().hex)
    with private_storage.open(tmp_dst, 'wb') as fd:
        fd.write(img_content)
    dirname = obj.get_promo_img_dir()
    destination = os.path.join(dirname, ('%s' % obj.pk))
    remove_promo_imgs(destination)
    resize_promo_imgs(tmp_dst, destination, mkt.PROMO_IMG_SIZES, set_modified_on=[obj])
[ "def", "save_promo_imgs", "(", "obj", ",", "img_content", ")", ":", "tmp_dst", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "TMP_PATH", ",", "'promo_imgs'", ",", "uuid", ".", "uuid4", "(", ")", ".", "hex", ")", "with", "private_storage", ".", "open", "(", "tmp_dst", ",", "'wb'", ")", "as", "fd", ":", "fd", ".", "write", "(", "img_content", ")", "dirname", "=", "obj", ".", "get_promo_img_dir", "(", ")", "destination", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "(", "'%s'", "%", "obj", ".", "pk", ")", ")", "remove_promo_imgs", "(", "destination", ")", "resize_promo_imgs", "(", "tmp_dst", ",", "destination", ",", "mkt", ".", "PROMO_IMG_SIZES", ",", "set_modified_on", "=", "[", "obj", "]", ")" ]
saves the promo image for obj to its final destination .
train
false
5,555
def VARMA(x, B, C, const=0):
    P = B.shape[0]
    Q = C.shape[0]
    T = x.shape[0]
    xhat = np.zeros(x.shape)
    e = np.zeros(x.shape)
    start = max(P, Q)
    for t in range(start, T):
        xhat[t, :] = ((const + (x[(t - P):t, :, np.newaxis] * B).sum(axis=1).sum(axis=0))
                      + (e[(t - Q):t, :, np.newaxis] * C).sum(axis=1).sum(axis=0))
        e[t, :] = (x[t, :] - xhat[t, :])
    return (xhat, e)
[ "def", "VARMA", "(", "x", ",", "B", ",", "C", ",", "const", "=", "0", ")", ":", "P", "=", "B", ".", "shape", "[", "0", "]", "Q", "=", "C", ".", "shape", "[", "0", "]", "T", "=", "x", ".", "shape", "[", "0", "]", "xhat", "=", "np", ".", "zeros", "(", "x", ".", "shape", ")", "e", "=", "np", ".", "zeros", "(", "x", ".", "shape", ")", "start", "=", "max", "(", "P", ",", "Q", ")", "for", "t", "in", "range", "(", "start", ",", "T", ")", ":", "xhat", "[", "t", ",", ":", "]", "=", "(", "(", "const", "+", "(", "x", "[", "(", "t", "-", "P", ")", ":", "t", ",", ":", ",", "np", ".", "newaxis", "]", "*", "B", ")", ".", "sum", "(", "axis", "=", "1", ")", ".", "sum", "(", "axis", "=", "0", ")", ")", "+", "(", "e", "[", "(", "t", "-", "Q", ")", ":", "t", ",", ":", ",", "np", ".", "newaxis", "]", "*", "C", ")", ".", "sum", "(", "axis", "=", "1", ")", ".", "sum", "(", "axis", "=", "0", ")", ")", "e", "[", "t", ",", ":", "]", "=", "(", "x", "[", "t", ",", ":", "]", "-", "xhat", "[", "t", ",", ":", "]", ")", "return", "(", "xhat", ",", "e", ")" ]
multivariate linear filter: xhat[t] = const + sum_p B[p] x[t-p] + sum_q C[q] e[t-q] .
train
false
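Worked example for the snippet above (numpy assumed): with one series, P = Q = 1 and C zeroed, the filter degenerates to an AR(1) prediction xhat[t] = 0.5 * x[t-1].

import numpy as np
x = np.arange(4.0).reshape(4, 1)   # [0, 1, 2, 3]
B = np.array([[[0.5]]])            # shape (P, k, k)
C = np.zeros((1, 1, 1))
xhat, e = VARMA(x, B, C)
print(xhat.ravel())                # [0.  0.  0.5 1. ]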
5,556
def get_all_subscribers_of_creator(user_id):
    subscribers_model = user_models.UserSubscribersModel.get(user_id, strict=False)
    return (subscribers_model.subscriber_ids if subscribers_model else [])
[ "def", "get_all_subscribers_of_creator", "(", "user_id", ")", ":", "subscribers_model", "=", "user_models", ".", "UserSubscribersModel", ".", "get", "(", "user_id", ",", "strict", "=", "False", ")", "return", "(", "subscribers_model", ".", "subscriber_ids", "if", "subscribers_model", "else", "[", "]", ")" ]
returns a list with ids of all users who have subscribed to this creator .
train
false
5,557
def fixColors(argdata):
    refcolor = argdata[0][0]
    if (refcolor == '+'):
        # original read `len(data)`, an undefined name here; `argdata` is clearly intended
        for i in range(len(argdata)):
            argdata = flipCol(argdata, i)
    return argdata
[ "def", "fixColors", "(", "argdata", ")", ":", "refcolor", "=", "argdata", "[", "0", "]", "[", "0", "]", "if", "(", "refcolor", "==", "'+'", ")", ":", "for", "i", "in", "range", "(", "len", "(", "data", ")", ")", ":", "argdata", "=", "flipCol", "(", "argdata", ",", "i", ")", "return", "argdata" ]
invert entire image to make sure the top left corner has a - and not + .
train
false
5,558
def _count_blocks(obj):
    block_counts = defaultdict(int)
    block = BytesIO()
    n = 0
    block_write = block.write
    block_seek = block.seek
    block_truncate = block.truncate
    block_getvalue = block.getvalue
    for c in chain(*obj.as_raw_chunks()):
        if (sys.version_info[0] == 3):
            c = c.to_bytes(1, 'big')
        block_write(c)
        n += 1
        if ((c == '\n') or (n == _BLOCK_SIZE)):
            value = block_getvalue()
            block_counts[hash(value)] += len(value)
            block_seek(0)
            block_truncate()
            n = 0
    if (n > 0):
        last_block = block_getvalue()
        block_counts[hash(last_block)] += len(last_block)
    return block_counts
[ "def", "_count_blocks", "(", "obj", ")", ":", "block_counts", "=", "defaultdict", "(", "int", ")", "block", "=", "BytesIO", "(", ")", "n", "=", "0", "block_write", "=", "block", ".", "write", "block_seek", "=", "block", ".", "seek", "block_truncate", "=", "block", ".", "truncate", "block_getvalue", "=", "block", ".", "getvalue", "for", "c", "in", "chain", "(", "*", "obj", ".", "as_raw_chunks", "(", ")", ")", ":", "if", "(", "sys", ".", "version_info", "[", "0", "]", "==", "3", ")", ":", "c", "=", "c", ".", "to_bytes", "(", "1", ",", "'big'", ")", "block_write", "(", "c", ")", "n", "+=", "1", "if", "(", "(", "c", "==", "'\\n'", ")", "or", "(", "n", "==", "_BLOCK_SIZE", ")", ")", ":", "value", "=", "block_getvalue", "(", ")", "block_counts", "[", "hash", "(", "value", ")", "]", "+=", "len", "(", "value", ")", "block_seek", "(", "0", ")", "block_truncate", "(", ")", "n", "=", "0", "if", "(", "n", ">", "0", ")", ":", "last_block", "=", "block_getvalue", "(", ")", "block_counts", "[", "hash", "(", "last_block", ")", "]", "+=", "len", "(", "last_block", ")", "return", "block_counts" ]
count the blocks in an object .
train
false
5,560
def get_package_key_suffix(version):
    if is_release(version):
        return ''
    else:
        return '-testing'
[ "def", "get_package_key_suffix", "(", "version", ")", ":", "if", "is_release", "(", "version", ")", ":", "return", "''", "else", ":", "return", "'-testing'" ]
return the suffix for the keys in which packages for a given version are stored .
train
false
5,561
def _map_port_from_yaml_to_docker(port):
    if isinstance(port, six.string_types):
        (port, sep, protocol) = port.partition('/')
        if protocol:
            return (int(port), protocol)
        return int(port)
    return port
[ "def", "_map_port_from_yaml_to_docker", "(", "port", ")", ":", "if", "isinstance", "(", "port", ",", "six", ".", "string_types", ")", ":", "(", "port", ",", "sep", ",", "protocol", ")", "=", "port", ".", "partition", "(", "'/'", ")", "if", "protocol", ":", "return", "(", "int", "(", "port", ")", ",", "protocol", ")", "return", "int", "(", "port", ")", "return", "port" ]
docker-py interface is not very nice: while port_bindings accept strings like '8080/tcp', plain ports must be passed as ints or (port, protocol) tuples .
train
false
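Illustrative calls (assume the module's `import six`):

print(_map_port_from_yaml_to_docker('8080/tcp'))  # (8080, 'tcp')
print(_map_port_from_yaml_to_docker('8080'))      # 8080
print(_map_port_from_yaml_to_docker(8080))        # 8080 (non-strings pass through)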
5,563
@register.tag
def get_upcoming_events(parser, token):
    try:
        (tag_name, arg) = token.contents.split(None, 1)
    except ValueError:
        raise template.TemplateSyntaxError, ('%r tag requires arguments' % token.contents.split()[0])
    matches = re.search('([0-9]+) as (\\w+)', arg)
    if (not matches):
        raise template.TemplateSyntaxError, ('%r tag had invalid arguments' % tag_name)
    (limit, var_name) = matches.groups()
    return UpcomingEventsNode(var_name, limit)
[ "@", "register", ".", "tag", "def", "get_upcoming_events", "(", "parser", ",", "token", ")", ":", "try", ":", "(", "tag_name", ",", "arg", ")", "=", "token", ".", "contents", ".", "split", "(", "None", ",", "1", ")", "except", "ValueError", ":", "raise", "template", ".", "TemplateSyntaxError", ",", "(", "'%r tag requires arguments'", "%", "token", ".", "contents", ".", "split", "(", ")", "[", "0", "]", ")", "matches", "=", "re", ".", "search", "(", "'([0-9]+) as (\\\\w+)'", ",", "arg", ")", "if", "(", "not", "matches", ")", ":", "raise", "template", ".", "TemplateSyntaxError", ",", "(", "'%r tag had invalid arguments'", "%", "tag_name", ")", "(", "limit", ",", "var_name", ")", "=", "matches", ".", "groups", "(", ")", "return", "UpcomingEventsNode", "(", "var_name", ",", "limit", ")" ]
returns a node which alters the context to provide upcoming events . the upcoming events are stored in the variable specified .
train
false
5,565
def default_config():
    config = Config()
    config.TerminalInteractiveShell.colors = 'NoColor'
    config.TerminalTerminalInteractiveShell.term_title = (False,)
    config.TerminalInteractiveShell.autocall = 0
    f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False)
    config.HistoryManager.hist_file = f.name
    f.close()
    config.HistoryManager.db_cache_size = 10000
    return config
[ "def", "default_config", "(", ")", ":", "config", "=", "Config", "(", ")", "config", ".", "TerminalInteractiveShell", ".", "colors", "=", "'NoColor'", "config", ".", "TerminalTerminalInteractiveShell", ".", "term_title", "=", "(", "False", ",", ")", "config", ".", "TerminalInteractiveShell", ".", "autocall", "=", "0", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "u'test_hist.sqlite'", ",", "delete", "=", "False", ")", "config", ".", "HistoryManager", ".", "hist_file", "=", "f", ".", "name", "f", ".", "close", "(", ")", "config", ".", "HistoryManager", ".", "db_cache_size", "=", "10000", "return", "config" ]
build a config object with defaults suitable for testing the interactive shell .
train
false
5,566
def get_surface(pdb_file, PDB_TO_XYZR='pdb_to_xyzr', MSMS='msms'):
    xyz_tmp = tempfile.mktemp()
    PDB_TO_XYZR = (PDB_TO_XYZR + ' %s > %s')
    make_xyz = (PDB_TO_XYZR % (pdb_file, xyz_tmp))
    os.system(make_xyz)
    assert os.path.isfile(xyz_tmp), ('Failed to generate XYZR file using command:\n%s' % make_xyz)
    surface_tmp = tempfile.mktemp()
    MSMS = ((MSMS + ' -probe_radius 1.5 -if %s -of %s > ') + tempfile.mktemp())
    make_surface = (MSMS % (xyz_tmp, surface_tmp))
    os.system(make_surface)
    surface_file = (surface_tmp + '.vert')
    assert os.path.isfile(surface_file), ('Failed to generate surface file using command:\n%s' % make_surface)
    surface = _read_vertex_array(surface_file)
    return surface
[ "def", "get_surface", "(", "pdb_file", ",", "PDB_TO_XYZR", "=", "'pdb_to_xyzr'", ",", "MSMS", "=", "'msms'", ")", ":", "xyz_tmp", "=", "tempfile", ".", "mktemp", "(", ")", "PDB_TO_XYZR", "=", "(", "PDB_TO_XYZR", "+", "' %s > %s'", ")", "make_xyz", "=", "(", "PDB_TO_XYZR", "%", "(", "pdb_file", ",", "xyz_tmp", ")", ")", "os", ".", "system", "(", "make_xyz", ")", "assert", "os", ".", "path", ".", "isfile", "(", "xyz_tmp", ")", ",", "(", "'Failed to generate XYZR file using command:\\n%s'", "%", "make_xyz", ")", "surface_tmp", "=", "tempfile", ".", "mktemp", "(", ")", "MSMS", "=", "(", "(", "MSMS", "+", "' -probe_radius 1.5 -if %s -of %s > '", ")", "+", "tempfile", ".", "mktemp", "(", ")", ")", "make_surface", "=", "(", "MSMS", "%", "(", "xyz_tmp", ",", "surface_tmp", ")", ")", "os", ".", "system", "(", "make_surface", ")", "surface_file", "=", "(", "surface_tmp", "+", "'.vert'", ")", "assert", "os", ".", "path", ".", "isfile", "(", "surface_file", ")", ",", "(", "'Failed to generate surface file using command:\\n%s'", "%", "make_surface", ")", "surface", "=", "_read_vertex_array", "(", "surface_file", ")", "return", "surface" ]
return a numeric array that represents the vertex list of the molecular surface .
train
false
5,568
def _prepare_check(context, action, target, pluralized):
    if (target is None):
        target = {}
    match_rule = _build_match_rule(action, target, pluralized)
    credentials = context.to_policy_values()
    return (match_rule, target, credentials)
[ "def", "_prepare_check", "(", "context", ",", "action", ",", "target", ",", "pluralized", ")", ":", "if", "(", "target", "is", "None", ")", ":", "target", "=", "{", "}", "match_rule", "=", "_build_match_rule", "(", "action", ",", "target", ",", "pluralized", ")", "credentials", "=", "context", ".", "to_policy_values", "(", ")", "return", "(", "match_rule", ",", "target", ",", "credentials", ")" ]
prepare rule .
train
false
5,571
def generate_targets(target_source):
    target_source = os.path.abspath(target_source)
    if os.path.isdir(target_source):
        target_source_files = glob.glob((target_source + '/*.tsv'))
    else:
        target_source_files = [target_source]
    for target_source_file in target_source_files:
        with open(target_source_file, 'r') as f:
            for line in f.readlines():
                if line:
                    line = line.strip()
                    if ((not line) or line.startswith('#')):
                        continue
                    (yield line_to_targets(line))
[ "def", "generate_targets", "(", "target_source", ")", ":", "target_source", "=", "os", ".", "path", ".", "abspath", "(", "target_source", ")", "if", "os", ".", "path", ".", "isdir", "(", "target_source", ")", ":", "target_source_files", "=", "glob", ".", "glob", "(", "(", "target_source", "+", "'/*.tsv'", ")", ")", "else", ":", "target_source_files", "=", "[", "target_source", "]", "for", "target_source_file", "in", "target_source_files", ":", "with", "open", "(", "target_source_file", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "if", "line", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "(", "not", "line", ")", "or", "line", ".", "startswith", "(", "'#'", ")", ")", ":", "continue", "(", "yield", "line_to_targets", "(", "line", ")", ")" ]
generate all targets from tsv files in specified file or directory .
train
false
5,572
@home_routes.route('/set_role', methods=('GET', 'POST'))
@login_required
def set_role():
    id = request.args['id']
    role = request.args['roles']
    user = DataGetter.get_user(id)
    user.role = role
    save_to_db(user, 'User Role updated')
    return redirect(url_for('.roles_manager'))
[ "@", "home_routes", ".", "route", "(", "'/set_role'", ",", "methods", "=", "(", "'GET'", ",", "'POST'", ")", ")", "@", "login_required", "def", "set_role", "(", ")", ":", "id", "=", "request", ".", "args", "[", "'id'", "]", "role", "=", "request", ".", "args", "[", "'roles'", "]", "user", "=", "DataGetter", ".", "get_user", "(", "id", ")", "user", ".", "role", "=", "role", "save_to_db", "(", "user", ",", "'User Role updated'", ")", "return", "redirect", "(", "url_for", "(", "'.roles_manager'", ")", ")" ]
assign role to username .
train
false
5,573
def munge_filename(filename):
    if (not isinstance(filename, unicode)):
        filename = decode_path(filename)
    filename = os.path.split(filename)[1]
    filename = filename.lower().strip()
    filename = substitute_ascii_equivalents(filename)
    filename = re.sub(u'[^a-zA-Z0-9_. -]', '', filename).replace(u' ', u'-')
    filename = re.sub(u'-+', u'-', filename)
    (name, ext) = os.path.splitext(filename)
    ext = ext[:MAX_FILENAME_EXTENSION_LENGTH]
    ext_len = len(ext)
    name = _munge_to_length(name, max(1, (MIN_FILENAME_TOTAL_LENGTH - ext_len)), (MAX_FILENAME_TOTAL_LENGTH - ext_len))
    filename = (name + ext)
    return filename
[ "def", "munge_filename", "(", "filename", ")", ":", "if", "(", "not", "isinstance", "(", "filename", ",", "unicode", ")", ")", ":", "filename", "=", "decode_path", "(", "filename", ")", "filename", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "[", "1", "]", "filename", "=", "filename", ".", "lower", "(", ")", ".", "strip", "(", ")", "filename", "=", "substitute_ascii_equivalents", "(", "filename", ")", "filename", "=", "re", ".", "sub", "(", "u'[^a-zA-Z0-9_. -]'", ",", "''", ",", "filename", ")", ".", "replace", "(", "u' '", ",", "u'-'", ")", "filename", "=", "re", ".", "sub", "(", "u'-+'", ",", "u'-'", ",", "filename", ")", "(", "name", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "ext", "=", "ext", "[", ":", "MAX_FILENAME_EXTENSION_LENGTH", "]", "ext_len", "=", "len", "(", "ext", ")", "name", "=", "_munge_to_length", "(", "name", ",", "max", "(", "1", ",", "(", "MIN_FILENAME_TOTAL_LENGTH", "-", "ext_len", ")", ")", ",", "(", "MAX_FILENAME_TOTAL_LENGTH", "-", "ext_len", ")", ")", "filename", "=", "(", "name", "+", "ext", ")", "return", "filename" ]
tidies a filename ; keeps the filename extension .
train
false
5,574
def vserver_add(v_name, v_ip, v_port, v_type, **connection_args):
    ret = True
    if vserver_exists(v_name, **connection_args):
        return False
    nitro = _connect(**connection_args)
    if (nitro is None):
        return False
    vserver = NSLBVServer()
    vserver.set_name(v_name)
    vserver.set_ipv46(v_ip)
    vserver.set_port(v_port)
    vserver.set_servicetype(v_type.upper())
    try:
        NSLBVServer.add(nitro, vserver)
    except NSNitroError as error:
        log.debug('netscaler module error - NSLBVServer.add() failed: {0}'.format(error))
        ret = False
    _disconnect(nitro)
    return ret
[ "def", "vserver_add", "(", "v_name", ",", "v_ip", ",", "v_port", ",", "v_type", ",", "**", "connection_args", ")", ":", "ret", "=", "True", "if", "vserver_exists", "(", "v_name", ",", "**", "connection_args", ")", ":", "return", "False", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "False", "vserver", "=", "NSLBVServer", "(", ")", "vserver", ".", "set_name", "(", "v_name", ")", "vserver", ".", "set_ipv46", "(", "v_ip", ")", "vserver", ".", "set_port", "(", "v_port", ")", "vserver", ".", "set_servicetype", "(", "v_type", ".", "upper", "(", ")", ")", "try", ":", "NSLBVServer", ".", "add", "(", "nitro", ",", "vserver", ")", "except", "NSNitroError", "as", "error", ":", "log", ".", "debug", "(", "'netscaler module error - NSLBVServer.add() failed: {0}'", ".", "format", "(", "error", ")", ")", "ret", "=", "False", "_disconnect", "(", "nitro", ")", "return", "ret" ]
add a new lb vserver .
train
true
5,576
def index_chunk(cls, id_list, reraise=False):
    for ids in chunked(id_list, 80):
        documents = []
        for id_ in ids:
            try:
                documents.append(cls.extract_document(id_))
            except UnindexMeBro:
                cls.unindex(id_)
            except Exception:
                log.exception('Unable to extract/index document (id: %d)', id_)
                if reraise:
                    raise
        if documents:
            cls.bulk_index(documents, id_field='id')
        if settings.DEBUG:
            reset_queries()
[ "def", "index_chunk", "(", "cls", ",", "id_list", ",", "reraise", "=", "False", ")", ":", "for", "ids", "in", "chunked", "(", "id_list", ",", "80", ")", ":", "documents", "=", "[", "]", "for", "id_", "in", "ids", ":", "try", ":", "documents", ".", "append", "(", "cls", ".", "extract_document", "(", "id_", ")", ")", "except", "UnindexMeBro", ":", "cls", ".", "unindex", "(", "id_", ")", "except", "Exception", ":", "log", ".", "exception", "(", "'Unable to extract/index document (id: %d)'", ",", "id_", ")", "if", "reraise", ":", "raise", "if", "documents", ":", "cls", ".", "bulk_index", "(", "documents", ",", "id_field", "=", "'id'", ")", "if", "settings", ".", "DEBUG", ":", "reset_queries", "(", ")" ]
index a chunk of documents .
train
false
5,577
def make_friedman2(n_samples=100, noise=0.0, random_state=None):
    generator = check_random_state(random_state)
    X = generator.rand(n_samples, 4)
    X[:, 0] *= 100
    X[:, 1] *= (520 * np.pi)
    X[:, 1] += (40 * np.pi)
    X[:, 3] *= 10
    X[:, 3] += 1
    y = ((((X[:, 0] ** 2) + (((X[:, 1] * X[:, 2]) - (1 / (X[:, 1] * X[:, 3]))) ** 2)) ** 0.5)
         + (noise * generator.randn(n_samples)))
    return (X, y)
[ "def", "make_friedman2", "(", "n_samples", "=", "100", ",", "noise", "=", "0.0", ",", "random_state", "=", "None", ")", ":", "generator", "=", "check_random_state", "(", "random_state", ")", "X", "=", "generator", ".", "rand", "(", "n_samples", ",", "4", ")", "X", "[", ":", ",", "0", "]", "*=", "100", "X", "[", ":", ",", "1", "]", "*=", "(", "520", "*", "np", ".", "pi", ")", "X", "[", ":", ",", "1", "]", "+=", "(", "40", "*", "np", ".", "pi", ")", "X", "[", ":", ",", "3", "]", "*=", "10", "X", "[", ":", ",", "3", "]", "+=", "1", "y", "=", "(", "(", "(", "(", "X", "[", ":", ",", "0", "]", "**", "2", ")", "+", "(", "(", "(", "X", "[", ":", ",", "1", "]", "*", "X", "[", ":", ",", "2", "]", ")", "-", "(", "1", "/", "(", "X", "[", ":", ",", "1", "]", "*", "X", "[", ":", ",", "3", "]", ")", ")", ")", "**", "2", ")", ")", "**", "0.5", ")", "+", "(", "noise", "*", "generator", ".", "randn", "(", "n_samples", ")", ")", ")", "return", "(", "X", ",", "y", ")" ]
generate the "friedman #2" regression problem . this dataset is described in friedman [1] and breiman [2] .
train
false
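Shape check for the generator above (its signature matches the public sklearn.datasets.make_friedman2):

X, y = make_friedman2(n_samples=200, noise=0.1, random_state=0)
print(X.shape, y.shape)  # (200, 4) (200,)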
5,578
def do_dict_entry_for_item(parser, token):
    bits = token.contents.split()
    if (len(bits) != 6):
        raise template.TemplateSyntaxError(("'%s' tag takes exactly five arguments" % bits[0]))
    if (bits[2] != 'from'):
        raise template.TemplateSyntaxError(("second argument to '%s' tag must be 'from'" % bits[0]))
    if (bits[4] != 'as'):
        raise template.TemplateSyntaxError(("fourth argument to '%s' tag must be 'as'" % bits[0]))
    return DictEntryForItemNode(bits[1], bits[3], bits[5])
[ "def", "do_dict_entry_for_item", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "6", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"'%s' tag takes exactly five arguments\"", "%", "bits", "[", "0", "]", ")", ")", "if", "(", "bits", "[", "2", "]", "!=", "'from'", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"second argument to '%s' tag must be 'from'\"", "%", "bits", "[", "0", "]", ")", ")", "if", "(", "bits", "[", "4", "]", "!=", "'as'", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "(", "\"fourth argument to '%s' tag must be 'as'\"", "%", "bits", "[", "0", "]", ")", ")", "return", "DictEntryForItemNode", "(", "bits", "[", "1", "]", ",", "bits", "[", "3", "]", ",", "bits", "[", "5", "]", ")" ]
given an object and a dictionary keyed with object ids - as returned by the votes_by_user and scores_for_objects template tags - retrieves the value for the given object and stores it in a context variable .
train
false
5,579
def getElementNodeObject(evaluatedLinkValue):
    if (evaluatedLinkValue.__class__.__name__ != 'ElementNode'):
        print 'Warning, could not get ElementNode in getElementNodeObject in evaluate for:'
        print evaluatedLinkValue.__class__.__name__
        print evaluatedLinkValue
        return None
    if (evaluatedLinkValue.xmlObject == None):
        print 'Warning, evaluatedLinkValue.xmlObject is None in getElementNodeObject in evaluate for:'
        print evaluatedLinkValue
        return None
    return evaluatedLinkValue.xmlObject
[ "def", "getElementNodeObject", "(", "evaluatedLinkValue", ")", ":", "if", "(", "evaluatedLinkValue", ".", "__class__", ".", "__name__", "!=", "'ElementNode'", ")", ":", "print", "'Warning, could not get ElementNode in getElementNodeObject in evaluate for:'", "print", "evaluatedLinkValue", ".", "__class__", ".", "__name__", "print", "evaluatedLinkValue", "return", "None", "if", "(", "evaluatedLinkValue", ".", "xmlObject", "==", "None", ")", ":", "print", "'Warning, evaluatedLinkValue.xmlObject is None in getElementNodeObject in evaluate for:'", "print", "evaluatedLinkValue", "return", "None", "return", "evaluatedLinkValue", ".", "xmlObject" ]
get elementnodeobject .
train
false
5,580
def wait_for_occupied_port(host, port, timeout=None):
    if (not host):
        raise ValueError("Host values of '' or None are not allowed.")
    if (timeout is None):
        timeout = occupied_port_timeout
    for trial in range(50):
        try:
            check_port(host, port, timeout=timeout)
        except IOError:
            return
        else:
            time.sleep(timeout)
    if (host == client_host(host)):
        raise IOError(('Port %r not bound on %r' % (port, host)))
    msg = ('Unable to verify that the server is bound on %r' % port)
    warnings.warn(msg)
[ "def", "wait_for_occupied_port", "(", "host", ",", "port", ",", "timeout", "=", "None", ")", ":", "if", "(", "not", "host", ")", ":", "raise", "ValueError", "(", "\"Host values of '' or None are not allowed.\"", ")", "if", "(", "timeout", "is", "None", ")", ":", "timeout", "=", "occupied_port_timeout", "for", "trial", "in", "range", "(", "50", ")", ":", "try", ":", "check_port", "(", "host", ",", "port", ",", "timeout", "=", "timeout", ")", "except", "IOError", ":", "return", "else", ":", "time", ".", "sleep", "(", "timeout", ")", "if", "(", "host", "==", "client_host", "(", "host", ")", ")", ":", "raise", "IOError", "(", "(", "'Port %r not bound on %r'", "%", "(", "port", ",", "host", ")", ")", ")", "msg", "=", "(", "'Unable to verify that the server is bound on %r'", "%", "port", ")", "warnings", ".", "warn", "(", "msg", ")" ]
wait for the specified port to become active .
train
false
5,581
def show_security_group_rule(security_group_rule_id, profile=None):
    conn = _auth(profile)
    return conn.show_security_group_rule(security_group_rule_id)
[ "def", "show_security_group_rule", "(", "security_group_rule_id", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_security_group_rule", "(", "security_group_rule_id", ")" ]
fetches information of a certain security group rule .
train
false
5,583
def minorticks_off():
    gca().minorticks_off()
[ "def", "minorticks_off", "(", ")", ":", "gca", "(", ")", ".", "minorticks_off", "(", ")" ]
remove minor ticks from the current plot .
train
false
5,584
def send_tcp(data, host, port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    sock.sendall(data)
    response = sock.recv(8192)
    length = struct.unpack('!H', bytes(response[:2]))[0]
    while ((len(response) - 2) < length):
        response += sock.recv(8192)
    sock.close()
    return response
[ "def", "send_tcp", "(", "data", ",", "host", ",", "port", ")", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "connect", "(", "(", "host", ",", "port", ")", ")", "sock", ".", "sendall", "(", "data", ")", "response", "=", "sock", ".", "recv", "(", "8192", ")", "length", "=", "struct", ".", "unpack", "(", "'!H'", ",", "bytes", "(", "response", "[", ":", "2", "]", ")", ")", "[", "0", "]", "while", "(", "(", "len", "(", "response", ")", "-", "2", ")", "<", "length", ")", ":", "response", "+=", "sock", ".", "recv", "(", "8192", ")", "sock", ".", "close", "(", ")", "return", "response" ]
helper function to send/receive dns tcp request .
train
false
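Usage sketch for the snippet above. Note that the caller must frame the request itself: TCP DNS prefixes the payload with a 2-byte length, and the reply loop above assumes the same framing on the response. `build_dns_query` is a hypothetical packet builder, not part of the source.

import struct
query = build_dns_query('example.com')          # hypothetical; any raw DNS packet works
framed = struct.pack('!H', len(query)) + query  # 2-byte big-endian length prefix
reply = send_tcp(framed, '8.8.8.8', 53)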
5,585
def make_definitions():
    source = dedent('\n import sys\n\n class C:\n pass\n\n x = C()\n\n def f():\n pass\n\n def g():\n yield\n\n h = lambda: None\n ')
    definitions = []
    definitions += defined_names(source)
    source += dedent('\n variable = sys or C or x or f or g or g() or h')
    lines = source.splitlines()
    script = Script(source, len(lines), len('variable'), None)
    definitions += script.goto_definitions()
    script2 = Script(source, 4, len('class C'), None)
    definitions += script2.usages()
    source_param = 'def f(a): return a'
    script_param = Script(source_param, 1, len(source_param), None)
    definitions += script_param.goto_assignments()
    return definitions
[ "def", "make_definitions", "(", ")", ":", "source", "=", "dedent", "(", "'\\n import sys\\n\\n class C:\\n pass\\n\\n x = C()\\n\\n def f():\\n pass\\n\\n def g():\\n yield\\n\\n h = lambda: None\\n '", ")", "definitions", "=", "[", "]", "definitions", "+=", "defined_names", "(", "source", ")", "source", "+=", "dedent", "(", "'\\n variable = sys or C or x or f or g or g() or h'", ")", "lines", "=", "source", ".", "splitlines", "(", ")", "script", "=", "Script", "(", "source", ",", "len", "(", "lines", ")", ",", "len", "(", "'variable'", ")", ",", "None", ")", "definitions", "+=", "script", ".", "goto_definitions", "(", ")", "script2", "=", "Script", "(", "source", ",", "4", ",", "len", "(", "'class C'", ")", ",", "None", ")", "definitions", "+=", "script2", ".", "usages", "(", ")", "source_param", "=", "'def f(a): return a'", "script_param", "=", "Script", "(", "source_param", ",", "1", ",", "len", "(", "source_param", ")", ",", "None", ")", "definitions", "+=", "script_param", ".", "goto_assignments", "(", ")", "return", "definitions" ]
return a list of definitions for parametrized tests .
train
false
5,587
def _parse_commit_response(commit_response_pb):
    mut_results = commit_response_pb.mutation_results
    index_updates = commit_response_pb.index_updates
    completed_keys = [mut_result.key for mut_result in mut_results if mut_result.HasField('key')]
    return (index_updates, completed_keys)
[ "def", "_parse_commit_response", "(", "commit_response_pb", ")", ":", "mut_results", "=", "commit_response_pb", ".", "mutation_results", "index_updates", "=", "commit_response_pb", ".", "index_updates", "completed_keys", "=", "[", "mut_result", ".", "key", "for", "mut_result", "in", "mut_results", "if", "mut_result", ".", "HasField", "(", "'key'", ")", "]", "return", "(", "index_updates", ",", "completed_keys", ")" ]
extract response data from a commit response .
train
true
5,588
def get_active_backend():
    active_backend_and_jobs = getattr(_backend, 'backend_and_jobs', None)
    if (active_backend_and_jobs is not None):
        return active_backend_and_jobs
    active_backend = BACKENDS[DEFAULT_BACKEND]()
    return (active_backend, DEFAULT_N_JOBS)
[ "def", "get_active_backend", "(", ")", ":", "active_backend_and_jobs", "=", "getattr", "(", "_backend", ",", "'backend_and_jobs'", ",", "None", ")", "if", "(", "active_backend_and_jobs", "is", "not", "None", ")", ":", "return", "active_backend_and_jobs", "active_backend", "=", "BACKENDS", "[", "DEFAULT_BACKEND", "]", "(", ")", "return", "(", "active_backend", ",", "DEFAULT_N_JOBS", ")" ]
return the active default backend .
train
false
5,589
def _unique_partition_id(course):
    used_ids = set((p.id for p in course.user_partitions))
    return generate_int_id(used_ids=used_ids)
[ "def", "_unique_partition_id", "(", "course", ")", ":", "used_ids", "=", "set", "(", "(", "p", ".", "id", "for", "p", "in", "course", ".", "user_partitions", ")", ")", "return", "generate_int_id", "(", "used_ids", "=", "used_ids", ")" ]
return a unique user partition id for the course .
train
false
5,590
def one_hot(x, m=None):
    if (m is None):
        m = T.cast((T.max(x) + 1), 'int32')
    return T.eye(m)[T.cast(x, 'int32')]
[ "def", "one_hot", "(", "x", ",", "m", "=", "None", ")", ":", "if", "(", "m", "is", "None", ")", ":", "m", "=", "T", ".", "cast", "(", "(", "T", ".", "max", "(", "x", ")", "+", "1", ")", ",", "'int32'", ")", "return", "T", ".", "eye", "(", "m", ")", "[", "T", ".", "cast", "(", "x", ",", "'int32'", ")", "]" ]
return a one-hot transform of values . parameters: values - 1d array of integer values .
train
false
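Sketch for the snippet above (assumes Theano's conventional `import theano.tensor as T`; Theano is unmaintained, so treat this as era-appropriate illustration only):

import theano
import theano.tensor as T
x = T.ivector('x')
f = theano.function([x], one_hot(x))  # m inferred as max(x) + 1
print(f([0, 2, 1]))
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]]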
5,592
def scalene(x, alpha, beta):
    return ((alpha * pos(x)) + (beta * neg(x)))
[ "def", "scalene", "(", "x", ",", "alpha", ",", "beta", ")", ":", "return", "(", "(", "alpha", "*", "pos", "(", "x", ")", ")", "+", "(", "beta", "*", "neg", "(", "x", ")", ")", ")" ]
alias for alpha*pos(x) + beta*neg(x) .
train
false
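The identity is easiest to see with plain numpy stand-ins for the pos/neg atoms (in the original module these are symbolic expressions; the lambdas below are illustrative only):

import numpy as np
pos = lambda v: np.maximum(v, 0)   # positive part
neg = lambda v: np.maximum(-v, 0)  # negative part, as a magnitude
print(scalene(np.array([-2.0, 3.0]), alpha=1.0, beta=0.5))  # [1. 3.]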
5,593
def test_addition():
    assert ((1 + 1) == 2)
[ "def", "test_addition", "(", ")", ":", "assert", "(", "(", "1", "+", "1", ")", "==", "2", ")" ]
check that an addition at the limit of precision is seen .
train
false
5,594
def GetAllRuntimes():
    return _all_runtimes
[ "def", "GetAllRuntimes", "(", ")", ":", "return", "_all_runtimes" ]
returns the list of all valid runtimes .
train
false
5,596
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None, database=None):
    from django.db import connections, DEFAULT_DB_ALIAS
    if (not database):
        database = DEFAULT_DB_ALIAS
    connection = connections[database]
    if (not hasattr(connection.ops, 'spatial_version')):
        raise Exception('The `add_srs_entry` utility only works with spatial backends.')
    if (connection.ops.oracle or connection.ops.mysql):
        raise Exception('This utility does not support the Oracle or MySQL spatial backends.')
    SpatialRefSys = connection.ops.spatial_ref_sys()
    if (not isinstance(srs, SpatialReference)):
        srs = SpatialReference(srs)
    if (srs.srid is None):
        raise Exception('Spatial reference requires an SRID to be compatible with the spatial backend.')
    kwargs = {'srid': srs.srid, 'auth_name': auth_name, 'auth_srid': (auth_srid or srs.srid), 'proj4text': srs.proj4}
    if connection.ops.postgis:
        kwargs['srtext'] = srs.wkt
    if connection.ops.spatialite:
        kwargs['ref_sys_name'] = (ref_sys_name or srs.name)
    try:
        sr = SpatialRefSys.objects.using(database).get(srid=srs.srid)
    except SpatialRefSys.DoesNotExist:
        sr = SpatialRefSys.objects.using(database).create(**kwargs)
[ "def", "add_srs_entry", "(", "srs", ",", "auth_name", "=", "'EPSG'", ",", "auth_srid", "=", "None", ",", "ref_sys_name", "=", "None", ",", "database", "=", "None", ")", ":", "from", "django", ".", "db", "import", "connections", ",", "DEFAULT_DB_ALIAS", "if", "(", "not", "database", ")", ":", "database", "=", "DEFAULT_DB_ALIAS", "connection", "=", "connections", "[", "database", "]", "if", "(", "not", "hasattr", "(", "connection", ".", "ops", ",", "'spatial_version'", ")", ")", ":", "raise", "Exception", "(", "'The `add_srs_entry` utility only works with spatial backends.'", ")", "if", "(", "connection", ".", "ops", ".", "oracle", "or", "connection", ".", "ops", ".", "mysql", ")", ":", "raise", "Exception", "(", "'This utility does not support the Oracle or MySQL spatial backends.'", ")", "SpatialRefSys", "=", "connection", ".", "ops", ".", "spatial_ref_sys", "(", ")", "if", "(", "not", "isinstance", "(", "srs", ",", "SpatialReference", ")", ")", ":", "srs", "=", "SpatialReference", "(", "srs", ")", "if", "(", "srs", ".", "srid", "is", "None", ")", ":", "raise", "Exception", "(", "'Spatial reference requires an SRID to be compatible with the spatial backend.'", ")", "kwargs", "=", "{", "'srid'", ":", "srs", ".", "srid", ",", "'auth_name'", ":", "auth_name", ",", "'auth_srid'", ":", "(", "auth_srid", "or", "srs", ".", "srid", ")", ",", "'proj4text'", ":", "srs", ".", "proj4", "}", "if", "connection", ".", "ops", ".", "postgis", ":", "kwargs", "[", "'srtext'", "]", "=", "srs", ".", "wkt", "if", "connection", ".", "ops", ".", "spatialite", ":", "kwargs", "[", "'ref_sys_name'", "]", "=", "(", "ref_sys_name", "or", "srs", ".", "name", ")", "try", ":", "sr", "=", "SpatialRefSys", ".", "objects", ".", "using", "(", "database", ")", ".", "get", "(", "srid", "=", "srs", ".", "srid", ")", "except", "SpatialRefSys", ".", "DoesNotExist", ":", "sr", "=", "SpatialRefSys", ".", "objects", ".", "using", "(", "database", ")", ".", "create", "(", "**", "kwargs", ")" ]
this function takes a gdal spatialreference system and adds its information to the spatial_ref_sys table of the spatial backend .
train
false
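A minimal usage sketch for add_srs_entry, assuming a GeoDjango project with a PostGIS backend; the SRID used here is illustrative only:

    from django.contrib.gis.gdal import SpatialReference

    # Build a spatial reference from an EPSG code and register it in the
    # backend's spatial_ref_sys table (the get/create at the end makes
    # repeated calls harmless).
    srs = SpatialReference(3857)
    add_srs_entry(srs)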
5,597
def parse_only_date(raw, assume_utc=True, as_utc=True): f = (utcnow if assume_utc else now) default = f().replace(hour=0, minute=0, second=0, microsecond=0, day=15) return fix_only_date(parse_date(raw, default=default, assume_utc=assume_utc, as_utc=as_utc))
[ "def", "parse_only_date", "(", "raw", ",", "assume_utc", "=", "True", ",", "as_utc", "=", "True", ")", ":", "f", "=", "(", "utcnow", "if", "assume_utc", "else", "now", ")", "default", "=", "f", "(", ")", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ",", "day", "=", "15", ")", "return", "fix_only_date", "(", "parse_date", "(", "raw", ",", "default", "=", "default", ",", "assume_utc", "=", "assume_utc", ",", "as_utc", "=", "as_utc", ")", ")" ]
parse a date string that contains no time information in a manner that guarantees that the month and year are always correct in all timezones .
train
false
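A usage sketch, assuming calibre's parse_date accepts a partial ISO string such as a bare year-month; the mid-month default is what keeps month and year stable across timezones:

    # Missing day/time fields fall back to the 15th at midnight, so
    # converting the result between timezones cannot shift it into a
    # neighbouring month.
    d = parse_only_date('2012-03')   # a date in March 2012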
5,599
def test_greater_than(): assert (hug.types.greater_than(10)('11') == 11) assert (hug.types.greater_than(10)(11) == 11) assert (hug.types.greater_than(10)(1000) == 1000) assert ('10' in hug.types.greater_than(10).__doc__) with pytest.raises(ValueError): assert hug.types.greater_than(10)(9)
[ "def", "test_greater_than", "(", ")", ":", "assert", "(", "hug", ".", "types", ".", "greater_than", "(", "10", ")", "(", "'11'", ")", "==", "11", ")", "assert", "(", "hug", ".", "types", ".", "greater_than", "(", "10", ")", "(", "11", ")", "==", "11", ")", "assert", "(", "hug", ".", "types", ".", "greater_than", "(", "10", ")", "(", "1000", ")", "==", "1000", ")", "assert", "(", "'10'", "in", "hug", ".", "types", ".", "greater_than", "(", "10", ")", ".", "__doc__", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "assert", "hug", ".", "types", ".", "greater_than", "(", "10", ")", "(", "9", ")" ]
tests that hug's greater_than type successfully limits the values passed in .
train
false

5,602
def startCc(CallControlCapabilities_presence=0): a = TpPd(pd=3) b = MessageType(mesType=9) packet = (a / b) if (CallControlCapabilities_presence is 1): c = CallControlCapabilitiesHdr(ieiCCC=21, eightBitCCC=0) packet = (packet / c) return packet
[ "def", "startCc", "(", "CallControlCapabilities_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "9", ")", "packet", "=", "(", "a", "/", "b", ")", "if", "(", "CallControlCapabilities_presence", "is", "1", ")", ":", "c", "=", "CallControlCapabilitiesHdr", "(", "ieiCCC", "=", "21", ",", "eightBitCCC", "=", "0", ")", "packet", "=", "(", "packet", "/", "c", ")", "return", "packet" ]
start cc section 9 .
train
true
5,606
def addOffsetAddToLists(loop, offset, vector3Index, vertexes): vector3Index += offset loop.append(vector3Index) vertexes.append(vector3Index)
[ "def", "addOffsetAddToLists", "(", "loop", ",", "offset", ",", "vector3Index", ",", "vertexes", ")", ":", "vector3Index", "+=", "offset", "loop", ".", "append", "(", "vector3Index", ")", "vertexes", ".", "append", "(", "vector3Index", ")" ]
add an indexed loop to the vertexes .
train
false
5,607
def listQThreads(): thr = findObj('[Tt]hread') thr = [t for t in thr if isinstance(t, QtCore.QThread)] import sip for t in thr: print('--> ', t) print((' Qt ID: 0x%x' % sip.unwrapinstance(t)))
[ "def", "listQThreads", "(", ")", ":", "thr", "=", "findObj", "(", "'[Tt]hread'", ")", "thr", "=", "[", "t", "for", "t", "in", "thr", "if", "isinstance", "(", "t", ",", "QtCore", ".", "QThread", ")", "]", "import", "sip", "for", "t", "in", "thr", ":", "print", "(", "'--> '", ",", "t", ")", "print", "(", "(", "' Qt ID: 0x%x'", "%", "sip", ".", "unwrapinstance", "(", "t", ")", ")", ")" ]
prints thread ids for all qthreads .
train
false
5,608
def config_settings_loader(request=None): conf = SPConfig() conf.load(copy.deepcopy(settings.SAML_CONFIG)) return conf
[ "def", "config_settings_loader", "(", "request", "=", "None", ")", ":", "conf", "=", "SPConfig", "(", ")", "conf", ".", "load", "(", "copy", ".", "deepcopy", "(", "settings", ".", "SAML_CONFIG", ")", ")", "return", "conf" ]
utility function to load the pysaml2 configuration .
train
true
5,609
def find_checks(argument_name): checks = [] function_type = type(find_checks) for (name, function) in globals().iteritems(): if (type(function) is function_type): args = inspect.getargspec(function)[0] if ((len(args) >= 1) and args[0].startswith(argument_name)): checks.append((name, function, args)) checks.sort() return checks
[ "def", "find_checks", "(", "argument_name", ")", ":", "checks", "=", "[", "]", "function_type", "=", "type", "(", "find_checks", ")", "for", "(", "name", ",", "function", ")", "in", "globals", "(", ")", ".", "iteritems", "(", ")", ":", "if", "(", "type", "(", "function", ")", "is", "function_type", ")", ":", "args", "=", "inspect", ".", "getargspec", "(", "function", ")", "[", "0", "]", "if", "(", "(", "len", "(", "args", ")", ">=", "1", ")", "and", "args", "[", "0", "]", ".", "startswith", "(", "argument_name", ")", ")", ":", "checks", ".", "append", "(", "(", "name", ",", "function", ",", "args", ")", ")", "checks", ".", "sort", "(", ")", "return", "checks" ]
find all globally visible functions where the first argument name starts with argument_name .
train
true
5,610
def do_aggregate_list(cs, args): aggregates = cs.aggregates.list() columns = ['Id', 'Name', 'Availability Zone'] if (cs.api_version >= api_versions.APIVersion('2.41')): columns.append('UUID') utils.print_list(aggregates, columns)
[ "def", "do_aggregate_list", "(", "cs", ",", "args", ")", ":", "aggregates", "=", "cs", ".", "aggregates", ".", "list", "(", ")", "columns", "=", "[", "'Id'", ",", "'Name'", ",", "'Availability Zone'", "]", "if", "(", "cs", ".", "api_version", ">=", "api_versions", ".", "APIVersion", "(", "'2.41'", ")", ")", ":", "columns", ".", "append", "(", "'UUID'", ")", "utils", ".", "print_list", "(", "aggregates", ",", "columns", ")" ]
print a list of all aggregates .
train
false
5,611
def extract_mnist_labels(filename, num_images): if (not tf.gfile.Exists((filename + '.npy'))): with gzip.open(filename) as bytestream: bytestream.read(8) buf = bytestream.read((1 * num_images)) labels = np.frombuffer(buf, dtype=np.uint8).astype(np.int32) np.save(filename, labels) return labels else: with tf.gfile.Open((filename + '.npy'), mode='r') as file_obj: return np.load(file_obj)
[ "def", "extract_mnist_labels", "(", "filename", ",", "num_images", ")", ":", "if", "(", "not", "tf", ".", "gfile", ".", "Exists", "(", "(", "filename", "+", "'.npy'", ")", ")", ")", ":", "with", "gzip", ".", "open", "(", "filename", ")", "as", "bytestream", ":", "bytestream", ".", "read", "(", "8", ")", "buf", "=", "bytestream", ".", "read", "(", "(", "1", "*", "num_images", ")", ")", "labels", "=", "np", ".", "frombuffer", "(", "buf", ",", "dtype", "=", "np", ".", "uint8", ")", ".", "astype", "(", "np", ".", "int32", ")", "np", ".", "save", "(", "filename", ",", "labels", ")", "return", "labels", "else", ":", "with", "tf", ".", "gfile", ".", "Open", "(", "(", "filename", "+", "'.npy'", ")", ",", "mode", "=", "'r'", ")", "as", "file_obj", ":", "return", "np", ".", "load", "(", "file_obj", ")" ]
extract the labels into a vector of int64 label ids .
train
false
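A usage sketch, assuming the standard MNIST label archive is present in the working directory:

    # The first call parses the gzip archive and caches a .npy copy next to
    # it; subsequent calls load the cached array directly.
    labels = extract_mnist_labels('train-labels-idx1-ubyte.gz', 60000)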
5,612
def eval_location2(pymodule, offset): pyname_finder = ScopeNameFinder(pymodule) return pyname_finder.get_primary_and_pyname_at(offset)
[ "def", "eval_location2", "(", "pymodule", ",", "offset", ")", ":", "pyname_finder", "=", "ScopeNameFinder", "(", "pymodule", ")", "return", "pyname_finder", ".", "get_primary_and_pyname_at", "(", "offset", ")" ]
find the primary and pyname at offset .
train
false
5,614
def agent_build_get_by_triple(context, hypervisor, os, architecture): return IMPL.agent_build_get_by_triple(context, hypervisor, os, architecture)
[ "def", "agent_build_get_by_triple", "(", "context", ",", "hypervisor", ",", "os", ",", "architecture", ")", ":", "return", "IMPL", ".", "agent_build_get_by_triple", "(", "context", ",", "hypervisor", ",", "os", ",", "architecture", ")" ]
get agent build by hypervisor/os/architecture triple .
train
false
5,615
def label_param(registry, xml_parent, data): base_param(registry, xml_parent, data, True, 'org.jvnet.jenkins.plugins.nodelabelparameter.LabelParameterDefinition')
[ "def", "label_param", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "base_param", "(", "registry", ",", "xml_parent", ",", "data", ",", "True", ",", "'org.jvnet.jenkins.plugins.nodelabelparameter.LabelParameterDefinition'", ")" ]
yaml: label a node label parameter .
train
false
5,616
def parse_otu_map(otu_map_f, otu_ids_to_exclude=None, delim='_'): if (otu_ids_to_exclude is None): otu_ids_to_exclude = {} result = defaultdict(int) sample_ids = [] sample_id_idx = {} otu_ids = [] otu_count = 0 sample_count = 0 for line in otu_map_f: fields = line.strip().split('\t') otu_id = fields[0] if (otu_id in otu_ids_to_exclude): continue for seq_id in fields[1:]: sample_id = seq_id.split(delim)[0] try: sample_index = sample_id_idx[sample_id] except KeyError: sample_index = sample_count sample_id_idx[sample_id] = sample_index sample_count += 1 sample_ids.append(sample_id) result[(otu_count, sample_index)] += 1 otu_count += 1 otu_ids.append(otu_id) return (result, sample_ids, otu_ids)
[ "def", "parse_otu_map", "(", "otu_map_f", ",", "otu_ids_to_exclude", "=", "None", ",", "delim", "=", "'_'", ")", ":", "if", "(", "otu_ids_to_exclude", "is", "None", ")", ":", "otu_ids_to_exclude", "=", "{", "}", "result", "=", "defaultdict", "(", "int", ")", "sample_ids", "=", "[", "]", "sample_id_idx", "=", "{", "}", "otu_ids", "=", "[", "]", "otu_count", "=", "0", "sample_count", "=", "0", "for", "line", "in", "otu_map_f", ":", "fields", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "' DCTB '", ")", "otu_id", "=", "fields", "[", "0", "]", "if", "(", "otu_id", "in", "otu_ids_to_exclude", ")", ":", "continue", "for", "seq_id", "in", "fields", "[", "1", ":", "]", ":", "sample_id", "=", "seq_id", ".", "split", "(", "delim", ")", "[", "0", "]", "try", ":", "sample_index", "=", "sample_id_idx", "[", "sample_id", "]", "except", "KeyError", ":", "sample_index", "=", "sample_count", "sample_id_idx", "[", "sample_id", "]", "=", "sample_index", "sample_count", "+=", "1", "sample_ids", ".", "append", "(", "sample_id", ")", "result", "[", "(", "otu_count", ",", "sample_index", ")", "]", "+=", "1", "otu_count", "+=", "1", "otu_ids", ".", "append", "(", "otu_id", ")", "return", "(", "result", ",", "sample_ids", ",", "otu_ids", ")" ]
parse otu map file into a sparse dict {(otu_idx, sample_idx): count} . this function is much more memory efficient than fields_to_dict and the result dict is of the correct format to be passed to table_factory for creating otutable objects .
train
false
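A usage sketch with an in-memory tab-delimited OTU map; sample IDs are the prefix of each sequence ID before the '_' delimiter:

    from io import StringIO

    otu_map = StringIO('otu1\tsampleA_1\tsampleA_2\tsampleB_7\n'
                       'otu2\tsampleB_3\n')
    counts, sample_ids, otu_ids = parse_otu_map(otu_map)
    # counts maps (otu_index, sample_index) -> sequence count, e.g.
    # counts[(0, 0)] == 2 for the two sampleA reads assigned to otu1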
5,618
def permanent_redirect_to_people_search(request, property, value): if (property == 'seeking'): property = 'can_pitch_in' if (' ' in value): escaped_value = (('"' + value) + '"') else: escaped_value = value q = ('%s:%s' % (property, escaped_value)) get_args = {u'q': q} destination_url = ((reverse('mysite.profile.views.people') + '?') + http.urlencode(get_args)) return HttpResponsePermanentRedirect(destination_url)
[ "def", "permanent_redirect_to_people_search", "(", "request", ",", "property", ",", "value", ")", ":", "if", "(", "property", "==", "'seeking'", ")", ":", "property", "=", "'can_pitch_in'", "if", "(", "' '", "in", "value", ")", ":", "escaped_value", "=", "(", "(", "'\"'", "+", "value", ")", "+", "'\"'", ")", "else", ":", "escaped_value", "=", "value", "q", "=", "(", "'%s:%s'", "%", "(", "property", ",", "escaped_value", ")", ")", "get_args", "=", "{", "u'q'", ":", "q", "}", "destination_url", "=", "(", "(", "reverse", "(", "'mysite.profile.views.people'", ")", "+", "'?'", ")", "+", "http", ".", "urlencode", "(", "get_args", ")", ")", "return", "HttpResponsePermanentRedirect", "(", "destination_url", ")" ]
property is the "tag name" .
train
false
5,619
def split_policy_string(policy_string): if ('-' in policy_string): (base, policy_index) = policy_string.rsplit('-', 1) else: (base, policy_index) = (policy_string, None) policy = POLICIES.get_by_index(policy_index) if (get_policy_string(base, policy) != policy_string): raise PolicyError('Unknown policy', index=policy_index) return (base, policy)
[ "def", "split_policy_string", "(", "policy_string", ")", ":", "if", "(", "'-'", "in", "policy_string", ")", ":", "(", "base", ",", "policy_index", ")", "=", "policy_string", ".", "rsplit", "(", "'-'", ",", "1", ")", "else", ":", "(", "base", ",", "policy_index", ")", "=", "(", "policy_string", ",", "None", ")", "policy", "=", "POLICIES", ".", "get_by_index", "(", "policy_index", ")", "if", "(", "get_policy_string", "(", "base", ",", "policy", ")", "!=", "policy_string", ")", ":", "raise", "PolicyError", "(", "'Unknown policy'", ",", "index", "=", "policy_index", ")", "return", "(", "base", ",", "policy", ")" ]
helper function to convert a string representing a base and a policy .
train
false
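A usage sketch, assuming Swift's POLICIES registry has a policy configured at index 1; the round-trip check at the end is what rejects unknown suffixes:

    base, policy = split_policy_string('objects-1')  # ('objects', policy 1)
    base, policy = split_policy_string('objects')    # ('objects', legacy policy 0)
    split_policy_string('objects-99')                # raises PolicyError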
5,620
def biDiText(text): text = s3_unicode(text) if (biDiImported and current.deployment_settings.get_pdf_bidi()): isArabic = False isBidi = False for c in text: cat = unicodedata.bidirectional(c) if (cat in ('AL', 'AN')): isArabic = True isBidi = True break elif (cat in ('R', 'RLE', 'RLO')): isBidi = True if isArabic: text = arabic_reshaper.reshape(text) if isBidi: text = get_display(text) return text
[ "def", "biDiText", "(", "text", ")", ":", "text", "=", "s3_unicode", "(", "text", ")", "if", "(", "biDiImported", "and", "current", ".", "deployment_settings", ".", "get_pdf_bidi", "(", ")", ")", ":", "isArabic", "=", "False", "isBidi", "=", "False", "for", "c", "in", "text", ":", "cat", "=", "unicodedata", ".", "bidirectional", "(", "c", ")", "if", "(", "cat", "in", "(", "'AL'", ",", "'AN'", ")", ")", ":", "isArabic", "=", "True", "isBidi", "=", "True", "break", "elif", "(", "cat", "in", "(", "'R'", ",", "'RLE'", ",", "'RLO'", ")", ")", ":", "isBidi", "=", "True", "if", "isArabic", ":", "text", "=", "arabic_reshaper", ".", "reshape", "(", "text", ")", "if", "isBidi", ":", "text", "=", "get_display", "(", "text", ")", "return", "text" ]
ensure that rtl text is rendered rtl & also that arabic text is rewritten to use the joined format .
train
false
5,621
def resize_url(url, maxwidth): return '{0}?{1}'.format(PROXY_URL, urlencode({'url': url.replace('http://', ''), 'w': maxwidth}))
[ "def", "resize_url", "(", "url", ",", "maxwidth", ")", ":", "return", "'{0}?{1}'", ".", "format", "(", "PROXY_URL", ",", "urlencode", "(", "{", "'url'", ":", "url", ".", "replace", "(", "'http://'", ",", "''", ")", ",", "'w'", ":", "maxwidth", "}", ")", ")" ]
return a proxied image url that resizes the original image to maxwidth .
train
false
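A quick illustration; PROXY_URL is a module-level constant, and the value shown here is hypothetical:

    PROXY_URL = 'https://proxy.example.com/resize'   # hypothetical endpoint
    resize_url('http://example.com/cover.jpg', 300)
    # -> 'https://proxy.example.com/resize?url=example.com%2Fcover.jpg&w=300'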
5,622
def sslwrap_simple(sock, keyfile=None, certfile=None): if hasattr(sock, '_sock'): sock = sock._sock ssl_sock = _ssl.sslwrap(sock, 0, keyfile, certfile, CERT_NONE, PROTOCOL_SSLv23, None) try: sock.getpeername() except: pass else: ssl_sock.do_handshake() return ssl_sock
[ "def", "sslwrap_simple", "(", "sock", ",", "keyfile", "=", "None", ",", "certfile", "=", "None", ")", ":", "if", "hasattr", "(", "sock", ",", "'_sock'", ")", ":", "sock", "=", "sock", ".", "_sock", "ssl_sock", "=", "_ssl", ".", "sslwrap", "(", "sock", ",", "0", ",", "keyfile", ",", "certfile", ",", "CERT_NONE", ",", "PROTOCOL_SSLv23", ",", "None", ")", "try", ":", "sock", ".", "getpeername", "(", ")", "except", ":", "pass", "else", ":", "ssl_sock", ".", "do_handshake", "(", ")", "return", "ssl_sock" ]
a replacement for the old socket .
train
false
5,623
def membership_required(function=None): def decorator(request, *args, **kwargs): group = get_object_or_404(Group, slug=kwargs['slug']) if request.user.is_anonymous(): return HttpResponseRedirect(reverse('django.contrib.auth.views.login')) if GroupMember.objects.is_member(group, request.user): return function(request, *args, **kwargs) else: return HttpResponseRedirect(reverse('groups:join', args=[group.slug])) return decorator
[ "def", "membership_required", "(", "function", "=", "None", ")", ":", "def", "decorator", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "slug", "=", "kwargs", "[", "'slug'", "]", ")", "if", "request", ".", "user", ".", "is_anonymous", "(", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'django.contrib.auth.views.login'", ")", ")", "if", "GroupMember", ".", "objects", ".", "is_member", "(", "group", ",", "request", ".", "user", ")", ":", "return", "function", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'groups:join'", ",", "args", "=", "[", "group", ".", "slug", "]", ")", ")", "return", "decorator" ]
decorator for views that require user to be a member of a group .
train
false
5,625
def file_iter(fname, sep=None): for line in open(fname): if (line and (line[0] != '#')): (yield line.split(sep))
[ "def", "file_iter", "(", "fname", ",", "sep", "=", "None", ")", ":", "for", "line", "in", "open", "(", "fname", ")", ":", "if", "(", "line", "and", "(", "line", "[", "0", "]", "!=", "'#'", ")", ")", ":", "(", "yield", "line", ".", "split", "(", "sep", ")", ")" ]
this generator iterates over a file and yields its lines split via the c{sep} parameter .
train
false
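A usage sketch over a small comma-separated file; lines starting with '#' are skipped:

    # data.csv (hypothetical contents):
    #   # header comment
    #   a,b,c
    for fields in file_iter('data.csv', sep=','):
        print(fields)   # ['a', 'b', 'c\n'] -- note the lines are not stripped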
5,627
def get_upload_pipeline(in_fd, out_fd, rate_limit=None, gpg_key=None, lzop=True): commands = [] if (rate_limit is not None): commands.append(PipeViewerRateLimitFilter(rate_limit)) if lzop: commands.append(LZOCompressionFilter()) if (gpg_key is not None): commands.append(GPGEncryptionFilter(gpg_key)) return Pipeline(commands, in_fd, out_fd)
[ "def", "get_upload_pipeline", "(", "in_fd", ",", "out_fd", ",", "rate_limit", "=", "None", ",", "gpg_key", "=", "None", ",", "lzop", "=", "True", ")", ":", "commands", "=", "[", "]", "if", "(", "rate_limit", "is", "not", "None", ")", ":", "commands", ".", "append", "(", "PipeViewerRateLimitFilter", "(", "rate_limit", ")", ")", "if", "lzop", ":", "commands", ".", "append", "(", "LZOCompressionFilter", "(", ")", ")", "if", "(", "gpg_key", "is", "not", "None", ")", ":", "commands", ".", "append", "(", "GPGEncryptionFilter", "(", "gpg_key", ")", ")", "return", "Pipeline", "(", "commands", ",", "in_fd", ",", "out_fd", ")" ]
create a unix pipeline to process a file for uploading .
train
true
5,628
def _not_converged(y_truth, y_prediction, tol=0.001): return (np.abs((y_truth - y_prediction)).sum() > tol)
[ "def", "_not_converged", "(", "y_truth", ",", "y_prediction", ",", "tol", "=", "0.001", ")", ":", "return", "(", "np", ".", "abs", "(", "(", "y_truth", "-", "y_prediction", ")", ")", ".", "sum", "(", ")", ">", "tol", ")" ]
basic convergence check .
train
false
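A small numeric illustration of the L1 convergence test:

    import numpy as np

    # total absolute error 0.01 exceeds tol=1e-3, so not yet converged
    _not_converged(np.array([1.0, 2.0]), np.array([1.0, 2.01]))    # True
    # total absolute error 4e-4 is within tol, so converged
    _not_converged(np.array([1.0, 2.0]), np.array([1.0, 2.0004]))  # False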
5,630
def build_traversal_spec(client_factory, name, spec_type, path, skip, select_set): traversal_spec = client_factory.create('ns0:TraversalSpec') traversal_spec.name = name traversal_spec.type = spec_type traversal_spec.path = path traversal_spec.skip = skip traversal_spec.selectSet = select_set return traversal_spec
[ "def", "build_traversal_spec", "(", "client_factory", ",", "name", ",", "spec_type", ",", "path", ",", "skip", ",", "select_set", ")", ":", "traversal_spec", "=", "client_factory", ".", "create", "(", "'ns0:TraversalSpec'", ")", "traversal_spec", ".", "name", "=", "name", "traversal_spec", ".", "type", "=", "spec_type", "traversal_spec", ".", "path", "=", "path", "traversal_spec", ".", "skip", "=", "skip", "traversal_spec", ".", "selectSet", "=", "select_set", "return", "traversal_spec" ]
builds the traversal spec object .
train
false
5,631
def escapeToXml(text, isattrib=0): text = text.replace('&', '&amp;') text = text.replace('<', '&lt;') text = text.replace('>', '&gt;') if (isattrib == 1): text = text.replace("'", '&apos;') text = text.replace('"', '&quot;') return text
[ "def", "escapeToXml", "(", "text", ",", "isattrib", "=", "0", ")", ":", "text", "=", "text", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", "text", "=", "text", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", "text", "=", "text", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", "if", "(", "isattrib", "==", "1", ")", ":", "text", "=", "text", ".", "replace", "(", "\"'\"", ",", "'&apos;'", ")", "text", "=", "text", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", "return", "text" ]
escape text to proper xml form .
train
false
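A quick illustration of both modes:

    escapeToXml('<a href="x">&</a>')      # '&lt;a href="x"&gt;&amp;&lt;/a&gt;'
    escapeToXml('say "hi"', isattrib=1)   # 'say &quot;hi&quot;'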
5,632
def episode_num(season=None, episode=None, **kwargs): numbering = kwargs.pop(u'numbering', u'standard') if (numbering == u'standard'): if ((season is not None) and episode): return u'S{0:0>2}E{1:02}'.format(season, episode) elif (numbering == u'absolute'): if ((not (season and episode)) and (season or episode)): return u'{0:0>3}'.format((season or episode))
[ "def", "episode_num", "(", "season", "=", "None", ",", "episode", "=", "None", ",", "**", "kwargs", ")", ":", "numbering", "=", "kwargs", ".", "pop", "(", "u'numbering'", ",", "u'standard'", ")", "if", "(", "numbering", "==", "u'standard'", ")", ":", "if", "(", "(", "season", "is", "not", "None", ")", "and", "episode", ")", ":", "return", "u'S{0:0>2}E{1:02}'", ".", "format", "(", "season", ",", "episode", ")", "elif", "(", "numbering", "==", "u'absolute'", ")", ":", "if", "(", "(", "not", "(", "season", "and", "episode", ")", ")", "and", "(", "season", "or", "episode", ")", ")", ":", "return", "u'{0:0>3}'", ".", "format", "(", "(", "season", "or", "episode", ")", ")" ]
convert season and episode into string .
train
false
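A few sample calls; note that falsy season/episode values fall through and the function returns None:

    episode_num(3, 7)                              # 'S03E07'
    episode_num(episode=42, numbering='absolute')  # '042'
    episode_num(0, 0)                              # None (episode 0 is falsy)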
5,633
def get_unit_status(code): output = check_output(('heyu onstate ' + code), shell=True) return int(output.decode('utf-8')[0])
[ "def", "get_unit_status", "(", "code", ")", ":", "output", "=", "check_output", "(", "(", "'heyu onstate '", "+", "code", ")", ",", "shell", "=", "True", ")", "return", "int", "(", "output", ".", "decode", "(", "'utf-8'", ")", "[", "0", "]", ")" ]
get on/off status for given unit .
train
false
5,634
def _copy_gl_functions(source, dest, constants=False): if isinstance(source, BaseGLProxy): s = {} for key in dir(source): s[key] = getattr(source, key) source = s elif (not isinstance(source, dict)): source = source.__dict__ if (not isinstance(dest, dict)): dest = dest.__dict__ funcnames = [name for name in source.keys() if name.startswith('gl')] for name in funcnames: dest[name] = source[name] if constants: constnames = [name for name in source.keys() if name.startswith('GL_')] for name in constnames: dest[name] = source[name]
[ "def", "_copy_gl_functions", "(", "source", ",", "dest", ",", "constants", "=", "False", ")", ":", "if", "isinstance", "(", "source", ",", "BaseGLProxy", ")", ":", "s", "=", "{", "}", "for", "key", "in", "dir", "(", "source", ")", ":", "s", "[", "key", "]", "=", "getattr", "(", "source", ",", "key", ")", "source", "=", "s", "elif", "(", "not", "isinstance", "(", "source", ",", "dict", ")", ")", ":", "source", "=", "source", ".", "__dict__", "if", "(", "not", "isinstance", "(", "dest", ",", "dict", ")", ")", ":", "dest", "=", "dest", ".", "__dict__", "funcnames", "=", "[", "name", "for", "name", "in", "source", ".", "keys", "(", ")", "if", "name", ".", "startswith", "(", "'gl'", ")", "]", "for", "name", "in", "funcnames", ":", "dest", "[", "name", "]", "=", "source", "[", "name", "]", "if", "constants", ":", "constnames", "=", "[", "name", "for", "name", "in", "source", ".", "keys", "(", ")", "if", "name", ".", "startswith", "(", "'GL_'", ")", "]", "for", "name", "in", "constnames", ":", "dest", "[", "name", "]", "=", "source", "[", "name", "]" ]
inject all objects that start with gl from the source into the dest .
train
true
5,635
def _ratings_success_msg(app, old_status, old_modified): if old_modified: old_modified = datetime.strptime(old_modified, '%Y-%m-%dT%H:%M:%S') if (old_status != app.status): return _submission_msgs()['complete'] elif (old_modified != app.last_rated_time()): return _submission_msgs()['content_ratings_saved']
[ "def", "_ratings_success_msg", "(", "app", ",", "old_status", ",", "old_modified", ")", ":", "if", "old_modified", ":", "old_modified", "=", "datetime", ".", "strptime", "(", "old_modified", ",", "'%Y-%m-%dT%H:%M:%S'", ")", "if", "(", "old_status", "!=", "app", ".", "status", ")", ":", "return", "_submission_msgs", "(", ")", "[", "'complete'", "]", "elif", "(", "old_modified", "!=", "app", ".", "last_rated_time", "(", ")", ")", ":", "return", "_submission_msgs", "(", ")", "[", "'content_ratings_saved'", "]" ]
ratings can be created via iarc pinging our api .
train
false
5,637
def map(func): if (is_py2 and (text == func)): func = unicode def expand_kv(kv): return func(*kv) def map_values(value): cls = type(value) if isinstance(value, dict): return cls(_map(expand_kv, value.items())) else: return cls(_map(func, value)) return transform(map_values)
[ "def", "map", "(", "func", ")", ":", "if", "(", "is_py2", "and", "(", "text", "==", "func", ")", ")", ":", "func", "=", "unicode", "def", "expand_kv", "(", "kv", ")", ":", "return", "func", "(", "*", "kv", ")", "def", "map_values", "(", "value", ")", ":", "cls", "=", "type", "(", "value", ")", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "return", "cls", "(", "_map", "(", "expand_kv", ",", "value", ".", "items", "(", ")", ")", ")", "else", ":", "return", "cls", "(", "_map", "(", "func", ",", "value", ")", ")", "return", "transform", "(", "map_values", ")" ]
concurrently converts a list of requests to responses .
train
true
5,639
def data2proddummy(x): groups = np.unique(lmap(tuple, x.tolist())) return (x == groups[:, None, :]).all((-1)).T.astype(int)[:, :(-1)]
[ "def", "data2proddummy", "(", "x", ")", ":", "groups", "=", "np", ".", "unique", "(", "lmap", "(", "tuple", ",", "x", ".", "tolist", "(", ")", ")", ")", "return", "(", "x", "==", "groups", "[", ":", ",", "None", ",", ":", "]", ")", ".", "all", "(", "(", "-", "1", ")", ")", ".", "T", ".", "astype", "(", "int", ")", "[", ":", ",", ":", "(", "-", "1", ")", "]" ]
creates product dummy variables from 2 columns of a 2d array ; drops the last dummy variable .
train
false
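A small numeric sketch of the encoding. Depending on the numpy version, np.unique may flatten a list of tuples, so the equivalent row-wise form with axis=0 is used here to show the intended result:

    import numpy as np

    x = np.array([[0, 0], [0, 1], [1, 0], [0, 0]])
    groups = np.unique(x, axis=0)   # [[0, 0], [0, 1], [1, 0]]
    (x == groups[:, None, :]).all(-1).T.astype(int)[:, :-1]
    # array([[1, 0],
    #        [0, 1],
    #        [0, 0],
    #        [1, 0]])   -- one column per group, last group dropped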
5,641
def test_async_track_states(event_loop): hass = get_test_home_assistant() try: point1 = dt_util.utcnow() point2 = (point1 + timedelta(seconds=5)) point3 = (point2 + timedelta(seconds=5)) @asyncio.coroutine @patch('homeassistant.core.dt_util.utcnow') def run_test(mock_utcnow): 'Run the test.' mock_utcnow.return_value = point2 with state.AsyncTrackStates(hass) as states: mock_utcnow.return_value = point1 hass.states.set('light.test', 'on') mock_utcnow.return_value = point2 hass.states.set('light.test2', 'on') state2 = hass.states.get('light.test2') mock_utcnow.return_value = point3 hass.states.set('light.test3', 'on') state3 = hass.states.get('light.test3') assert ([state2, state3] == sorted(states, key=(lambda state: state.entity_id))) event_loop.run_until_complete(run_test()) finally: hass.stop()
[ "def", "test_async_track_states", "(", "event_loop", ")", ":", "hass", "=", "get_test_home_assistant", "(", ")", "try", ":", "point1", "=", "dt_util", ".", "utcnow", "(", ")", "point2", "=", "(", "point1", "+", "timedelta", "(", "seconds", "=", "5", ")", ")", "point3", "=", "(", "point2", "+", "timedelta", "(", "seconds", "=", "5", ")", ")", "@", "asyncio", ".", "coroutine", "@", "patch", "(", "'homeassistant.core.dt_util.utcnow'", ")", "def", "run_test", "(", "mock_utcnow", ")", ":", "mock_utcnow", ".", "return_value", "=", "point2", "with", "state", ".", "AsyncTrackStates", "(", "hass", ")", "as", "states", ":", "mock_utcnow", ".", "return_value", "=", "point1", "hass", ".", "states", ".", "set", "(", "'light.test'", ",", "'on'", ")", "mock_utcnow", ".", "return_value", "=", "point2", "hass", ".", "states", ".", "set", "(", "'light.test2'", ",", "'on'", ")", "state2", "=", "hass", ".", "states", ".", "get", "(", "'light.test2'", ")", "mock_utcnow", ".", "return_value", "=", "point3", "hass", ".", "states", ".", "set", "(", "'light.test3'", ",", "'on'", ")", "state3", "=", "hass", ".", "states", ".", "get", "(", "'light.test3'", ")", "assert", "(", "[", "state2", ",", "state3", "]", "==", "sorted", "(", "states", ",", "key", "=", "(", "lambda", "state", ":", "state", ".", "entity_id", ")", ")", ")", "event_loop", ".", "run_until_complete", "(", "run_test", "(", ")", ")", "finally", ":", "hass", ".", "stop", "(", ")" ]
test asynctrackstates context manager .
train
false
5,642
def dirichlet_logpdf_vec(x, alpha): shape = x.shape if (len(shape) == 1): try: return stats.dirichlet.logpdf(x, alpha) except: x[(-1)] = (1.0 - np.sum(x[:(-1)])) return stats.dirichlet.logpdf(x, alpha) elif (len(shape) == 2): size = shape[0] if (len(alpha.shape) == 1): return np.array([dirichlet_logpdf_vec(x[i, :], alpha) for i in range(size)]) else: return np.array([dirichlet_logpdf_vec(x[i, :], alpha[i, :]) for i in range(size)]) elif (len(shape) == 3): size = shape[0] return np.array([dirichlet_logpdf_vec(x[i, :, :], alpha) for i in range(size)]) else: raise NotImplementedError()
[ "def", "dirichlet_logpdf_vec", "(", "x", ",", "alpha", ")", ":", "shape", "=", "x", ".", "shape", "if", "(", "len", "(", "shape", ")", "==", "1", ")", ":", "try", ":", "return", "stats", ".", "dirichlet", ".", "logpdf", "(", "x", ",", "alpha", ")", "except", ":", "x", "[", "(", "-", "1", ")", "]", "=", "(", "1.0", "-", "np", ".", "sum", "(", "x", "[", ":", "(", "-", "1", ")", "]", ")", ")", "return", "stats", ".", "dirichlet", ".", "logpdf", "(", "x", ",", "alpha", ")", "elif", "(", "len", "(", "shape", ")", "==", "2", ")", ":", "size", "=", "shape", "[", "0", "]", "if", "(", "len", "(", "alpha", ".", "shape", ")", "==", "1", ")", ":", "return", "np", ".", "array", "(", "[", "dirichlet_logpdf_vec", "(", "x", "[", "i", ",", ":", "]", ",", "alpha", ")", "for", "i", "in", "range", "(", "size", ")", "]", ")", "else", ":", "return", "np", ".", "array", "(", "[", "dirichlet_logpdf_vec", "(", "x", "[", "i", ",", ":", "]", ",", "alpha", "[", "i", ",", ":", "]", ")", "for", "i", "in", "range", "(", "size", ")", "]", ")", "elif", "(", "len", "(", "shape", ")", "==", "3", ")", ":", "size", "=", "shape", "[", "0", "]", "return", "np", ".", "array", "(", "[", "dirichlet_logpdf_vec", "(", "x", "[", "i", ",", ":", ",", ":", "]", ",", "alpha", ")", "for", "i", "in", "range", "(", "size", ")", "]", ")", "else", ":", "raise", "NotImplementedError", "(", ")" ]
vectorized version of stats .
train
false
5,643
def _mark_asn1_named_ec_curve(backend, ec_cdata): backend._lib.EC_KEY_set_asn1_flag(ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE)
[ "def", "_mark_asn1_named_ec_curve", "(", "backend", ",", "ec_cdata", ")", ":", "backend", ".", "_lib", ".", "EC_KEY_set_asn1_flag", "(", "ec_cdata", ",", "backend", ".", "_lib", ".", "OPENSSL_EC_NAMED_CURVE", ")" ]
set the named curve flag on the ec_key .
train
false
5,644
@register.inclusion_tag('inclusion.html') def inclusion_unlimited_args(one, two='hi', *args): return {'result': ('inclusion_unlimited_args - Expected result: %s' % ', '.join((str(arg) for arg in ([one, two] + list(args)))))}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ")", "def", "inclusion_unlimited_args", "(", "one", ",", "two", "=", "'hi'", ",", "*", "args", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_unlimited_args - Expected result: %s'", "%", "', '", ".", "join", "(", "(", "str", "(", "arg", ")", "for", "arg", "in", "(", "[", "one", ",", "two", "]", "+", "list", "(", "args", ")", ")", ")", ")", ")", "}" ]
expected inclusion_unlimited_args __doc__ .
train
false
5,645
def remove_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): (event_source_obj, ctx, funk) = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) funk.arn = lambda_arn if (not dry): rule_response = event_source_obj.remove(funk) return rule_response else: return event_source_obj
[ "def", "remove_event_source", "(", "event_source", ",", "lambda_arn", ",", "target_function", ",", "boto_session", ",", "dry", "=", "False", ")", ":", "(", "event_source_obj", ",", "ctx", ",", "funk", ")", "=", "get_event_source", "(", "event_source", ",", "lambda_arn", ",", "target_function", ",", "boto_session", ",", "dry", "=", "False", ")", "funk", ".", "arn", "=", "lambda_arn", "if", "(", "not", "dry", ")", ":", "rule_response", "=", "event_source_obj", ".", "remove", "(", "funk", ")", "return", "rule_response", "else", ":", "return", "event_source_obj" ]
given an event_source dictionary .
train
true
5,646
@cache_permission def can_suggest(user, translation): if (not translation.subproject.enable_suggestions): return False if has_group_perm(user, 'trans.add_suggestion', translation): return True return check_permission(user, translation.subproject.project, 'trans.add_suggestion')
[ "@", "cache_permission", "def", "can_suggest", "(", "user", ",", "translation", ")", ":", "if", "(", "not", "translation", ".", "subproject", ".", "enable_suggestions", ")", ":", "return", "False", "if", "has_group_perm", "(", "user", ",", "'trans.add_suggestion'", ",", "translation", ")", ":", "return", "True", "return", "check_permission", "(", "user", ",", "translation", ".", "subproject", ".", "project", ",", "'trans.add_suggestion'", ")" ]
checks whether user can add suggestions to given translation .
train
false
5,647
def label_ranking_average_precision_score(y_true, y_score): check_consistent_length(y_true, y_score) y_true = check_array(y_true, ensure_2d=False) y_score = check_array(y_score, ensure_2d=False) if (y_true.shape != y_score.shape): raise ValueError('y_true and y_score have different shape') y_type = type_of_target(y_true) if ((y_type != 'multilabel-indicator') and (not ((y_type == 'binary') and (y_true.ndim == 2)))): raise ValueError('{0} format is not supported'.format(y_type)) y_true = csr_matrix(y_true) y_score = (- y_score) (n_samples, n_labels) = y_true.shape out = 0.0 for (i, (start, stop)) in enumerate(zip(y_true.indptr, y_true.indptr[1:])): relevant = y_true.indices[start:stop] if ((relevant.size == 0) or (relevant.size == n_labels)): out += 1.0 continue scores_i = y_score[i] rank = rankdata(scores_i, 'max')[relevant] L = rankdata(scores_i[relevant], 'max') out += (L / rank).mean() return (out / n_samples)
[ "def", "label_ranking_average_precision_score", "(", "y_true", ",", "y_score", ")", ":", "check_consistent_length", "(", "y_true", ",", "y_score", ")", "y_true", "=", "check_array", "(", "y_true", ",", "ensure_2d", "=", "False", ")", "y_score", "=", "check_array", "(", "y_score", ",", "ensure_2d", "=", "False", ")", "if", "(", "y_true", ".", "shape", "!=", "y_score", ".", "shape", ")", ":", "raise", "ValueError", "(", "'y_true and y_score have different shape'", ")", "y_type", "=", "type_of_target", "(", "y_true", ")", "if", "(", "(", "y_type", "!=", "'multilabel-indicator'", ")", "and", "(", "not", "(", "(", "y_type", "==", "'binary'", ")", "and", "(", "y_true", ".", "ndim", "==", "2", ")", ")", ")", ")", ":", "raise", "ValueError", "(", "'{0} format is not supported'", ".", "format", "(", "y_type", ")", ")", "y_true", "=", "csr_matrix", "(", "y_true", ")", "y_score", "=", "(", "-", "y_score", ")", "(", "n_samples", ",", "n_labels", ")", "=", "y_true", ".", "shape", "out", "=", "0.0", "for", "(", "i", ",", "(", "start", ",", "stop", ")", ")", "in", "enumerate", "(", "zip", "(", "y_true", ".", "indptr", ",", "y_true", ".", "indptr", "[", "1", ":", "]", ")", ")", ":", "relevant", "=", "y_true", ".", "indices", "[", "start", ":", "stop", "]", "if", "(", "(", "relevant", ".", "size", "==", "0", ")", "or", "(", "relevant", ".", "size", "==", "n_labels", ")", ")", ":", "out", "+=", "1.0", "continue", "scores_i", "=", "y_score", "[", "i", "]", "rank", "=", "rankdata", "(", "scores_i", ",", "'max'", ")", "[", "relevant", "]", "L", "=", "rankdata", "(", "scores_i", "[", "relevant", "]", ",", "'max'", ")", "out", "+=", "(", "L", "/", "rank", ")", ".", "mean", "(", ")", "return", "(", "out", "/", "n_samples", ")" ]
compute ranking-based average precision . label ranking average precision is the average over each ground truth label assigned to each sample .
train
false
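The worked example from the scikit-learn documentation, as a sanity check:

    import numpy as np

    y_true = np.array([[1, 0, 0], [0, 0, 1]])
    y_score = np.array([[0.75, 0.5, 1], [1, 0.2, 0.1]])
    # sample 1: the relevant label ranks 2nd -> 1/2; sample 2: 3rd -> 1/3
    label_ranking_average_precision_score(y_true, y_score)   # ~0.4166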
5,648
def StandardizedMoment(xs, k): var = CentralMoment(xs, 2) std = math.sqrt(var) return (CentralMoment(xs, k) / (std ** k))
[ "def", "StandardizedMoment", "(", "xs", ",", "k", ")", ":", "var", "=", "CentralMoment", "(", "xs", ",", "2", ")", "std", "=", "math", ".", "sqrt", "(", "var", ")", "return", "(", "CentralMoment", "(", "xs", ",", "k", ")", "/", "(", "std", "**", "k", ")", ")" ]
computes the kth standardized moment of xs .
train
false
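A quick illustration: with k=3 this is the sample skewness, which is zero for symmetric data:

    xs = [1, 2, 3, 4, 5]
    StandardizedMoment(xs, 3)   # 0.0 -- symmetric sample, no skew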
5,649
def rastrigin_scaled(individual): N = len(individual) return (((10 * N) + sum((((((10 ** (i / (N - 1))) * x) ** 2) - (10 * cos((((2 * pi) * (10 ** (i / (N - 1)))) * x)))) for (i, x) in enumerate(individual)))),)
[ "def", "rastrigin_scaled", "(", "individual", ")", ":", "N", "=", "len", "(", "individual", ")", "return", "(", "(", "(", "10", "*", "N", ")", "+", "sum", "(", "(", "(", "(", "(", "(", "10", "**", "(", "i", "/", "(", "N", "-", "1", ")", ")", ")", "*", "x", ")", "**", "2", ")", "-", "(", "10", "*", "cos", "(", "(", "(", "(", "2", "*", "pi", ")", "*", "(", "10", "**", "(", "i", "/", "(", "N", "-", "1", ")", ")", ")", ")", "*", "x", ")", ")", ")", ")", "for", "(", "i", ",", "x", ")", "in", "enumerate", "(", "individual", ")", ")", ")", ")", ",", ")" ]
scaled rastrigin test objective function .
train
false
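A sanity check at the global optimum; like the other DEAP benchmarks it returns a one-element fitness tuple:

    rastrigin_scaled([0.0, 0.0, 0.0])   # (0.0,) -- minimum at the origin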
5,650
def record_action(action, request, data=None): if (data is None): data = {} data['user-agent'] = request.META.get('HTTP_USER_AGENT') data['locale'] = request.LANG data['src'] = request.GET.get('src', '') record_stat(action, request, **data)
[ "def", "record_action", "(", "action", ",", "request", ",", "data", "=", "None", ")", ":", "if", "(", "data", "is", "None", ")", ":", "data", "=", "{", "}", "data", "[", "'user-agent'", "]", "=", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ")", "data", "[", "'locale'", "]", "=", "request", ".", "LANG", "data", "[", "'src'", "]", "=", "request", ".", "GET", ".", "get", "(", "'src'", ",", "''", ")", "record_stat", "(", "action", ",", "request", ",", "**", "data", ")" ]
records the given action by sending it to the metrics servers .
train
false
5,652
def print_rcode(expr, **settings): print(rcode(expr, **settings))
[ "def", "print_rcode", "(", "expr", ",", "**", "settings", ")", ":", "print", "(", "rcode", "(", "expr", ",", "**", "settings", ")", ")" ]
prints r representation of the given expression .
train
false
5,653
def keyed_md5(secret, challenge): warnings.warn('keyed_md5() is deprecated. Use the stdlib module hmac instead.', DeprecationWarning, stacklevel=2) return hmac.HMAC(secret, challenge).hexdigest()
[ "def", "keyed_md5", "(", "secret", ",", "challenge", ")", ":", "warnings", ".", "warn", "(", "'keyed_md5() is deprecated. Use the stdlib module hmac instead.'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "return", "hmac", ".", "HMAC", "(", "secret", ",", "challenge", ")", ".", "hexdigest", "(", ")" ]
create the keyed md5 string for the given secret and challenge .
train
false
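The stdlib replacement the deprecation warning points at; on Python 3 the digest must be named explicitly:

    import hashlib
    import hmac

    hmac.new(b'secret', b'challenge', hashlib.md5).hexdigest()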
5,654
def _pr_compile(regex, cleanup=None): return (_re_compile(regex), cleanup)
[ "def", "_pr_compile", "(", "regex", ",", "cleanup", "=", "None", ")", ":", "return", "(", "_re_compile", "(", "regex", ")", ",", "cleanup", ")" ]
prepare a 2-tuple of compiled regex and callable .
train
false
5,655
def _calc_array_sizeof(ndim): ctx = cpu_target.target_context return ctx.calc_array_sizeof(ndim)
[ "def", "_calc_array_sizeof", "(", "ndim", ")", ":", "ctx", "=", "cpu_target", ".", "target_context", "return", "ctx", ".", "calc_array_sizeof", "(", "ndim", ")" ]
use the abi size in the cpu target .
train
false
5,658
def http_date(timestamp=None): return _dump_date(timestamp, ' ')
[ "def", "http_date", "(", "timestamp", "=", "None", ")", ":", "return", "_dump_date", "(", "timestamp", ",", "' '", ")" ]
return the current date and time formatted for a message header .
train
false
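Typical output of this werkzeug-style formatter; it accepts a timestamp or defaults to the current time:

    http_date(0)   # 'Thu, 01 Jan 1970 00:00:00 GMT'
    http_date()    # the current time in the same RFC 1123 format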
5,660
@task def destroy_instance(nodetype, instance_id): env.nodetype = nodetype instance = ec2_utils.GetInstance(env.region, instance_id) assert instance, ('Instance %s not found' % instance_id) with settings(host_string=instance.public_dns_name): if (instance.state == 'running'): check_min_healthy_instances(3) drain() stop() fprint(('Terminating instance %s' % instance_id)) ec2_utils.TerminateInstance(env.region, instance_id)
[ "@", "task", "def", "destroy_instance", "(", "nodetype", ",", "instance_id", ")", ":", "env", ".", "nodetype", "=", "nodetype", "instance", "=", "ec2_utils", ".", "GetInstance", "(", "env", ".", "region", ",", "instance_id", ")", "assert", "instance", ",", "(", "'Instance %s not found'", "%", "instance_id", ")", "with", "settings", "(", "host_string", "=", "instance", ".", "public_dns_name", ")", ":", "if", "(", "instance", ".", "state", "==", "'running'", ")", ":", "check_min_healthy_instances", "(", "3", ")", "drain", "(", ")", "stop", "(", ")", "fprint", "(", "(", "'Terminating instance %s'", "%", "instance_id", ")", ")", "ec2_utils", ".", "TerminateInstance", "(", "env", ".", "region", ",", "instance_id", ")" ]
stop and terminate an instance .
train
false