id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
40,536
def parallel_beta_diversity_process_run_results_f(f): for line in f: fields = line.strip().split(' DCTB ') dm_components = fields[:(-1)] output_fp = fields[(-1)] dm = assemble_distance_matrix(map(open, dm_components)) output_f = open(output_fp, 'w') output_f.write(dm) output_f.close() return True
[ "def", "parallel_beta_diversity_process_run_results_f", "(", "f", ")", ":", "for", "line", "in", "f", ":", "fields", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "' DCTB '", ")", "dm_components", "=", "fields", "[", ":", "(", "-", "1", ")", "]", "output_fp", "=", "fields", "[", "(", "-", "1", ")", "]", "dm", "=", "assemble_distance_matrix", "(", "map", "(", "open", ",", "dm_components", ")", ")", "output_f", "=", "open", "(", "output_fp", ",", "'w'", ")", "output_f", ".", "write", "(", "dm", ")", "output_f", ".", "close", "(", ")", "return", "True" ]
handles re-assembling of a distance matrix from component vectors .
train
false
40,537
def get_chost(): return get_var('CHOST')
[ "def", "get_chost", "(", ")", ":", "return", "get_var", "(", "'CHOST'", ")" ]
get the value of chost variable in the make .
train
false
40,538
def member(): return s3_rest_controller()
[ "def", "member", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
40,539
def objectify_predicate(selector_func): return type(selector_func.__name__, (Predicate,), {'__doc__': selector_func.__doc__, '__call__': (lambda self, *a, **kw: selector_func(*a, **kw))})
[ "def", "objectify_predicate", "(", "selector_func", ")", ":", "return", "type", "(", "selector_func", ".", "__name__", ",", "(", "Predicate", ",", ")", ",", "{", "'__doc__'", ":", "selector_func", ".", "__doc__", ",", "'__call__'", ":", "(", "lambda", "self", ",", "*", "a", ",", "**", "kw", ":", "selector_func", "(", "*", "a", ",", "**", "kw", ")", ")", "}", ")" ]
most of the time .
train
false
40,540
def ftp_put_command(connection, local_path, remote_dir): try: ftp = ftplib.FTP(host=connection.host, user=connection.username, passwd=connection.password) ftp.cwd(remote_dir) name = os.path.split(local_path)[1] f = open(local_path, 'rb') ftp.storbinary(('STOR ' + name), f) f.close() ftp.close() except Exception: LOG.error(_('File transfer to PowerVM manager failed')) raise exception.PowerVMFTPTransferFailed(ftp_cmd='PUT', source_path=local_path, dest_path=remote_dir)
[ "def", "ftp_put_command", "(", "connection", ",", "local_path", ",", "remote_dir", ")", ":", "try", ":", "ftp", "=", "ftplib", ".", "FTP", "(", "host", "=", "connection", ".", "host", ",", "user", "=", "connection", ".", "username", ",", "passwd", "=", "connection", ".", "password", ")", "ftp", ".", "cwd", "(", "remote_dir", ")", "name", "=", "os", ".", "path", ".", "split", "(", "local_path", ")", "[", "1", "]", "f", "=", "open", "(", "local_path", ",", "'rb'", ")", "ftp", ".", "storbinary", "(", "(", "'STOR '", "+", "name", ")", ",", "f", ")", "f", ".", "close", "(", ")", "ftp", ".", "close", "(", ")", "except", "Exception", ":", "LOG", ".", "error", "(", "_", "(", "'File transfer to PowerVM manager failed'", ")", ")", "raise", "exception", ".", "PowerVMFTPTransferFailed", "(", "ftp_cmd", "=", "'PUT'", ",", "source_path", "=", "local_path", ",", "dest_path", "=", "remote_dir", ")" ]
method to transfer a file via ftp .
train
false
40,541
def _make_stim_channel(trigger_chs, slope, threshold, stim_code, trigger_values): if (slope == '+'): trig_chs_bin = (trigger_chs > threshold) elif (slope == '-'): trig_chs_bin = (trigger_chs < threshold) else: raise ValueError("slope needs to be '+' or '-'") if (stim_code == 'binary'): trigger_values = (2 ** np.arange(len(trigger_chs))) elif (stim_code != 'channel'): raise ValueError(("stim_code must be 'binary' or 'channel', got %s" % repr(stim_code))) trig_chs = (trig_chs_bin * trigger_values[:, np.newaxis]) return np.array(trig_chs.sum(axis=0), ndmin=2)
[ "def", "_make_stim_channel", "(", "trigger_chs", ",", "slope", ",", "threshold", ",", "stim_code", ",", "trigger_values", ")", ":", "if", "(", "slope", "==", "'+'", ")", ":", "trig_chs_bin", "=", "(", "trigger_chs", ">", "threshold", ")", "elif", "(", "slope", "==", "'-'", ")", ":", "trig_chs_bin", "=", "(", "trigger_chs", "<", "threshold", ")", "else", ":", "raise", "ValueError", "(", "\"slope needs to be '+' or '-'\"", ")", "if", "(", "stim_code", "==", "'binary'", ")", ":", "trigger_values", "=", "(", "2", "**", "np", ".", "arange", "(", "len", "(", "trigger_chs", ")", ")", ")", "elif", "(", "stim_code", "!=", "'channel'", ")", ":", "raise", "ValueError", "(", "(", "\"stim_code must be 'binary' or 'channel', got %s\"", "%", "repr", "(", "stim_code", ")", ")", ")", "trig_chs", "=", "(", "trig_chs_bin", "*", "trigger_values", "[", ":", ",", "np", ".", "newaxis", "]", ")", "return", "np", ".", "array", "(", "trig_chs", ".", "sum", "(", "axis", "=", "0", ")", ",", "ndmin", "=", "2", ")" ]
create synthetic stim channel from multiple trigger channels .
train
false
40,542
@_np.deprecate(message='scipy.constants.F2K is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.') def F2K(F): return C2K(F2C(_np.asanyarray(F)))
[ "@", "_np", ".", "deprecate", "(", "message", "=", "'scipy.constants.F2K is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.'", ")", "def", "F2K", "(", "F", ")", ":", "return", "C2K", "(", "F2C", "(", "_np", ".", "asanyarray", "(", "F", ")", ")", ")" ]
convert fahrenheit to kelvin parameters f : array_like fahrenheit temperature(s) to be converted .
train
false
40,543
def get_repository(client, repository=''): try: return client.snapshot.get_repository(repository=repository) except (elasticsearch.TransportError, elasticsearch.NotFoundError): logger.error('Repository {0} not found.'.format(repository)) return False
[ "def", "get_repository", "(", "client", ",", "repository", "=", "''", ")", ":", "try", ":", "return", "client", ".", "snapshot", ".", "get_repository", "(", "repository", "=", "repository", ")", "except", "(", "elasticsearch", ".", "TransportError", ",", "elasticsearch", ".", "NotFoundError", ")", ":", "logger", ".", "error", "(", "'Repository {0} not found.'", ".", "format", "(", "repository", ")", ")", "return", "False" ]
return configuration information for the indicated repository .
train
false
40,544
def test_validate_faulty_wcs(): h = fits.Header() h[u'RADESYSA'] = u'ICRS' h[u'PV2_1'] = 1.0 hdu = fits.PrimaryHDU([[0]], header=h) hdulist = fits.HDUList([hdu]) wcs.validate(hdulist)
[ "def", "test_validate_faulty_wcs", "(", ")", ":", "h", "=", "fits", ".", "Header", "(", ")", "h", "[", "u'RADESYSA'", "]", "=", "u'ICRS'", "h", "[", "u'PV2_1'", "]", "=", "1.0", "hdu", "=", "fits", ".", "PrimaryHDU", "(", "[", "[", "0", "]", "]", ",", "header", "=", "h", ")", "hdulist", "=", "fits", ".", "HDUList", "(", "[", "hdu", "]", ")", "wcs", ".", "validate", "(", "hdulist", ")" ]
from github issue #2053 .
train
false
40,545
def register_as(name, formatter_class): if (not isinstance(name, six.string_types)): warnings.warn(('Use parameter ordering: name, formatter_class (for: %s)' % formatter_class)) _formatter_class = name name = formatter_class formatter_class = _formatter_class if isinstance(formatter_class, six.string_types): scoped_formatter_class_name = formatter_class formatter_class = LazyObject(scoped_formatter_class_name) assert (isinstance(formatter_class, LazyObject) or issubclass(formatter_class, Formatter)) _formatter_registry[name] = formatter_class
[ "def", "register_as", "(", "name", ",", "formatter_class", ")", ":", "if", "(", "not", "isinstance", "(", "name", ",", "six", ".", "string_types", ")", ")", ":", "warnings", ".", "warn", "(", "(", "'Use parameter ordering: name, formatter_class (for: %s)'", "%", "formatter_class", ")", ")", "_formatter_class", "=", "name", "name", "=", "formatter_class", "formatter_class", "=", "_formatter_class", "if", "isinstance", "(", "formatter_class", ",", "six", ".", "string_types", ")", ":", "scoped_formatter_class_name", "=", "formatter_class", "formatter_class", "=", "LazyObject", "(", "scoped_formatter_class_name", ")", "assert", "(", "isinstance", "(", "formatter_class", ",", "LazyObject", ")", "or", "issubclass", "(", "formatter_class", ",", "Formatter", ")", ")", "_formatter_registry", "[", "name", "]", "=", "formatter_class" ]
register formatter class with given name .
train
false
40,546
def bytes2str(bs): if (isinstance(bs, type('')) and (PY_MAJOR_VERSION > 2)): return bs.decode('latin1') else: return bs
[ "def", "bytes2str", "(", "bs", ")", ":", "if", "(", "isinstance", "(", "bs", ",", "type", "(", "''", ")", ")", "and", "(", "PY_MAJOR_VERSION", ">", "2", ")", ")", ":", "return", "bs", ".", "decode", "(", "'latin1'", ")", "else", ":", "return", "bs" ]
for cross compatibility between python 2 and python 3 strings .
train
true
40,547
def review_request_published_cb(sender, user, review_request, trivial, changedesc, **kwargs): siteconfig = SiteConfiguration.objects.get_current() if (siteconfig.get(u'mail_send_review_mail') and (not trivial)): mail_review_request(review_request, user, changedesc)
[ "def", "review_request_published_cb", "(", "sender", ",", "user", ",", "review_request", ",", "trivial", ",", "changedesc", ",", "**", "kwargs", ")", ":", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "if", "(", "siteconfig", ".", "get", "(", "u'mail_send_review_mail'", ")", "and", "(", "not", "trivial", ")", ")", ":", "mail_review_request", "(", "review_request", ",", "user", ",", "changedesc", ")" ]
send e-mail when a review request is published .
train
false
40,548
def drain(file_like, read_size, timeout): while True: with ChunkReadTimeout(timeout): chunk = file_like.read(read_size) if (not chunk): break
[ "def", "drain", "(", "file_like", ",", "read_size", ",", "timeout", ")", ":", "while", "True", ":", "with", "ChunkReadTimeout", "(", "timeout", ")", ":", "chunk", "=", "file_like", ".", "read", "(", "read_size", ")", "if", "(", "not", "chunk", ")", ":", "break" ]
read and discard any bytes from file_like .
train
false
40,549
def CountWordErrors(ocr_text, truth_text): return CountErrors(ocr_text.split(), truth_text.split())
[ "def", "CountWordErrors", "(", "ocr_text", ",", "truth_text", ")", ":", "return", "CountErrors", "(", "ocr_text", ".", "split", "(", ")", ",", "truth_text", ".", "split", "(", ")", ")" ]
counts the word drop and add errors as a bag of words .
train
false
40,550
def key2param(key): result = [] key = list(key) if (not key[0].isalpha()): result.append('x') for c in key: if c.isalnum(): result.append(c) else: result.append('_') return ''.join(result)
[ "def", "key2param", "(", "key", ")", ":", "result", "=", "[", "]", "key", "=", "list", "(", "key", ")", "if", "(", "not", "key", "[", "0", "]", ".", "isalpha", "(", ")", ")", ":", "result", ".", "append", "(", "'x'", ")", "for", "c", "in", "key", ":", "if", "c", ".", "isalnum", "(", ")", ":", "result", ".", "append", "(", "c", ")", "else", ":", "result", ".", "append", "(", "'_'", ")", "return", "''", ".", "join", "(", "result", ")" ]
converts key names into parameter names .
train
false
40,551
def hexii(s, width=16, skip=True): return hexdump(s, width, skip, True)
[ "def", "hexii", "(", "s", ",", "width", "=", "16", ",", "skip", "=", "True", ")", ":", "return", "hexdump", "(", "s", ",", "width", ",", "skip", ",", "True", ")" ]
hexii -> str return a hexii-dump of a string .
train
false
40,552
def long_path(path): if (sabnzbd.WIN32 and path and (not path.startswith(u'\\\\?\\'))): if path.startswith('\\\\'): path = path.replace(u'\\\\', u'\\\\?\\UNC\\', 1) else: path = (u'\\\\?\\' + path) return path
[ "def", "long_path", "(", "path", ")", ":", "if", "(", "sabnzbd", ".", "WIN32", "and", "path", "and", "(", "not", "path", ".", "startswith", "(", "u'\\\\\\\\?\\\\'", ")", ")", ")", ":", "if", "path", ".", "startswith", "(", "'\\\\\\\\'", ")", ":", "path", "=", "path", ".", "replace", "(", "u'\\\\\\\\'", ",", "u'\\\\\\\\?\\\\UNC\\\\'", ",", "1", ")", "else", ":", "path", "=", "(", "u'\\\\\\\\?\\\\'", "+", "path", ")", "return", "path" ]
for windows .
train
false
40,556
def process_chain_both(callbacks, errbacks, input, *a, **kw): d = defer.Deferred() for (cb, eb) in zip(callbacks, errbacks): d.addCallbacks(cb, eb, callbackArgs=a, callbackKeywords=kw, errbackArgs=a, errbackKeywords=kw) if isinstance(input, failure.Failure): d.errback(input) else: d.callback(input) return d
[ "def", "process_chain_both", "(", "callbacks", ",", "errbacks", ",", "input", ",", "*", "a", ",", "**", "kw", ")", ":", "d", "=", "defer", ".", "Deferred", "(", ")", "for", "(", "cb", ",", "eb", ")", "in", "zip", "(", "callbacks", ",", "errbacks", ")", ":", "d", ".", "addCallbacks", "(", "cb", ",", "eb", ",", "callbackArgs", "=", "a", ",", "callbackKeywords", "=", "kw", ",", "errbackArgs", "=", "a", ",", "errbackKeywords", "=", "kw", ")", "if", "isinstance", "(", "input", ",", "failure", ".", "Failure", ")", ":", "d", ".", "errback", "(", "input", ")", "else", ":", "d", ".", "callback", "(", "input", ")", "return", "d" ]
return a deferred built by chaining the given callbacks and errbacks .
train
false
40,557
def md(): return Mde2mdConverter()
[ "def", "md", "(", ")", ":", "return", "Mde2mdConverter", "(", ")" ]
this makes a converter from markdown-extra to markdown .
train
false
40,558
def getCurrentThreadData(): global ThreadData return ThreadData
[ "def", "getCurrentThreadData", "(", ")", ":", "global", "ThreadData", "return", "ThreadData" ]
returns current threads local data .
train
false
40,559
def get_with_url(url): try: params = {'url': url, 'client_id': api_key} request = requests.get(API_BASE.format('resolve'), params=params) request.raise_for_status() except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e: raise APIError('{}'.format(e)) json = request.json() if (not json): return None else: return json
[ "def", "get_with_url", "(", "url", ")", ":", "try", ":", "params", "=", "{", "'url'", ":", "url", ",", "'client_id'", ":", "api_key", "}", "request", "=", "requests", ".", "get", "(", "API_BASE", ".", "format", "(", "'resolve'", ")", ",", "params", "=", "params", ")", "request", ".", "raise_for_status", "(", ")", "except", "(", "requests", ".", "exceptions", ".", "HTTPError", ",", "requests", ".", "exceptions", ".", "ConnectionError", ")", "as", "e", ":", "raise", "APIError", "(", "'{}'", ".", "format", "(", "e", ")", ")", "json", "=", "request", ".", "json", "(", ")", "if", "(", "not", "json", ")", ":", "return", "None", "else", ":", "return", "json" ]
takes a soundcloud url and returns an item .
train
false
40,560
def names2dnsrepr(x): if (type(x) is str): if (x and (x[(-1)] == '\x00')): return x x = [x] res = [] for n in x: termin = '\x00' if (n.count('.') == 0): termin += '\x00' n = (''.join(map((lambda y: (chr(len(y)) + y)), n.split('.'))) + termin) res.append(n) return ''.join(res)
[ "def", "names2dnsrepr", "(", "x", ")", ":", "if", "(", "type", "(", "x", ")", "is", "str", ")", ":", "if", "(", "x", "and", "(", "x", "[", "(", "-", "1", ")", "]", "==", "'\\x00'", ")", ")", ":", "return", "x", "x", "=", "[", "x", "]", "res", "=", "[", "]", "for", "n", "in", "x", ":", "termin", "=", "'\\x00'", "if", "(", "n", ".", "count", "(", "'.'", ")", "==", "0", ")", ":", "termin", "+=", "'\\x00'", "n", "=", "(", "''", ".", "join", "(", "map", "(", "(", "lambda", "y", ":", "(", "chr", "(", "len", "(", "y", ")", ")", "+", "y", ")", ")", ",", "n", ".", "split", "(", "'.'", ")", ")", ")", "+", "termin", ")", "res", ".", "append", "(", "n", ")", "return", "''", ".", "join", "(", "res", ")" ]
take as input a list of dns names or a single dns name and encode it in dns format if a string that is already a dns name in dns format is passed .
train
false
40,563
def print_data_keys(data, item=None): path = (displayable_path(item.path) if item else None) formatted = [] for (key, value) in data.items(): formatted.append(key) if (len(formatted) == 0): return line_format = u'{0}{{0}}'.format((u' ' * 4)) if path: ui.print_(displayable_path(path)) for field in sorted(formatted): ui.print_(line_format.format(field))
[ "def", "print_data_keys", "(", "data", ",", "item", "=", "None", ")", ":", "path", "=", "(", "displayable_path", "(", "item", ".", "path", ")", "if", "item", "else", "None", ")", "formatted", "=", "[", "]", "for", "(", "key", ",", "value", ")", "in", "data", ".", "items", "(", ")", ":", "formatted", ".", "append", "(", "key", ")", "if", "(", "len", "(", "formatted", ")", "==", "0", ")", ":", "return", "line_format", "=", "u'{0}{{0}}'", ".", "format", "(", "(", "u' '", "*", "4", ")", ")", "if", "path", ":", "ui", ".", "print_", "(", "displayable_path", "(", "path", ")", ")", "for", "field", "in", "sorted", "(", "formatted", ")", ":", "ui", ".", "print_", "(", "line_format", ".", "format", "(", "field", ")", ")" ]
print only the keys for an item .
train
false
40,566
def from_url(url, db=None, **kwargs): from redis.client import Redis return Redis.from_url(url, db, **kwargs)
[ "def", "from_url", "(", "url", ",", "db", "=", "None", ",", "**", "kwargs", ")", ":", "from", "redis", ".", "client", "import", "Redis", "return", "Redis", ".", "from_url", "(", "url", ",", "db", ",", "**", "kwargs", ")" ]
returns an active redis client generated from the given database url .
train
true
40,567
@docstring.dedent_interpd def angle_spectrum(x, Fs=None, window=None, pad_to=None, sides=None): return _single_spectrum_helper(x=x, Fs=Fs, window=window, pad_to=pad_to, sides=sides, mode=u'angle')
[ "@", "docstring", ".", "dedent_interpd", "def", "angle_spectrum", "(", "x", ",", "Fs", "=", "None", ",", "window", "=", "None", ",", "pad_to", "=", "None", ",", "sides", "=", "None", ")", ":", "return", "_single_spectrum_helper", "(", "x", "=", "x", ",", "Fs", "=", "Fs", ",", "window", "=", "window", ",", "pad_to", "=", "pad_to", ",", "sides", "=", "sides", ",", "mode", "=", "u'angle'", ")" ]
compute the angle of the frequency spectrum of *x* .
train
false
40,568
def partition_list(): return ceph_cfg.partition_list()
[ "def", "partition_list", "(", ")", ":", "return", "ceph_cfg", ".", "partition_list", "(", ")" ]
list partitions by disk cli example: .
train
false
40,570
def _add_attribute(cls, key, value): if ('__mapper__' in cls.__dict__): if isinstance(value, Column): _undefer_column_name(key, value) cls.__table__.append_column(value) cls.__mapper__.add_property(key, value) elif isinstance(value, ColumnProperty): for col in value.columns: if (isinstance(col, Column) and (col.table is None)): _undefer_column_name(key, col) cls.__table__.append_column(col) cls.__mapper__.add_property(key, value) elif isinstance(value, MapperProperty): cls.__mapper__.add_property(key, clsregistry._deferred_relationship(cls, value)) elif (isinstance(value, QueryableAttribute) and (value.key != key)): value = synonym(value.key) cls.__mapper__.add_property(key, clsregistry._deferred_relationship(cls, value)) else: type.__setattr__(cls, key, value) else: type.__setattr__(cls, key, value)
[ "def", "_add_attribute", "(", "cls", ",", "key", ",", "value", ")", ":", "if", "(", "'__mapper__'", "in", "cls", ".", "__dict__", ")", ":", "if", "isinstance", "(", "value", ",", "Column", ")", ":", "_undefer_column_name", "(", "key", ",", "value", ")", "cls", ".", "__table__", ".", "append_column", "(", "value", ")", "cls", ".", "__mapper__", ".", "add_property", "(", "key", ",", "value", ")", "elif", "isinstance", "(", "value", ",", "ColumnProperty", ")", ":", "for", "col", "in", "value", ".", "columns", ":", "if", "(", "isinstance", "(", "col", ",", "Column", ")", "and", "(", "col", ".", "table", "is", "None", ")", ")", ":", "_undefer_column_name", "(", "key", ",", "col", ")", "cls", ".", "__table__", ".", "append_column", "(", "col", ")", "cls", ".", "__mapper__", ".", "add_property", "(", "key", ",", "value", ")", "elif", "isinstance", "(", "value", ",", "MapperProperty", ")", ":", "cls", ".", "__mapper__", ".", "add_property", "(", "key", ",", "clsregistry", ".", "_deferred_relationship", "(", "cls", ",", "value", ")", ")", "elif", "(", "isinstance", "(", "value", ",", "QueryableAttribute", ")", "and", "(", "value", ".", "key", "!=", "key", ")", ")", ":", "value", "=", "synonym", "(", "value", ".", "key", ")", "cls", ".", "__mapper__", ".", "add_property", "(", "key", ",", "clsregistry", ".", "_deferred_relationship", "(", "cls", ",", "value", ")", ")", "else", ":", "type", ".", "__setattr__", "(", "cls", ",", "key", ",", "value", ")", "else", ":", "type", ".", "__setattr__", "(", "cls", ",", "key", ",", "value", ")" ]
add an attribute to an existing declarative class .
train
false
40,572
def describe_file_handle(fthing): if is_block(fthing): return u'block device' else: return fthing.name
[ "def", "describe_file_handle", "(", "fthing", ")", ":", "if", "is_block", "(", "fthing", ")", ":", "return", "u'block device'", "else", ":", "return", "fthing", ".", "name" ]
return the name of file or a description .
train
false
40,573
def is_neutron(): return CONF.use_neutron
[ "def", "is_neutron", "(", ")", ":", "return", "CONF", ".", "use_neutron" ]
does this configuration mean were neutron .
train
false
40,574
def is_naive(value): return (value.utcoffset() is None)
[ "def", "is_naive", "(", "value", ")", ":", "return", "(", "value", ".", "utcoffset", "(", ")", "is", "None", ")" ]
determines if a given datetime .
train
false
40,575
def _validate_numa_nodes(nodes): if ((nodes is not None) and ((not strutils.is_int_like(nodes)) or (int(nodes) < 1))): raise exception.InvalidNUMANodesNumber(nodes=nodes)
[ "def", "_validate_numa_nodes", "(", "nodes", ")", ":", "if", "(", "(", "nodes", "is", "not", "None", ")", "and", "(", "(", "not", "strutils", ".", "is_int_like", "(", "nodes", ")", ")", "or", "(", "int", "(", "nodes", ")", "<", "1", ")", ")", ")", ":", "raise", "exception", ".", "InvalidNUMANodesNumber", "(", "nodes", "=", "nodes", ")" ]
validate numa nodes number .
train
false
40,577
def on_parent_exit(signame): def noop(): pass try: libc = cdll['libc.so.6'] except OSError: return noop try: prctl = libc.prctl except AttributeError: return noop signum = getattr(signal, signame) def set_parent_exit_signal(): result = prctl(PR_SET_PDEATHSIG, signum) if (result != 0): raise Exception(('prctl failed with error code %s' % result)) return set_parent_exit_signal
[ "def", "on_parent_exit", "(", "signame", ")", ":", "def", "noop", "(", ")", ":", "pass", "try", ":", "libc", "=", "cdll", "[", "'libc.so.6'", "]", "except", "OSError", ":", "return", "noop", "try", ":", "prctl", "=", "libc", ".", "prctl", "except", "AttributeError", ":", "return", "noop", "signum", "=", "getattr", "(", "signal", ",", "signame", ")", "def", "set_parent_exit_signal", "(", ")", ":", "result", "=", "prctl", "(", "PR_SET_PDEATHSIG", ",", "signum", ")", "if", "(", "result", "!=", "0", ")", ":", "raise", "Exception", "(", "(", "'prctl failed with error code %s'", "%", "result", ")", ")", "return", "set_parent_exit_signal" ]
return a function to be run in a child process which will trigger signame to be sent when the parent process dies .
train
false
40,578
def fixSetGroupID(childPath): if ((os.name == u'nt') or (os.name == u'ce')): return parentPath = os.path.dirname(childPath) parentStat = os.stat(parentPath) parentMode = stat.S_IMODE(parentStat[stat.ST_MODE]) childPath = os.path.join(parentPath, os.path.basename(childPath)) if (parentMode & stat.S_ISGID): parentGID = parentStat[stat.ST_GID] childStat = os.stat(childPath) childGID = childStat[stat.ST_GID] if (childGID == parentGID): return childPath_owner = childStat.st_uid user_id = os.geteuid() if ((user_id != 0) and (user_id != childPath_owner)): sickrage.srCore.srLogger.debug(((u'Not running as root or owner of ' + childPath) + u', not trying to set the set-group-ID')) return try: os.chown(childPath, (-1), parentGID) sickrage.srCore.srLogger.debug((u'Respecting the set-group-ID bit on the parent directory for %s' % childPath)) except OSError: sickrage.srCore.srLogger.error((u'Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)' % (childPath, parentGID)))
[ "def", "fixSetGroupID", "(", "childPath", ")", ":", "if", "(", "(", "os", ".", "name", "==", "u'nt'", ")", "or", "(", "os", ".", "name", "==", "u'ce'", ")", ")", ":", "return", "parentPath", "=", "os", ".", "path", ".", "dirname", "(", "childPath", ")", "parentStat", "=", "os", ".", "stat", "(", "parentPath", ")", "parentMode", "=", "stat", ".", "S_IMODE", "(", "parentStat", "[", "stat", ".", "ST_MODE", "]", ")", "childPath", "=", "os", ".", "path", ".", "join", "(", "parentPath", ",", "os", ".", "path", ".", "basename", "(", "childPath", ")", ")", "if", "(", "parentMode", "&", "stat", ".", "S_ISGID", ")", ":", "parentGID", "=", "parentStat", "[", "stat", ".", "ST_GID", "]", "childStat", "=", "os", ".", "stat", "(", "childPath", ")", "childGID", "=", "childStat", "[", "stat", ".", "ST_GID", "]", "if", "(", "childGID", "==", "parentGID", ")", ":", "return", "childPath_owner", "=", "childStat", ".", "st_uid", "user_id", "=", "os", ".", "geteuid", "(", ")", "if", "(", "(", "user_id", "!=", "0", ")", "and", "(", "user_id", "!=", "childPath_owner", ")", ")", ":", "sickrage", ".", "srCore", ".", "srLogger", ".", "debug", "(", "(", "(", "u'Not running as root or owner of '", "+", "childPath", ")", "+", "u', not trying to set the set-group-ID'", ")", ")", "return", "try", ":", "os", ".", "chown", "(", "childPath", ",", "(", "-", "1", ")", ",", "parentGID", ")", "sickrage", ".", "srCore", ".", "srLogger", ".", "debug", "(", "(", "u'Respecting the set-group-ID bit on the parent directory for %s'", "%", "childPath", ")", ")", "except", "OSError", ":", "sickrage", ".", "srCore", ".", "srLogger", ".", "error", "(", "(", "u'Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)'", "%", "(", "childPath", ",", "parentGID", ")", ")", ")" ]
inherid sgid from parent .
train
false
40,579
def _try_get_string(dev, index, langid=None, default_str_i0='', default_access_error='Error Accessing String'): if (index == 0): string = default_str_i0 else: try: if (langid is None): string = util.get_string(dev, index) else: string = util.get_string(dev, index, langid) except: string = default_access_error return string
[ "def", "_try_get_string", "(", "dev", ",", "index", ",", "langid", "=", "None", ",", "default_str_i0", "=", "''", ",", "default_access_error", "=", "'Error Accessing String'", ")", ":", "if", "(", "index", "==", "0", ")", ":", "string", "=", "default_str_i0", "else", ":", "try", ":", "if", "(", "langid", "is", "None", ")", ":", "string", "=", "util", ".", "get_string", "(", "dev", ",", "index", ")", "else", ":", "string", "=", "util", ".", "get_string", "(", "dev", ",", "index", ",", "langid", ")", "except", ":", "string", "=", "default_access_error", "return", "string" ]
try to get a string .
train
true
40,580
@contextlib.contextmanager def task_logging(task): old_task = getattr(local_context, u'task', u'') local_context.task = task try: (yield) finally: local_context.task = old_task
[ "@", "contextlib", ".", "contextmanager", "def", "task_logging", "(", "task", ")", ":", "old_task", "=", "getattr", "(", "local_context", ",", "u'task'", ",", "u''", ")", "local_context", ".", "task", "=", "task", "try", ":", "(", "yield", ")", "finally", ":", "local_context", ".", "task", "=", "old_task" ]
context manager which adds task information to log messages .
train
false
40,582
def init_compare(): print 'Initializing comparison feature' subprocess.Popen(['hg', 'clone', '..', 'a']).wait() subprocess.Popen(['hg', 'clone', '..', 'b']).wait()
[ "def", "init_compare", "(", ")", ":", "print", "'Initializing comparison feature'", "subprocess", ".", "Popen", "(", "[", "'hg'", ",", "'clone'", ",", "'..'", ",", "'a'", "]", ")", ".", "wait", "(", ")", "subprocess", ".", "Popen", "(", "[", "'hg'", ",", "'clone'", ",", "'..'", ",", "'b'", "]", ")", ".", "wait", "(", ")" ]
initializes the comparison feature .
train
false
40,583
@public def roots(f, *gens, **flags): from sympy.polys.polytools import to_rational_coeffs flags = dict(flags) auto = flags.pop('auto', True) cubics = flags.pop('cubics', True) trig = flags.pop('trig', False) quartics = flags.pop('quartics', True) quintics = flags.pop('quintics', False) multiple = flags.pop('multiple', False) filter = flags.pop('filter', None) predicate = flags.pop('predicate', None) if isinstance(f, list): if gens: raise ValueError('redundant generators given') x = Dummy('x') (poly, i) = ({}, (len(f) - 1)) for coeff in f: (poly[i], i) = (sympify(coeff), (i - 1)) f = Poly(poly, x, field=True) else: try: f = Poly(f, *gens, **flags) if ((f.length == 2) and (f.degree() != 1)): n = f.degree() npow_bases = [] expr = f.as_expr() con = expr.as_independent(*gens)[0] for p in Mul.make_args(con): if (p.is_Pow and (not (p.exp % n))): npow_bases.append((p.base ** (p.exp / n))) else: other.append(p) if npow_bases: b = Mul(*npow_bases) B = Dummy() d = roots(Poly(((expr - con) + ((B ** n) * Mul(*others))), *gens, **flags), *gens, **flags) rv = {} for (k, v) in d.items(): rv[k.subs(B, b)] = v return rv except GeneratorsNeeded: if multiple: return [] else: return {} if f.is_multivariate: raise PolynomialError('multivariate polynomials are not supported') def _update_dict(result, root, k): if (root in result): result[root] += k else: result[root] = k def _try_decompose(f): 'Find roots using functional decomposition. ' (factors, roots) = (f.decompose(), []) for root in _try_heuristics(factors[0]): roots.append(root) for factor in factors[1:]: (previous, roots) = (list(roots), []) for root in previous: g = (factor - Poly(root, f.gen)) for root in _try_heuristics(g): roots.append(root) return roots def _try_heuristics(f): 'Find roots using formulas and some tricks. 
' if f.is_ground: return [] if f.is_monomial: return ([S(0)] * f.degree()) if (f.length() == 2): if (f.degree() == 1): return list(map(cancel, roots_linear(f))) else: return roots_binomial(f) result = [] for i in [(-1), 1]: if (not f.eval(i)): f = f.quo(Poly((f.gen - i), f.gen)) result.append(i) break n = f.degree() if (n == 1): result += list(map(cancel, roots_linear(f))) elif (n == 2): result += list(map(cancel, roots_quadratic(f))) elif f.is_cyclotomic: result += roots_cyclotomic(f) elif ((n == 3) and cubics): result += roots_cubic(f, trig=trig) elif ((n == 4) and quartics): result += roots_quartic(f) elif ((n == 5) and quintics): result += roots_quintic(f) return result ((k,), f) = f.terms_gcd() if (not k): zeros = {} else: zeros = {S(0): k} (coeff, f) = preprocess_roots(f) if (auto and f.get_domain().has_Ring): f = f.to_field() rescale_x = None translate_x = None result = {} if (not f.is_ground): if (not f.get_domain().is_Exact): for r in f.nroots(): _update_dict(result, r, 1) elif (f.degree() == 1): result[roots_linear(f)[0]] = 1 elif (f.length() == 2): roots_fun = (roots_quadratic if (f.degree() == 2) else roots_binomial) for r in roots_fun(f): _update_dict(result, r, 1) else: (_, factors) = Poly(f.as_expr()).factor_list() if ((len(factors) == 1) and (f.degree() == 2)): for r in roots_quadratic(f): _update_dict(result, r, 1) elif ((len(factors) == 1) and (factors[0][1] == 1)): if f.get_domain().is_EX: res = to_rational_coeffs(f) if res: if (res[0] is None): (translate_x, f) = res[2:] else: (rescale_x, f) = (res[1], res[(-1)]) result = roots(f) if (not result): for root in _try_decompose(f): _update_dict(result, root, 1) else: for root in _try_decompose(f): _update_dict(result, root, 1) else: for (factor, k) in factors: for r in _try_heuristics(Poly(factor, f.gen, field=True)): _update_dict(result, r, k) if (coeff is not S.One): (_result, result) = (result, {}) for (root, k) in _result.items(): result[(coeff * root)] = k result.update(zeros) if (filter not in 
[None, 'C']): handlers = {'Z': (lambda r: r.is_Integer), 'Q': (lambda r: r.is_Rational), 'R': (lambda r: r.is_real), 'I': (lambda r: r.is_imaginary)} try: query = handlers[filter] except KeyError: raise ValueError(('Invalid filter: %s' % filter)) for zero in dict(result).keys(): if (not query(zero)): del result[zero] if (predicate is not None): for zero in dict(result).keys(): if (not predicate(zero)): del result[zero] if rescale_x: result1 = {} for (k, v) in result.items(): result1[(k * rescale_x)] = v result = result1 if translate_x: result1 = {} for (k, v) in result.items(): result1[(k + translate_x)] = v result = result1 if (not multiple): return result else: zeros = [] for zero in ordered(result): zeros.extend(([zero] * result[zero])) return zeros
[ "@", "public", "def", "roots", "(", "f", ",", "*", "gens", ",", "**", "flags", ")", ":", "from", "sympy", ".", "polys", ".", "polytools", "import", "to_rational_coeffs", "flags", "=", "dict", "(", "flags", ")", "auto", "=", "flags", ".", "pop", "(", "'auto'", ",", "True", ")", "cubics", "=", "flags", ".", "pop", "(", "'cubics'", ",", "True", ")", "trig", "=", "flags", ".", "pop", "(", "'trig'", ",", "False", ")", "quartics", "=", "flags", ".", "pop", "(", "'quartics'", ",", "True", ")", "quintics", "=", "flags", ".", "pop", "(", "'quintics'", ",", "False", ")", "multiple", "=", "flags", ".", "pop", "(", "'multiple'", ",", "False", ")", "filter", "=", "flags", ".", "pop", "(", "'filter'", ",", "None", ")", "predicate", "=", "flags", ".", "pop", "(", "'predicate'", ",", "None", ")", "if", "isinstance", "(", "f", ",", "list", ")", ":", "if", "gens", ":", "raise", "ValueError", "(", "'redundant generators given'", ")", "x", "=", "Dummy", "(", "'x'", ")", "(", "poly", ",", "i", ")", "=", "(", "{", "}", ",", "(", "len", "(", "f", ")", "-", "1", ")", ")", "for", "coeff", "in", "f", ":", "(", "poly", "[", "i", "]", ",", "i", ")", "=", "(", "sympify", "(", "coeff", ")", ",", "(", "i", "-", "1", ")", ")", "f", "=", "Poly", "(", "poly", ",", "x", ",", "field", "=", "True", ")", "else", ":", "try", ":", "f", "=", "Poly", "(", "f", ",", "*", "gens", ",", "**", "flags", ")", "if", "(", "(", "f", ".", "length", "==", "2", ")", "and", "(", "f", ".", "degree", "(", ")", "!=", "1", ")", ")", ":", "n", "=", "f", ".", "degree", "(", ")", "npow_bases", "=", "[", "]", "expr", "=", "f", ".", "as_expr", "(", ")", "con", "=", "expr", ".", "as_independent", "(", "*", "gens", ")", "[", "0", "]", "for", "p", "in", "Mul", ".", "make_args", "(", "con", ")", ":", "if", "(", "p", ".", "is_Pow", "and", "(", "not", "(", "p", ".", "exp", "%", "n", ")", ")", ")", ":", "npow_bases", ".", "append", "(", "(", "p", ".", "base", "**", "(", "p", ".", "exp", "/", "n", ")", ")", ")", "else", ":", "other", 
".", "append", "(", "p", ")", "if", "npow_bases", ":", "b", "=", "Mul", "(", "*", "npow_bases", ")", "B", "=", "Dummy", "(", ")", "d", "=", "roots", "(", "Poly", "(", "(", "(", "expr", "-", "con", ")", "+", "(", "(", "B", "**", "n", ")", "*", "Mul", "(", "*", "others", ")", ")", ")", ",", "*", "gens", ",", "**", "flags", ")", ",", "*", "gens", ",", "**", "flags", ")", "rv", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "rv", "[", "k", ".", "subs", "(", "B", ",", "b", ")", "]", "=", "v", "return", "rv", "except", "GeneratorsNeeded", ":", "if", "multiple", ":", "return", "[", "]", "else", ":", "return", "{", "}", "if", "f", ".", "is_multivariate", ":", "raise", "PolynomialError", "(", "'multivariate polynomials are not supported'", ")", "def", "_update_dict", "(", "result", ",", "root", ",", "k", ")", ":", "if", "(", "root", "in", "result", ")", ":", "result", "[", "root", "]", "+=", "k", "else", ":", "result", "[", "root", "]", "=", "k", "def", "_try_decompose", "(", "f", ")", ":", "(", "factors", ",", "roots", ")", "=", "(", "f", ".", "decompose", "(", ")", ",", "[", "]", ")", "for", "root", "in", "_try_heuristics", "(", "factors", "[", "0", "]", ")", ":", "roots", ".", "append", "(", "root", ")", "for", "factor", "in", "factors", "[", "1", ":", "]", ":", "(", "previous", ",", "roots", ")", "=", "(", "list", "(", "roots", ")", ",", "[", "]", ")", "for", "root", "in", "previous", ":", "g", "=", "(", "factor", "-", "Poly", "(", "root", ",", "f", ".", "gen", ")", ")", "for", "root", "in", "_try_heuristics", "(", "g", ")", ":", "roots", ".", "append", "(", "root", ")", "return", "roots", "def", "_try_heuristics", "(", "f", ")", ":", "if", "f", ".", "is_ground", ":", "return", "[", "]", "if", "f", ".", "is_monomial", ":", "return", "(", "[", "S", "(", "0", ")", "]", "*", "f", ".", "degree", "(", ")", ")", "if", "(", "f", ".", "length", "(", ")", "==", "2", ")", ":", "if", "(", "f", ".", "degree", "(", ")", "==", "1", ")", ":", 
"return", "list", "(", "map", "(", "cancel", ",", "roots_linear", "(", "f", ")", ")", ")", "else", ":", "return", "roots_binomial", "(", "f", ")", "result", "=", "[", "]", "for", "i", "in", "[", "(", "-", "1", ")", ",", "1", "]", ":", "if", "(", "not", "f", ".", "eval", "(", "i", ")", ")", ":", "f", "=", "f", ".", "quo", "(", "Poly", "(", "(", "f", ".", "gen", "-", "i", ")", ",", "f", ".", "gen", ")", ")", "result", ".", "append", "(", "i", ")", "break", "n", "=", "f", ".", "degree", "(", ")", "if", "(", "n", "==", "1", ")", ":", "result", "+=", "list", "(", "map", "(", "cancel", ",", "roots_linear", "(", "f", ")", ")", ")", "elif", "(", "n", "==", "2", ")", ":", "result", "+=", "list", "(", "map", "(", "cancel", ",", "roots_quadratic", "(", "f", ")", ")", ")", "elif", "f", ".", "is_cyclotomic", ":", "result", "+=", "roots_cyclotomic", "(", "f", ")", "elif", "(", "(", "n", "==", "3", ")", "and", "cubics", ")", ":", "result", "+=", "roots_cubic", "(", "f", ",", "trig", "=", "trig", ")", "elif", "(", "(", "n", "==", "4", ")", "and", "quartics", ")", ":", "result", "+=", "roots_quartic", "(", "f", ")", "elif", "(", "(", "n", "==", "5", ")", "and", "quintics", ")", ":", "result", "+=", "roots_quintic", "(", "f", ")", "return", "result", "(", "(", "k", ",", ")", ",", "f", ")", "=", "f", ".", "terms_gcd", "(", ")", "if", "(", "not", "k", ")", ":", "zeros", "=", "{", "}", "else", ":", "zeros", "=", "{", "S", "(", "0", ")", ":", "k", "}", "(", "coeff", ",", "f", ")", "=", "preprocess_roots", "(", "f", ")", "if", "(", "auto", "and", "f", ".", "get_domain", "(", ")", ".", "has_Ring", ")", ":", "f", "=", "f", ".", "to_field", "(", ")", "rescale_x", "=", "None", "translate_x", "=", "None", "result", "=", "{", "}", "if", "(", "not", "f", ".", "is_ground", ")", ":", "if", "(", "not", "f", ".", "get_domain", "(", ")", ".", "is_Exact", ")", ":", "for", "r", "in", "f", ".", "nroots", "(", ")", ":", "_update_dict", "(", "result", ",", "r", ",", "1", ")", "elif", "(", "f", ".", 
"degree", "(", ")", "==", "1", ")", ":", "result", "[", "roots_linear", "(", "f", ")", "[", "0", "]", "]", "=", "1", "elif", "(", "f", ".", "length", "(", ")", "==", "2", ")", ":", "roots_fun", "=", "(", "roots_quadratic", "if", "(", "f", ".", "degree", "(", ")", "==", "2", ")", "else", "roots_binomial", ")", "for", "r", "in", "roots_fun", "(", "f", ")", ":", "_update_dict", "(", "result", ",", "r", ",", "1", ")", "else", ":", "(", "_", ",", "factors", ")", "=", "Poly", "(", "f", ".", "as_expr", "(", ")", ")", ".", "factor_list", "(", ")", "if", "(", "(", "len", "(", "factors", ")", "==", "1", ")", "and", "(", "f", ".", "degree", "(", ")", "==", "2", ")", ")", ":", "for", "r", "in", "roots_quadratic", "(", "f", ")", ":", "_update_dict", "(", "result", ",", "r", ",", "1", ")", "elif", "(", "(", "len", "(", "factors", ")", "==", "1", ")", "and", "(", "factors", "[", "0", "]", "[", "1", "]", "==", "1", ")", ")", ":", "if", "f", ".", "get_domain", "(", ")", ".", "is_EX", ":", "res", "=", "to_rational_coeffs", "(", "f", ")", "if", "res", ":", "if", "(", "res", "[", "0", "]", "is", "None", ")", ":", "(", "translate_x", ",", "f", ")", "=", "res", "[", "2", ":", "]", "else", ":", "(", "rescale_x", ",", "f", ")", "=", "(", "res", "[", "1", "]", ",", "res", "[", "(", "-", "1", ")", "]", ")", "result", "=", "roots", "(", "f", ")", "if", "(", "not", "result", ")", ":", "for", "root", "in", "_try_decompose", "(", "f", ")", ":", "_update_dict", "(", "result", ",", "root", ",", "1", ")", "else", ":", "for", "root", "in", "_try_decompose", "(", "f", ")", ":", "_update_dict", "(", "result", ",", "root", ",", "1", ")", "else", ":", "for", "(", "factor", ",", "k", ")", "in", "factors", ":", "for", "r", "in", "_try_heuristics", "(", "Poly", "(", "factor", ",", "f", ".", "gen", ",", "field", "=", "True", ")", ")", ":", "_update_dict", "(", "result", ",", "r", ",", "k", ")", "if", "(", "coeff", "is", "not", "S", ".", "One", ")", ":", "(", "_result", ",", "result", ")", "=", "(", 
"result", ",", "{", "}", ")", "for", "(", "root", ",", "k", ")", "in", "_result", ".", "items", "(", ")", ":", "result", "[", "(", "coeff", "*", "root", ")", "]", "=", "k", "result", ".", "update", "(", "zeros", ")", "if", "(", "filter", "not", "in", "[", "None", ",", "'C'", "]", ")", ":", "handlers", "=", "{", "'Z'", ":", "(", "lambda", "r", ":", "r", ".", "is_Integer", ")", ",", "'Q'", ":", "(", "lambda", "r", ":", "r", ".", "is_Rational", ")", ",", "'R'", ":", "(", "lambda", "r", ":", "r", ".", "is_real", ")", ",", "'I'", ":", "(", "lambda", "r", ":", "r", ".", "is_imaginary", ")", "}", "try", ":", "query", "=", "handlers", "[", "filter", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "'Invalid filter: %s'", "%", "filter", ")", ")", "for", "zero", "in", "dict", "(", "result", ")", ".", "keys", "(", ")", ":", "if", "(", "not", "query", "(", "zero", ")", ")", ":", "del", "result", "[", "zero", "]", "if", "(", "predicate", "is", "not", "None", ")", ":", "for", "zero", "in", "dict", "(", "result", ")", ".", "keys", "(", ")", ":", "if", "(", "not", "predicate", "(", "zero", ")", ")", ":", "del", "result", "[", "zero", "]", "if", "rescale_x", ":", "result1", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "result", ".", "items", "(", ")", ":", "result1", "[", "(", "k", "*", "rescale_x", ")", "]", "=", "v", "result", "=", "result1", "if", "translate_x", ":", "result1", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "result", ".", "items", "(", ")", ":", "result1", "[", "(", "k", "+", "translate_x", ")", "]", "=", "v", "result", "=", "result1", "if", "(", "not", "multiple", ")", ":", "return", "result", "else", ":", "zeros", "=", "[", "]", "for", "zero", "in", "ordered", "(", "result", ")", ":", "zeros", ".", "extend", "(", "(", "[", "zero", "]", "*", "result", "[", "zero", "]", ")", ")", "return", "zeros" ]
computes symbolic roots of a univariate polynomial .
train
false
40,584
def addXMLFromLoopComplexZ(attributes, depth, loop, output, z): addBeginXMLTag(attributes, depth, 'path', output) for pointComplexIndex in xrange(len(loop)): pointComplex = loop[pointComplexIndex] addXMLFromXYZ((depth + 1), pointComplexIndex, output, pointComplex.real, pointComplex.imag, z) addEndXMLTag(depth, 'path', output)
[ "def", "addXMLFromLoopComplexZ", "(", "attributes", ",", "depth", ",", "loop", ",", "output", ",", "z", ")", ":", "addBeginXMLTag", "(", "attributes", ",", "depth", ",", "'path'", ",", "output", ")", "for", "pointComplexIndex", "in", "xrange", "(", "len", "(", "loop", ")", ")", ":", "pointComplex", "=", "loop", "[", "pointComplexIndex", "]", "addXMLFromXYZ", "(", "(", "depth", "+", "1", ")", ",", "pointComplexIndex", ",", "output", ",", "pointComplex", ".", "real", ",", "pointComplex", ".", "imag", ",", "z", ")", "addEndXMLTag", "(", "depth", ",", "'path'", ",", "output", ")" ]
add xml from loop .
train
false
40,585
def _unpack_epochs(epochs): if (len(epochs.event_id) > 1): epochs = [epochs[k] for k in epochs.event_id] else: epochs = [epochs] return epochs
[ "def", "_unpack_epochs", "(", "epochs", ")", ":", "if", "(", "len", "(", "epochs", ".", "event_id", ")", ">", "1", ")", ":", "epochs", "=", "[", "epochs", "[", "k", "]", "for", "k", "in", "epochs", ".", "event_id", "]", "else", ":", "epochs", "=", "[", "epochs", "]", "return", "epochs" ]
aux function to unpack epochs into a list split by event id .
train
false
40,587
def parse_iso8601_timestamp(timestamp): return datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
[ "def", "parse_iso8601_timestamp", "(", "timestamp", ")", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "timestamp", ",", "'%Y-%m-%dT%H:%M:%S.%fZ'", ")" ]
parse a particular type of iso8601 formatted timestamp .
train
false
40,588
def remove_tenant_user(request, tenant_id, user_id): client = keystoneclient(request, admin=True) roles = client.roles.roles_for_user(user_id, tenant_id) for role in roles: client.roles.remove_user_role(user_id, role.id, tenant_id)
[ "def", "remove_tenant_user", "(", "request", ",", "tenant_id", ",", "user_id", ")", ":", "client", "=", "keystoneclient", "(", "request", ",", "admin", "=", "True", ")", "roles", "=", "client", ".", "roles", ".", "roles_for_user", "(", "user_id", ",", "tenant_id", ")", "for", "role", "in", "roles", ":", "client", ".", "roles", ".", "remove_user_role", "(", "user_id", ",", "role", ".", "id", ",", "tenant_id", ")" ]
removes all roles from a user on a tenant .
train
false
40,589
def send_all_stats(): deployment_id = helper.get_deployment_id() if (not deployment_id): return logging.debug('Getting all stats from every deployment node.') all_stats = helper.get_all_stats() portal_path = hermes_constants.PORTAL_STATS_PATH.format(deployment_id) url = '{0}{1}'.format(hermes_constants.PORTAL_URL, portal_path) data = {JSONTags.DEPLOYMENT_ID: deployment_id, JSONTags.TIMESTAMP: datetime.datetime.utcnow(), JSONTags.ALL_STATS: json.dumps(all_stats)} logging.debug('Sending all stats to the AppScale Portal. Data: \n{}'.format(data)) request = helper.create_request(url=url, method='POST', body=urllib.urlencode(data)) response = helper.urlfetch(request) if (not response[JSONTags.SUCCESS]): logging.error('Inaccessible resource: {}'.format(url)) return
[ "def", "send_all_stats", "(", ")", ":", "deployment_id", "=", "helper", ".", "get_deployment_id", "(", ")", "if", "(", "not", "deployment_id", ")", ":", "return", "logging", ".", "debug", "(", "'Getting all stats from every deployment node.'", ")", "all_stats", "=", "helper", ".", "get_all_stats", "(", ")", "portal_path", "=", "hermes_constants", ".", "PORTAL_STATS_PATH", ".", "format", "(", "deployment_id", ")", "url", "=", "'{0}{1}'", ".", "format", "(", "hermes_constants", ".", "PORTAL_URL", ",", "portal_path", ")", "data", "=", "{", "JSONTags", ".", "DEPLOYMENT_ID", ":", "deployment_id", ",", "JSONTags", ".", "TIMESTAMP", ":", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ",", "JSONTags", ".", "ALL_STATS", ":", "json", ".", "dumps", "(", "all_stats", ")", "}", "logging", ".", "debug", "(", "'Sending all stats to the AppScale Portal. Data: \\n{}'", ".", "format", "(", "data", ")", ")", "request", "=", "helper", ".", "create_request", "(", "url", "=", "url", ",", "method", "=", "'POST'", ",", "body", "=", "urllib", ".", "urlencode", "(", "data", ")", ")", "response", "=", "helper", ".", "urlfetch", "(", "request", ")", "if", "(", "not", "response", "[", "JSONTags", ".", "SUCCESS", "]", ")", ":", "logging", ".", "error", "(", "'Inaccessible resource: {}'", ".", "format", "(", "url", ")", ")", "return" ]
calls get_all_stats and sends the deployment monitoring stats to the appscale portal .
train
false
40,590
def temporary_mount(filesystem, options=None): mountpoint = temporary_directory() try: mounted_fs = mount(filesystem, mountpoint, options=options) except: mountpoint.remove() raise return TemporaryMountedFilesystem(fs=mounted_fs)
[ "def", "temporary_mount", "(", "filesystem", ",", "options", "=", "None", ")", ":", "mountpoint", "=", "temporary_directory", "(", ")", "try", ":", "mounted_fs", "=", "mount", "(", "filesystem", ",", "mountpoint", ",", "options", "=", "options", ")", "except", ":", "mountpoint", ".", "remove", "(", ")", "raise", "return", "TemporaryMountedFilesystem", "(", "fs", "=", "mounted_fs", ")" ]
mount filesystem at a temporary mountpoint with options .
train
false
40,592
def indefinite_article(word): return 'a'
[ "def", "indefinite_article", "(", "word", ")", ":", "return", "'a'" ]
returns the indefinite article for a given word .
train
false
40,593
def get_permalink_ids(self): return list(self.get_permalink_ids_iter())
[ "def", "get_permalink_ids", "(", "self", ")", ":", "return", "list", "(", "self", ".", "get_permalink_ids_iter", "(", ")", ")" ]
method to get permalink ids from content .
train
false
40,595
def _get_blocks(rows, coords, idx): for idx_name in ('query', 'hit', 'midline', 'qannot', 'hannot'): assert (idx_name in idx) blocks = [] for (start, end) in coords: block = {} block['query'] = rows[idx['query']][start:end] block['hit'] = rows[idx['hit']][start:end] block['similarity'] = rows[idx['midline']][start:end] if (idx['qannot'] is not None): block['query_annotation'] = rows[idx['qannot']][start:end] if (idx['hannot'] is not None): block['hit_annotation'] = rows[idx['hannot']][start:end] blocks.append(block) return blocks
[ "def", "_get_blocks", "(", "rows", ",", "coords", ",", "idx", ")", ":", "for", "idx_name", "in", "(", "'query'", ",", "'hit'", ",", "'midline'", ",", "'qannot'", ",", "'hannot'", ")", ":", "assert", "(", "idx_name", "in", "idx", ")", "blocks", "=", "[", "]", "for", "(", "start", ",", "end", ")", "in", "coords", ":", "block", "=", "{", "}", "block", "[", "'query'", "]", "=", "rows", "[", "idx", "[", "'query'", "]", "]", "[", "start", ":", "end", "]", "block", "[", "'hit'", "]", "=", "rows", "[", "idx", "[", "'hit'", "]", "]", "[", "start", ":", "end", "]", "block", "[", "'similarity'", "]", "=", "rows", "[", "idx", "[", "'midline'", "]", "]", "[", "start", ":", "end", "]", "if", "(", "idx", "[", "'qannot'", "]", "is", "not", "None", ")", ":", "block", "[", "'query_annotation'", "]", "=", "rows", "[", "idx", "[", "'qannot'", "]", "]", "[", "start", ":", "end", "]", "if", "(", "idx", "[", "'hannot'", "]", "is", "not", "None", ")", ":", "block", "[", "'hit_annotation'", "]", "=", "rows", "[", "idx", "[", "'hannot'", "]", "]", "[", "start", ":", "end", "]", "blocks", ".", "append", "(", "block", ")", "return", "blocks" ]
returns a list of dictionaries of sequences split by the coordinates .
train
false
40,596
def to_language(locale): if ('_' in locale): return to_language(translation.trans_real.to_language(locale)) elif ('-' in locale): (lang, region) = locale.lower().split('-') if (region == 'latn'): region = region.capitalize() else: region = region.upper() return ('%s-%s' % (lang, region)) else: return translation.trans_real.to_language(locale)
[ "def", "to_language", "(", "locale", ")", ":", "if", "(", "'_'", "in", "locale", ")", ":", "return", "to_language", "(", "translation", ".", "trans_real", ".", "to_language", "(", "locale", ")", ")", "elif", "(", "'-'", "in", "locale", ")", ":", "(", "lang", ",", "region", ")", "=", "locale", ".", "lower", "(", ")", ".", "split", "(", "'-'", ")", "if", "(", "region", "==", "'latn'", ")", ":", "region", "=", "region", ".", "capitalize", "(", ")", "else", ":", "region", "=", "region", ".", "upper", "(", ")", "return", "(", "'%s-%s'", "%", "(", "lang", ",", "region", ")", ")", "else", ":", "return", "translation", ".", "trans_real", ".", "to_language", "(", "locale", ")" ]
turns a locale name into a language name .
train
false
40,597
def mirror_from(source, attributes, only_getters=True): def decorator(cls): for attribute in attributes: def make_gs_etter(source, attribute): def getter(self): return getattr(getattr(self, source), attribute) def setter(self, value): setattr(getattr(self, source), attribute, value) return (getter, setter) (getter, setter) = make_gs_etter(source, attribute) if only_getters: setattr(cls, attribute, property(getter)) else: setattr(cls, attribute, property(getter, setter)) return cls return decorator
[ "def", "mirror_from", "(", "source", ",", "attributes", ",", "only_getters", "=", "True", ")", ":", "def", "decorator", "(", "cls", ")", ":", "for", "attribute", "in", "attributes", ":", "def", "make_gs_etter", "(", "source", ",", "attribute", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "getattr", "(", "getattr", "(", "self", ",", "source", ")", ",", "attribute", ")", "def", "setter", "(", "self", ",", "value", ")", ":", "setattr", "(", "getattr", "(", "self", ",", "source", ")", ",", "attribute", ",", "value", ")", "return", "(", "getter", ",", "setter", ")", "(", "getter", ",", "setter", ")", "=", "make_gs_etter", "(", "source", ",", "attribute", ")", "if", "only_getters", ":", "setattr", "(", "cls", ",", "attribute", ",", "property", "(", "getter", ")", ")", "else", ":", "setattr", "(", "cls", ",", "attribute", ",", "property", "(", "getter", ",", "setter", ")", ")", "return", "cls", "return", "decorator" ]
decorator for classes that mirror some attributes from an instance variable .
train
false
40,598
def check_call(*popenargs, **kwargs): retcode = call(*popenargs, **kwargs) cmd = kwargs.get('args') if (cmd is None): cmd = popenargs[0] if retcode: raise CalledProcessError(retcode, cmd) return retcode
[ "def", "check_call", "(", "*", "popenargs", ",", "**", "kwargs", ")", ":", "retcode", "=", "call", "(", "*", "popenargs", ",", "**", "kwargs", ")", "cmd", "=", "kwargs", ".", "get", "(", "'args'", ")", "if", "(", "cmd", "is", "None", ")", ":", "cmd", "=", "popenargs", "[", "0", "]", "if", "retcode", ":", "raise", "CalledProcessError", "(", "retcode", ",", "cmd", ")", "return", "retcode" ]
run command with arguments .
train
true
40,600
def getTransformedPathByKey(key, xmlElement): if (key not in xmlElement.attributeDictionary): return [] word = str(xmlElement.attributeDictionary[key]).strip() evaluatedLinkValue = getEvaluatedLinkValue(word, xmlElement) if (evaluatedLinkValue.__class__ == list): return getPathByList(evaluatedLinkValue) xmlElementObject = getXMLElementObject(evaluatedLinkValueClass) if (xmlElementObject == None): return [] return xmlElementObject.getTransformedPaths()[0]
[ "def", "getTransformedPathByKey", "(", "key", ",", "xmlElement", ")", ":", "if", "(", "key", "not", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "return", "[", "]", "word", "=", "str", "(", "xmlElement", ".", "attributeDictionary", "[", "key", "]", ")", ".", "strip", "(", ")", "evaluatedLinkValue", "=", "getEvaluatedLinkValue", "(", "word", ",", "xmlElement", ")", "if", "(", "evaluatedLinkValue", ".", "__class__", "==", "list", ")", ":", "return", "getPathByList", "(", "evaluatedLinkValue", ")", "xmlElementObject", "=", "getXMLElementObject", "(", "evaluatedLinkValueClass", ")", "if", "(", "xmlElementObject", "==", "None", ")", ":", "return", "[", "]", "return", "xmlElementObject", ".", "getTransformedPaths", "(", ")", "[", "0", "]" ]
get transformed path from key and xml element .
train
false
40,601
def _from_ordinalf(x, tz=None): if (tz is None): tz = _get_rc_timezone() ix = int(x) dt = datetime.datetime.fromordinal(ix) remainder = (float(x) - ix) (hour, remainder) = divmod((24 * remainder), 1) (minute, remainder) = divmod((60 * remainder), 1) (second, remainder) = divmod((60 * remainder), 1) microsecond = int((1000000.0 * remainder)) if (microsecond < 10): microsecond = 0 dt = datetime.datetime(dt.year, dt.month, dt.day, int(hour), int(minute), int(second), microsecond, tzinfo=UTC).astimezone(tz) if (microsecond > 999990): dt += datetime.timedelta(microseconds=(1000000.0 - microsecond)) return dt
[ "def", "_from_ordinalf", "(", "x", ",", "tz", "=", "None", ")", ":", "if", "(", "tz", "is", "None", ")", ":", "tz", "=", "_get_rc_timezone", "(", ")", "ix", "=", "int", "(", "x", ")", "dt", "=", "datetime", ".", "datetime", ".", "fromordinal", "(", "ix", ")", "remainder", "=", "(", "float", "(", "x", ")", "-", "ix", ")", "(", "hour", ",", "remainder", ")", "=", "divmod", "(", "(", "24", "*", "remainder", ")", ",", "1", ")", "(", "minute", ",", "remainder", ")", "=", "divmod", "(", "(", "60", "*", "remainder", ")", ",", "1", ")", "(", "second", ",", "remainder", ")", "=", "divmod", "(", "(", "60", "*", "remainder", ")", ",", "1", ")", "microsecond", "=", "int", "(", "(", "1000000.0", "*", "remainder", ")", ")", "if", "(", "microsecond", "<", "10", ")", ":", "microsecond", "=", "0", "dt", "=", "datetime", ".", "datetime", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "int", "(", "hour", ")", ",", "int", "(", "minute", ")", ",", "int", "(", "second", ")", ",", "microsecond", ",", "tzinfo", "=", "UTC", ")", ".", "astimezone", "(", "tz", ")", "if", "(", "microsecond", ">", "999990", ")", ":", "dt", "+=", "datetime", ".", "timedelta", "(", "microseconds", "=", "(", "1000000.0", "-", "microsecond", ")", ")", "return", "dt" ]
convert gregorian float of the date to a datetime .
train
true
40,602
def list_cache_nodes_full(opts=None, provider=None, base=None): if (opts is None): opts = __opts__ if (opts.get('update_cachedir', False) is False): return if (base is None): base = os.path.join(opts['cachedir'], 'active') minions = {} for driver in os.listdir(base): minions[driver] = {} prov_dir = os.path.join(base, driver) for prov in os.listdir(prov_dir): if (provider and (provider != prov)): continue minions[driver][prov] = {} min_dir = os.path.join(prov_dir, prov) for fname in os.listdir(min_dir): fpath = os.path.join(min_dir, fname) minion_id = fname[:(-2)] with salt.utils.fopen(fpath, 'r') as fh_: minions[driver][prov][minion_id] = msgpack.load(fh_) return minions
[ "def", "list_cache_nodes_full", "(", "opts", "=", "None", ",", "provider", "=", "None", ",", "base", "=", "None", ")", ":", "if", "(", "opts", "is", "None", ")", ":", "opts", "=", "__opts__", "if", "(", "opts", ".", "get", "(", "'update_cachedir'", ",", "False", ")", "is", "False", ")", ":", "return", "if", "(", "base", "is", "None", ")", ":", "base", "=", "os", ".", "path", ".", "join", "(", "opts", "[", "'cachedir'", "]", ",", "'active'", ")", "minions", "=", "{", "}", "for", "driver", "in", "os", ".", "listdir", "(", "base", ")", ":", "minions", "[", "driver", "]", "=", "{", "}", "prov_dir", "=", "os", ".", "path", ".", "join", "(", "base", ",", "driver", ")", "for", "prov", "in", "os", ".", "listdir", "(", "prov_dir", ")", ":", "if", "(", "provider", "and", "(", "provider", "!=", "prov", ")", ")", ":", "continue", "minions", "[", "driver", "]", "[", "prov", "]", "=", "{", "}", "min_dir", "=", "os", ".", "path", ".", "join", "(", "prov_dir", ",", "prov", ")", "for", "fname", "in", "os", ".", "listdir", "(", "min_dir", ")", ":", "fpath", "=", "os", ".", "path", ".", "join", "(", "min_dir", ",", "fname", ")", "minion_id", "=", "fname", "[", ":", "(", "-", "2", ")", "]", "with", "salt", ".", "utils", ".", "fopen", "(", "fpath", ",", "'r'", ")", "as", "fh_", ":", "minions", "[", "driver", "]", "[", "prov", "]", "[", "minion_id", "]", "=", "msgpack", ".", "load", "(", "fh_", ")", "return", "minions" ]
return a dictionary of minion data from the cloud cache .
train
false
40,604
def add_vendor_dir(path, index=1): venv_path = os.path.join(path, 'lib', _PYTHON_VERSION, 'site-packages') if os.path.isdir(venv_path): site_dir = venv_path elif os.path.isdir(path): site_dir = path else: raise ValueError(('virtualenv: cannot access %s: No such virtualenv or site directory' % path)) sys_path = sys.path[:] del sys.path[index:] import site site.addsitedir(site_dir) sys.path.extend(sys_path[index:])
[ "def", "add_vendor_dir", "(", "path", ",", "index", "=", "1", ")", ":", "venv_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'lib'", ",", "_PYTHON_VERSION", ",", "'site-packages'", ")", "if", "os", ".", "path", ".", "isdir", "(", "venv_path", ")", ":", "site_dir", "=", "venv_path", "elif", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "site_dir", "=", "path", "else", ":", "raise", "ValueError", "(", "(", "'virtualenv: cannot access %s: No such virtualenv or site directory'", "%", "path", ")", ")", "sys_path", "=", "sys", ".", "path", "[", ":", "]", "del", "sys", ".", "path", "[", "index", ":", "]", "import", "site", "site", ".", "addsitedir", "(", "site_dir", ")", "sys", ".", "path", ".", "extend", "(", "sys_path", "[", "index", ":", "]", ")" ]
insert site dir or virtualenv at a given index in sys .
train
false
40,605
def _xx_time_effects(x, y): xx = np.dot(x.values.T, x.values) xt = x.sum(level=0).values count = y.unstack().count(1).values selector = (count > 0) xt = xt[selector] count = count[selector] return (xx - np.dot((xt.T / count), xt))
[ "def", "_xx_time_effects", "(", "x", ",", "y", ")", ":", "xx", "=", "np", ".", "dot", "(", "x", ".", "values", ".", "T", ",", "x", ".", "values", ")", "xt", "=", "x", ".", "sum", "(", "level", "=", "0", ")", ".", "values", "count", "=", "y", ".", "unstack", "(", ")", ".", "count", "(", "1", ")", ".", "values", "selector", "=", "(", "count", ">", "0", ")", "xt", "=", "xt", "[", "selector", "]", "count", "=", "count", "[", "selector", "]", "return", "(", "xx", "-", "np", ".", "dot", "(", "(", "xt", ".", "T", "/", "count", ")", ",", "xt", ")", ")" ]
returns xx - ^-1 .
train
false
40,606
def _root(name='', all_roots=False): if (_sd_version() >= 219): if all_roots: return [os.path.join(x, name) for x in ('/var/lib/machines', '/var/lib/container')] else: return os.path.join('/var/lib/machines', name) else: ret = os.path.join('/var/lib/container', name) if all_roots: return [ret] else: return ret
[ "def", "_root", "(", "name", "=", "''", ",", "all_roots", "=", "False", ")", ":", "if", "(", "_sd_version", "(", ")", ">=", "219", ")", ":", "if", "all_roots", ":", "return", "[", "os", ".", "path", ".", "join", "(", "x", ",", "name", ")", "for", "x", "in", "(", "'/var/lib/machines'", ",", "'/var/lib/container'", ")", "]", "else", ":", "return", "os", ".", "path", ".", "join", "(", "'/var/lib/machines'", ",", "name", ")", "else", ":", "ret", "=", "os", ".", "path", ".", "join", "(", "'/var/lib/container'", ",", "name", ")", "if", "all_roots", ":", "return", "[", "ret", "]", "else", ":", "return", "ret" ]
return the container root directory .
train
true
40,607
def PhdIterator(handle): phd_records = Phd.parse(handle) for phd_record in phd_records: name = phd_record.file_name.split(None, 1)[0] seq_record = SeqRecord(phd_record.seq, id=name, name=name, description=phd_record.file_name) seq_record.annotations = phd_record.comments seq_record.letter_annotations['phred_quality'] = [int(site[1]) for site in phd_record.sites] try: seq_record.letter_annotations['peak_location'] = [int(site[2]) for site in phd_record.sites] except IndexError: pass (yield seq_record)
[ "def", "PhdIterator", "(", "handle", ")", ":", "phd_records", "=", "Phd", ".", "parse", "(", "handle", ")", "for", "phd_record", "in", "phd_records", ":", "name", "=", "phd_record", ".", "file_name", ".", "split", "(", "None", ",", "1", ")", "[", "0", "]", "seq_record", "=", "SeqRecord", "(", "phd_record", ".", "seq", ",", "id", "=", "name", ",", "name", "=", "name", ",", "description", "=", "phd_record", ".", "file_name", ")", "seq_record", ".", "annotations", "=", "phd_record", ".", "comments", "seq_record", ".", "letter_annotations", "[", "'phred_quality'", "]", "=", "[", "int", "(", "site", "[", "1", "]", ")", "for", "site", "in", "phd_record", ".", "sites", "]", "try", ":", "seq_record", ".", "letter_annotations", "[", "'peak_location'", "]", "=", "[", "int", "(", "site", "[", "2", "]", ")", "for", "site", "in", "phd_record", ".", "sites", "]", "except", "IndexError", ":", "pass", "(", "yield", "seq_record", ")" ]
returns seqrecord objects from a phd file .
train
false
40,608
@task def test_api2(ctx): test_module(ctx, module=API_TESTS2)
[ "@", "task", "def", "test_api2", "(", "ctx", ")", ":", "test_module", "(", "ctx", ",", "module", "=", "API_TESTS2", ")" ]
run the api test suite .
train
false
40,609
def sg_label(sg_id, sg_name): return (((PREFIX + str(sg_id)) + '_') + sg_name)
[ "def", "sg_label", "(", "sg_id", ",", "sg_name", ")", ":", "return", "(", "(", "(", "PREFIX", "+", "str", "(", "sg_id", ")", ")", "+", "'_'", ")", "+", "sg_name", ")" ]
construct the security group id used as chain identifier in midonet .
train
false
40,611
def windows_friendly_colon_split(config_string): if Platform.is_win32(): return COLON_NON_WIN_PATH.split(config_string) else: return config_string.split(':')
[ "def", "windows_friendly_colon_split", "(", "config_string", ")", ":", "if", "Platform", ".", "is_win32", "(", ")", ":", "return", "COLON_NON_WIN_PATH", ".", "split", "(", "config_string", ")", "else", ":", "return", "config_string", ".", "split", "(", "':'", ")" ]
perform a split by : on the config_string without splitting on the start of windows path .
train
false
40,612
def chirp(t, f0, t1, f1, method='linear', phi=0, vertex_zero=True): phase = _chirp_phase(t, f0, t1, f1, method, vertex_zero) phi *= (pi / 180) return cos((phase + phi))
[ "def", "chirp", "(", "t", ",", "f0", ",", "t1", ",", "f1", ",", "method", "=", "'linear'", ",", "phi", "=", "0", ",", "vertex_zero", "=", "True", ")", ":", "phase", "=", "_chirp_phase", "(", "t", ",", "f0", ",", "t1", ",", "f1", ",", "method", ",", "vertex_zero", ")", "phi", "*=", "(", "pi", "/", "180", ")", "return", "cos", "(", "(", "phase", "+", "phi", ")", ")" ]
frequency-swept cosine generator .
train
false
40,613
def record_utm_registration_attribution(request, user): utm_cookie_name = RegistrationCookieConfiguration.current().utm_cookie_name utm_cookie = request.COOKIES.get(utm_cookie_name) if (user and utm_cookie): utm = json.loads(utm_cookie) for utm_parameter_name in REGISTRATION_UTM_PARAMETERS: utm_parameter = utm.get(utm_parameter_name) if utm_parameter: UserAttribute.set_user_attribute(user, REGISTRATION_UTM_PARAMETERS.get(utm_parameter_name), utm_parameter) created_at_unixtime = utm.get('created_at') if created_at_unixtime: created_at_datetime = datetime.datetime.fromtimestamp((int(created_at_unixtime) / float(1000)), tz=UTC) UserAttribute.set_user_attribute(user, REGISTRATION_UTM_CREATED_AT, created_at_datetime)
[ "def", "record_utm_registration_attribution", "(", "request", ",", "user", ")", ":", "utm_cookie_name", "=", "RegistrationCookieConfiguration", ".", "current", "(", ")", ".", "utm_cookie_name", "utm_cookie", "=", "request", ".", "COOKIES", ".", "get", "(", "utm_cookie_name", ")", "if", "(", "user", "and", "utm_cookie", ")", ":", "utm", "=", "json", ".", "loads", "(", "utm_cookie", ")", "for", "utm_parameter_name", "in", "REGISTRATION_UTM_PARAMETERS", ":", "utm_parameter", "=", "utm", ".", "get", "(", "utm_parameter_name", ")", "if", "utm_parameter", ":", "UserAttribute", ".", "set_user_attribute", "(", "user", ",", "REGISTRATION_UTM_PARAMETERS", ".", "get", "(", "utm_parameter_name", ")", ",", "utm_parameter", ")", "created_at_unixtime", "=", "utm", ".", "get", "(", "'created_at'", ")", "if", "created_at_unixtime", ":", "created_at_datetime", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "(", "int", "(", "created_at_unixtime", ")", "/", "float", "(", "1000", ")", ")", ",", "tz", "=", "UTC", ")", "UserAttribute", ".", "set_user_attribute", "(", "user", ",", "REGISTRATION_UTM_CREATED_AT", ",", "created_at_datetime", ")" ]
attribute this users registration to the latest utm referrer .
train
false
40,614
def put_pipeline_definition(pipeline_id, pipeline_objects, parameter_objects=None, parameter_values=None, region=None, key=None, keyid=None, profile=None): parameter_objects = (parameter_objects or []) parameter_values = (parameter_values or []) client = _get_client(region, key, keyid, profile) r = {} try: response = client.put_pipeline_definition(pipelineId=pipeline_id, pipelineObjects=pipeline_objects, parameterObjects=parameter_objects, parameterValues=parameter_values) if response['errored']: r['error'] = response['validationErrors'] else: r['result'] = response except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: r['error'] = str(e) return r
[ "def", "put_pipeline_definition", "(", "pipeline_id", ",", "pipeline_objects", ",", "parameter_objects", "=", "None", ",", "parameter_values", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "parameter_objects", "=", "(", "parameter_objects", "or", "[", "]", ")", "parameter_values", "=", "(", "parameter_values", "or", "[", "]", ")", "client", "=", "_get_client", "(", "region", ",", "key", ",", "keyid", ",", "profile", ")", "r", "=", "{", "}", "try", ":", "response", "=", "client", ".", "put_pipeline_definition", "(", "pipelineId", "=", "pipeline_id", ",", "pipelineObjects", "=", "pipeline_objects", ",", "parameterObjects", "=", "parameter_objects", ",", "parameterValues", "=", "parameter_values", ")", "if", "response", "[", "'errored'", "]", ":", "r", "[", "'error'", "]", "=", "response", "[", "'validationErrors'", "]", "else", ":", "r", "[", "'result'", "]", "=", "response", "except", "(", "botocore", ".", "exceptions", ".", "BotoCoreError", ",", "botocore", ".", "exceptions", ".", "ClientError", ")", "as", "e", ":", "r", "[", "'error'", "]", "=", "str", "(", "e", ")", "return", "r" ]
add tasks .
train
true
40,615
def build_UNIX_integration_tests(mixin_class, name, fixture): class RealTests(mixin_class, AsyncTestCase, ): '\n Tests that endpoints are available over the network interfaces that\n real API users will be connecting from.\n ' def setUp(self): path = os.path.relpath(self.mktemp()) self.app = fixture(self) self.port = reactor.listenUNIX(path, Site(self.app.resource())) self.addCleanup(self.port.stopListening) self.agent = ProxyAgent(UNIXClientEndpoint(reactor, path), reactor) super(RealTests, self).setUp() RealTests.__name__ += name RealTests.__module__ = mixin_class.__module__ return RealTests
[ "def", "build_UNIX_integration_tests", "(", "mixin_class", ",", "name", ",", "fixture", ")", ":", "class", "RealTests", "(", "mixin_class", ",", "AsyncTestCase", ",", ")", ":", "def", "setUp", "(", "self", ")", ":", "path", "=", "os", ".", "path", ".", "relpath", "(", "self", ".", "mktemp", "(", ")", ")", "self", ".", "app", "=", "fixture", "(", "self", ")", "self", ".", "port", "=", "reactor", ".", "listenUNIX", "(", "path", ",", "Site", "(", "self", ".", "app", ".", "resource", "(", ")", ")", ")", "self", ".", "addCleanup", "(", "self", ".", "port", ".", "stopListening", ")", "self", ".", "agent", "=", "ProxyAgent", "(", "UNIXClientEndpoint", "(", "reactor", ",", "path", ")", ",", "reactor", ")", "super", "(", "RealTests", ",", "self", ")", ".", "setUp", "(", ")", "RealTests", ".", "__name__", "+=", "name", "RealTests", ".", "__module__", "=", "mixin_class", ".", "__module__", "return", "RealTests" ]
build asynctestcase class that runs the tests in the mixin class with real queries over a unix socket .
train
false
40,616
def draw_rimmed_box(screen, box_rect, box_color, rim_width=0, rim_color=Color('black')): if rim_width: rim_rect = Rect((box_rect.left - rim_width), (box_rect.top - rim_width), (box_rect.width + (rim_width * 2)), (box_rect.height + (rim_width * 2))) pygame.draw.rect(screen, rim_color, rim_rect) pygame.draw.rect(screen, box_color, box_rect)
[ "def", "draw_rimmed_box", "(", "screen", ",", "box_rect", ",", "box_color", ",", "rim_width", "=", "0", ",", "rim_color", "=", "Color", "(", "'black'", ")", ")", ":", "if", "rim_width", ":", "rim_rect", "=", "Rect", "(", "(", "box_rect", ".", "left", "-", "rim_width", ")", ",", "(", "box_rect", ".", "top", "-", "rim_width", ")", ",", "(", "box_rect", ".", "width", "+", "(", "rim_width", "*", "2", ")", ")", ",", "(", "box_rect", ".", "height", "+", "(", "rim_width", "*", "2", ")", ")", ")", "pygame", ".", "draw", ".", "rect", "(", "screen", ",", "rim_color", ",", "rim_rect", ")", "pygame", ".", "draw", ".", "rect", "(", "screen", ",", "box_color", ",", "box_rect", ")" ]
draw a rimmed box on the given surface .
train
false
40,617
def _sid_subdir_path(sid): padded_sid = format(sid, '06') return os.path.join(padded_sid[0:2], padded_sid[2:4], '{0}.bcolz'.format(str(padded_sid)))
[ "def", "_sid_subdir_path", "(", "sid", ")", ":", "padded_sid", "=", "format", "(", "sid", ",", "'06'", ")", "return", "os", ".", "path", ".", "join", "(", "padded_sid", "[", "0", ":", "2", "]", ",", "padded_sid", "[", "2", ":", "4", "]", ",", "'{0}.bcolz'", ".", "format", "(", "str", "(", "padded_sid", ")", ")", ")" ]
format subdir path to limit the number directories in any given subdirectory to 100 .
train
true
40,620
def ownership(registry, xml_parent, data): ownership_plugin = XML.SubElement(xml_parent, 'com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty') ownership = XML.SubElement(ownership_plugin, 'ownership') owner = str(data.get('enabled', True)).lower() XML.SubElement(ownership, 'ownershipEnabled').text = owner XML.SubElement(ownership, 'primaryOwnerId').text = data.get('owner') coownersIds = XML.SubElement(ownership, 'coownersIds') for coowner in data.get('co-owners', []): XML.SubElement(coownersIds, 'string').text = coowner
[ "def", "ownership", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "ownership_plugin", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.synopsys.arc.jenkins.plugins.ownership.jobs.JobOwnerJobProperty'", ")", "ownership", "=", "XML", ".", "SubElement", "(", "ownership_plugin", ",", "'ownership'", ")", "owner", "=", "str", "(", "data", ".", "get", "(", "'enabled'", ",", "True", ")", ")", ".", "lower", "(", ")", "XML", ".", "SubElement", "(", "ownership", ",", "'ownershipEnabled'", ")", ".", "text", "=", "owner", "XML", ".", "SubElement", "(", "ownership", ",", "'primaryOwnerId'", ")", ".", "text", "=", "data", ".", "get", "(", "'owner'", ")", "coownersIds", "=", "XML", ".", "SubElement", "(", "ownership", ",", "'coownersIds'", ")", "for", "coowner", "in", "data", ".", "get", "(", "'co-owners'", ",", "[", "]", ")", ":", "XML", ".", "SubElement", "(", "coownersIds", ",", "'string'", ")", ".", "text", "=", "coowner" ]
yaml: ownership plugin provides explicit ownership for jobs and slave nodes .
train
false
40,621
def dpll_satisfiable(expr, all_models=False): clauses = conjuncts(to_cnf(expr)) if (False in clauses): if all_models: return (f for f in [False]) return False symbols = sorted(_find_predicates(expr), key=default_sort_key) symbols_int_repr = range(1, (len(symbols) + 1)) clauses_int_repr = to_int_repr(clauses, symbols) solver = SATSolver(clauses_int_repr, symbols_int_repr, set(), symbols) models = solver._find_model() if all_models: return _all_models(models) try: return next(models) except StopIteration: return False
[ "def", "dpll_satisfiable", "(", "expr", ",", "all_models", "=", "False", ")", ":", "clauses", "=", "conjuncts", "(", "to_cnf", "(", "expr", ")", ")", "if", "(", "False", "in", "clauses", ")", ":", "if", "all_models", ":", "return", "(", "f", "for", "f", "in", "[", "False", "]", ")", "return", "False", "symbols", "=", "sorted", "(", "_find_predicates", "(", "expr", ")", ",", "key", "=", "default_sort_key", ")", "symbols_int_repr", "=", "range", "(", "1", ",", "(", "len", "(", "symbols", ")", "+", "1", ")", ")", "clauses_int_repr", "=", "to_int_repr", "(", "clauses", ",", "symbols", ")", "solver", "=", "SATSolver", "(", "clauses_int_repr", ",", "symbols_int_repr", ",", "set", "(", ")", ",", "symbols", ")", "models", "=", "solver", ".", "_find_model", "(", ")", "if", "all_models", ":", "return", "_all_models", "(", "models", ")", "try", ":", "return", "next", "(", "models", ")", "except", "StopIteration", ":", "return", "False" ]
check satisfiability of a propositional sentence .
train
false
40,622
def eggs(x, y): global fr, st fr = inspect.currentframe() st = inspect.stack() p = x q = (y / 0)
[ "def", "eggs", "(", "x", ",", "y", ")", ":", "global", "fr", ",", "st", "fr", "=", "inspect", ".", "currentframe", "(", ")", "st", "=", "inspect", ".", "stack", "(", ")", "p", "=", "x", "q", "=", "(", "y", "/", "0", ")" ]
a docstring .
train
false
40,625
def register_ui(review_ui): if (not issubclass(review_ui, FileAttachmentReviewUI)): raise TypeError(u'Only FileAttachmentReviewUI subclasses can be registered') _file_attachment_review_uis.append(review_ui)
[ "def", "register_ui", "(", "review_ui", ")", ":", "if", "(", "not", "issubclass", "(", "review_ui", ",", "FileAttachmentReviewUI", ")", ")", ":", "raise", "TypeError", "(", "u'Only FileAttachmentReviewUI subclasses can be registered'", ")", "_file_attachment_review_uis", ".", "append", "(", "review_ui", ")" ]
registers a review ui class .
train
false
40,626
def ConfigureRemoteApi(app_id, path, auth_func, servername=None, rpc_server_factory=appengine_rpc.HttpRpcServer, rtok=None, secure=False, services=None, default_auth_domain=None, save_cookies=False, use_remote_datastore=True): if ((not servername) and (not app_id)): raise ConfigurationError('app_id or servername required') if (not servername): servername = ('%s.appspot.com' % (app_id,)) server = rpc_server_factory(servername, auth_func, GetUserAgent(), GetSourceName(), save_cookies=save_cookies, debug_data=False, secure=secure) if (not app_id): app_id = GetRemoteAppIdFromServer(server, path, rtok) ConfigureRemoteApiFromServer(server, path, app_id, services, default_auth_domain, use_remote_datastore) return server
[ "def", "ConfigureRemoteApi", "(", "app_id", ",", "path", ",", "auth_func", ",", "servername", "=", "None", ",", "rpc_server_factory", "=", "appengine_rpc", ".", "HttpRpcServer", ",", "rtok", "=", "None", ",", "secure", "=", "False", ",", "services", "=", "None", ",", "default_auth_domain", "=", "None", ",", "save_cookies", "=", "False", ",", "use_remote_datastore", "=", "True", ")", ":", "if", "(", "(", "not", "servername", ")", "and", "(", "not", "app_id", ")", ")", ":", "raise", "ConfigurationError", "(", "'app_id or servername required'", ")", "if", "(", "not", "servername", ")", ":", "servername", "=", "(", "'%s.appspot.com'", "%", "(", "app_id", ",", ")", ")", "server", "=", "rpc_server_factory", "(", "servername", ",", "auth_func", ",", "GetUserAgent", "(", ")", ",", "GetSourceName", "(", ")", ",", "save_cookies", "=", "save_cookies", ",", "debug_data", "=", "False", ",", "secure", "=", "secure", ")", "if", "(", "not", "app_id", ")", ":", "app_id", "=", "GetRemoteAppIdFromServer", "(", "server", ",", "path", ",", "rtok", ")", "ConfigureRemoteApiFromServer", "(", "server", ",", "path", ",", "app_id", ",", "services", ",", "default_auth_domain", ",", "use_remote_datastore", ")", "return", "server" ]
does necessary setup to allow easy remote access to app engine apis .
train
false
40,627
def test_data_name_third_party_package(): data_dir = os.path.join(os.path.dirname(__file__), u'data') sys.path.insert(0, data_dir) try: import test_package filename = test_package.get_data_filename() assert (filename == os.path.join(data_dir, u'test_package', u'data', u'foo.txt')) finally: sys.path.pop(0)
[ "def", "test_data_name_third_party_package", "(", ")", ":", "data_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "u'data'", ")", "sys", ".", "path", ".", "insert", "(", "0", ",", "data_dir", ")", "try", ":", "import", "test_package", "filename", "=", "test_package", ".", "get_data_filename", "(", ")", "assert", "(", "filename", "==", "os", ".", "path", ".", "join", "(", "data_dir", ",", "u'test_package'", ",", "u'data'", ",", "u'foo.txt'", ")", ")", "finally", ":", "sys", ".", "path", ".", "pop", "(", "0", ")" ]
regression test for issue #1256 tests that get_pkg_data_filename works in a third-party package that doesnt make any relative imports from the module its used from .
train
false
40,628
def test_sample_wrong_X(): ada = ADASYN(random_state=RND_SEED) ada.fit(X, Y) assert_raises(RuntimeError, ada.sample, np.random.random((100, 40)), np.array((([0] * 50) + ([1] * 50))))
[ "def", "test_sample_wrong_X", "(", ")", ":", "ada", "=", "ADASYN", "(", "random_state", "=", "RND_SEED", ")", "ada", ".", "fit", "(", "X", ",", "Y", ")", "assert_raises", "(", "RuntimeError", ",", "ada", ".", "sample", ",", "np", ".", "random", ".", "random", "(", "(", "100", ",", "40", ")", ")", ",", "np", ".", "array", "(", "(", "(", "[", "0", "]", "*", "50", ")", "+", "(", "[", "1", "]", "*", "50", ")", ")", ")", ")" ]
test either if an error is raised when x is different at fitting and sampling .
train
false
40,629
def patch_os(): patch_module('os')
[ "def", "patch_os", "(", ")", ":", "patch_module", "(", "'os'", ")" ]
replace :func:os .
train
false
40,630
def _simple_blockify(tuples, dtype): (values, placement) = _stack_arrays(tuples, dtype) if ((dtype is not None) and (values.dtype != dtype)): values = values.astype(dtype) block = make_block(values, placement=placement) return [block]
[ "def", "_simple_blockify", "(", "tuples", ",", "dtype", ")", ":", "(", "values", ",", "placement", ")", "=", "_stack_arrays", "(", "tuples", ",", "dtype", ")", "if", "(", "(", "dtype", "is", "not", "None", ")", "and", "(", "values", ".", "dtype", "!=", "dtype", ")", ")", ":", "values", "=", "values", ".", "astype", "(", "dtype", ")", "block", "=", "make_block", "(", "values", ",", "placement", "=", "placement", ")", "return", "[", "block", "]" ]
return a single array of a block that has a single dtype; if dtype is not none .
train
true
40,631
def is_valid_route_dist(route_dist): return is_valid_ext_comm_attr(route_dist)
[ "def", "is_valid_route_dist", "(", "route_dist", ")", ":", "return", "is_valid_ext_comm_attr", "(", "route_dist", ")" ]
validates *route_dist* as string representation of route distinguisher .
train
false
40,632
def command_copytree(args): for srcdir in args.srcdirs: basename = os.path.basename(srcdir) destdir2 = os.path.normpath(os.path.join(args.destdir, basename)) if os.path.exists(destdir2): shutil.rmtree(destdir2) sys.stdout.write(('copytree: %s => %s\n' % (srcdir, destdir2))) shutil.copytree(srcdir, destdir2) return 0
[ "def", "command_copytree", "(", "args", ")", ":", "for", "srcdir", "in", "args", ".", "srcdirs", ":", "basename", "=", "os", ".", "path", ".", "basename", "(", "srcdir", ")", "destdir2", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "args", ".", "destdir", ",", "basename", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "destdir2", ")", ":", "shutil", ".", "rmtree", "(", "destdir2", ")", "sys", ".", "stdout", ".", "write", "(", "(", "'copytree: %s => %s\\n'", "%", "(", "srcdir", ",", "destdir2", ")", ")", ")", "shutil", ".", "copytree", "(", "srcdir", ",", "destdir2", ")", "return", "0" ]
copy one or more source directory(s) below a destination directory .
train
true
40,634
def refresh(): if _TRAFFICCTL: cmd = _traffic_ctl('config', 'reload') else: cmd = _traffic_line('-x') log.debug('Running: %s', cmd) return _subprocess(cmd)
[ "def", "refresh", "(", ")", ":", "if", "_TRAFFICCTL", ":", "cmd", "=", "_traffic_ctl", "(", "'config'", ",", "'reload'", ")", "else", ":", "cmd", "=", "_traffic_line", "(", "'-x'", ")", "log", ".", "debug", "(", "'Running: %s'", ",", "cmd", ")", "return", "_subprocess", "(", "cmd", ")" ]
this waits for the timeout on each host .
train
false
40,635
def _get_pip_bin(bin_env): if (not bin_env): which_result = __salt__['cmd.which_bin'](['pip', 'pip2', 'pip3', 'pip-python']) if (which_result is None): raise CommandNotFoundError('Could not find a `pip` binary') if salt.utils.is_windows(): return which_result.encode('string-escape') return which_result if os.path.isdir(bin_env): if salt.utils.is_windows(): pip_bin = os.path.join(bin_env, 'Scripts', 'pip.exe').encode('string-escape') else: pip_bin = os.path.join(bin_env, 'bin', 'pip') if os.path.isfile(pip_bin): return pip_bin msg = 'Could not find a `pip` binary in virtualenv {0}'.format(bin_env) raise CommandNotFoundError(msg) elif os.access(bin_env, os.X_OK): if (os.path.isfile(bin_env) or os.path.islink(bin_env)): return bin_env else: raise CommandNotFoundError('Could not find a `pip` binary')
[ "def", "_get_pip_bin", "(", "bin_env", ")", ":", "if", "(", "not", "bin_env", ")", ":", "which_result", "=", "__salt__", "[", "'cmd.which_bin'", "]", "(", "[", "'pip'", ",", "'pip2'", ",", "'pip3'", ",", "'pip-python'", "]", ")", "if", "(", "which_result", "is", "None", ")", ":", "raise", "CommandNotFoundError", "(", "'Could not find a `pip` binary'", ")", "if", "salt", ".", "utils", ".", "is_windows", "(", ")", ":", "return", "which_result", ".", "encode", "(", "'string-escape'", ")", "return", "which_result", "if", "os", ".", "path", ".", "isdir", "(", "bin_env", ")", ":", "if", "salt", ".", "utils", ".", "is_windows", "(", ")", ":", "pip_bin", "=", "os", ".", "path", ".", "join", "(", "bin_env", ",", "'Scripts'", ",", "'pip.exe'", ")", ".", "encode", "(", "'string-escape'", ")", "else", ":", "pip_bin", "=", "os", ".", "path", ".", "join", "(", "bin_env", ",", "'bin'", ",", "'pip'", ")", "if", "os", ".", "path", ".", "isfile", "(", "pip_bin", ")", ":", "return", "pip_bin", "msg", "=", "'Could not find a `pip` binary in virtualenv {0}'", ".", "format", "(", "bin_env", ")", "raise", "CommandNotFoundError", "(", "msg", ")", "elif", "os", ".", "access", "(", "bin_env", ",", "os", ".", "X_OK", ")", ":", "if", "(", "os", ".", "path", ".", "isfile", "(", "bin_env", ")", "or", "os", ".", "path", ".", "islink", "(", "bin_env", ")", ")", ":", "return", "bin_env", "else", ":", "raise", "CommandNotFoundError", "(", "'Could not find a `pip` binary'", ")" ]
locate the pip binary .
train
false
40,636
def load_certificate(type, buffer): if isinstance(buffer, _text_type): buffer = buffer.encode('ascii') bio = _new_mem_buf(buffer) if (type == FILETYPE_PEM): x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) elif (type == FILETYPE_ASN1): x509 = _lib.d2i_X509_bio(bio, _ffi.NULL) else: raise ValueError('type argument must be FILETYPE_PEM or FILETYPE_ASN1') if (x509 == _ffi.NULL): _raise_current_error() cert = X509.__new__(X509) cert._x509 = _ffi.gc(x509, _lib.X509_free) return cert
[ "def", "load_certificate", "(", "type", ",", "buffer", ")", ":", "if", "isinstance", "(", "buffer", ",", "_text_type", ")", ":", "buffer", "=", "buffer", ".", "encode", "(", "'ascii'", ")", "bio", "=", "_new_mem_buf", "(", "buffer", ")", "if", "(", "type", "==", "FILETYPE_PEM", ")", ":", "x509", "=", "_lib", ".", "PEM_read_bio_X509", "(", "bio", ",", "_ffi", ".", "NULL", ",", "_ffi", ".", "NULL", ",", "_ffi", ".", "NULL", ")", "elif", "(", "type", "==", "FILETYPE_ASN1", ")", ":", "x509", "=", "_lib", ".", "d2i_X509_bio", "(", "bio", ",", "_ffi", ".", "NULL", ")", "else", ":", "raise", "ValueError", "(", "'type argument must be FILETYPE_PEM or FILETYPE_ASN1'", ")", "if", "(", "x509", "==", "_ffi", ".", "NULL", ")", ":", "_raise_current_error", "(", ")", "cert", "=", "X509", ".", "__new__", "(", "X509", ")", "cert", ".", "_x509", "=", "_ffi", ".", "gc", "(", "x509", ",", "_lib", ".", "X509_free", ")", "return", "cert" ]
load public and secret key from a zmq certificate .
train
false
40,637
def getArchivableObjectAddToParent(archivableClass, xmlElement): archivableObject = archivableClass() archivableObject.xmlElement = xmlElement xmlElement.object = archivableObject archivableObject.setToObjectAttributeDictionary() xmlElement.parent.object.archivableObjects.append(archivableObject) return archivableObject
[ "def", "getArchivableObjectAddToParent", "(", "archivableClass", ",", "xmlElement", ")", ":", "archivableObject", "=", "archivableClass", "(", ")", "archivableObject", ".", "xmlElement", "=", "xmlElement", "xmlElement", ".", "object", "=", "archivableObject", "archivableObject", ".", "setToObjectAttributeDictionary", "(", ")", "xmlElement", ".", "parent", ".", "object", ".", "archivableObjects", ".", "append", "(", "archivableObject", ")", "return", "archivableObject" ]
get the archivable object and add it to the parent object .
train
false
40,639
def sgml_extract(text_data): return {'docid': _get_one(docid_re, text_data, required=True), 'doctype': _get_one(doctype_re, text_data, required=True), 'datetime': _get_one(datetime_re, text_data, required=True), 'headline': _get_one(headline_re, text_data, required=True), 'poster': _get_one(poster_re, _get_one(post_re, text_data)), 'postdate': _get_one(postdate_re, _get_one(post_re, text_data)), 'text': _get_text(_get_one(post_re, text_data)).strip()}
[ "def", "sgml_extract", "(", "text_data", ")", ":", "return", "{", "'docid'", ":", "_get_one", "(", "docid_re", ",", "text_data", ",", "required", "=", "True", ")", ",", "'doctype'", ":", "_get_one", "(", "doctype_re", ",", "text_data", ",", "required", "=", "True", ")", ",", "'datetime'", ":", "_get_one", "(", "datetime_re", ",", "text_data", ",", "required", "=", "True", ")", ",", "'headline'", ":", "_get_one", "(", "headline_re", ",", "text_data", ",", "required", "=", "True", ")", ",", "'poster'", ":", "_get_one", "(", "poster_re", ",", "_get_one", "(", "post_re", ",", "text_data", ")", ")", ",", "'postdate'", ":", "_get_one", "(", "postdate_re", ",", "_get_one", "(", "post_re", ",", "text_data", ")", ")", ",", "'text'", ":", "_get_text", "(", "_get_one", "(", "post_re", ",", "text_data", ")", ")", ".", "strip", "(", ")", "}" ]
extract text from the ontonotes web documents .
train
false
40,642
def dictionary_merge(a, b): for (key, value) in b.items(): if ((key in a) and isinstance(a[key], dict) and isinstance(value, dict)): dictionary_merge(a[key], b[key]) continue a[key] = b[key] return a
[ "def", "dictionary_merge", "(", "a", ",", "b", ")", ":", "for", "(", "key", ",", "value", ")", "in", "b", ".", "items", "(", ")", ":", "if", "(", "(", "key", "in", "a", ")", "and", "isinstance", "(", "a", "[", "key", "]", ",", "dict", ")", "and", "isinstance", "(", "value", ",", "dict", ")", ")", ":", "dictionary_merge", "(", "a", "[", "key", "]", ",", "b", "[", "key", "]", ")", "continue", "a", "[", "key", "]", "=", "b", "[", "key", "]", "return", "a" ]
merges dictionary b into a like dict .
train
true
40,643
def bulk_update_private(context, data_dict): _check_access('bulk_update_private', context, data_dict) _bulk_update_dataset(context, data_dict, {'private': True})
[ "def", "bulk_update_private", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'bulk_update_private'", ",", "context", ",", "data_dict", ")", "_bulk_update_dataset", "(", "context", ",", "data_dict", ",", "{", "'private'", ":", "True", "}", ")" ]
make a list of datasets private .
train
false
40,644
def update_preferred_site_language_code(user_id, preferred_site_language_code): user_settings = get_user_settings(user_id, strict=True) user_settings.preferred_site_language_code = preferred_site_language_code _save_user_settings(user_settings)
[ "def", "update_preferred_site_language_code", "(", "user_id", ",", "preferred_site_language_code", ")", ":", "user_settings", "=", "get_user_settings", "(", "user_id", ",", "strict", "=", "True", ")", "user_settings", ".", "preferred_site_language_code", "=", "preferred_site_language_code", "_save_user_settings", "(", "user_settings", ")" ]
updates preferred_site_language_code of user with given user_id .
train
false
40,645
def _get_md5(name, path, run_func): output = run_func(name, 'md5sum {0}'.format(pipes.quote(path)), ignore_retcode=True)['stdout'] try: return output.split()[0] except IndexError: return None
[ "def", "_get_md5", "(", "name", ",", "path", ",", "run_func", ")", ":", "output", "=", "run_func", "(", "name", ",", "'md5sum {0}'", ".", "format", "(", "pipes", ".", "quote", "(", "path", ")", ")", ",", "ignore_retcode", "=", "True", ")", "[", "'stdout'", "]", "try", ":", "return", "output", ".", "split", "(", ")", "[", "0", "]", "except", "IndexError", ":", "return", "None" ]
get the md5 checksum of a file from a container .
train
true
40,647
def find_dependent_tables(tables, graph=None): if (graph is None): graph = _pokedex_graph tables = list(tables) dependents = set() def add_dependents_of(table): for dependent_table in graph.get(table, []): if (dependent_table not in dependents): dependents.add(dependent_table) add_dependents_of(dependent_table) for table in tables: add_dependents_of(table) dependents -= set(tables) return dependents
[ "def", "find_dependent_tables", "(", "tables", ",", "graph", "=", "None", ")", ":", "if", "(", "graph", "is", "None", ")", ":", "graph", "=", "_pokedex_graph", "tables", "=", "list", "(", "tables", ")", "dependents", "=", "set", "(", ")", "def", "add_dependents_of", "(", "table", ")", ":", "for", "dependent_table", "in", "graph", ".", "get", "(", "table", ",", "[", "]", ")", ":", "if", "(", "dependent_table", "not", "in", "dependents", ")", ":", "dependents", ".", "add", "(", "dependent_table", ")", "add_dependents_of", "(", "dependent_table", ")", "for", "table", "in", "tables", ":", "add_dependents_of", "(", "table", ")", "dependents", "-=", "set", "(", "tables", ")", "return", "dependents" ]
recursively find all tables which depend on the given tables .
train
false
40,648
def _read_exactly(sock, amt): data = '' while (amt > 0): chunk = sock.recv(amt) data += chunk amt -= len(chunk) return data
[ "def", "_read_exactly", "(", "sock", ",", "amt", ")", ":", "data", "=", "''", "while", "(", "amt", ">", "0", ")", ":", "chunk", "=", "sock", ".", "recv", "(", "amt", ")", "data", "+=", "chunk", "amt", "-=", "len", "(", "chunk", ")", "return", "data" ]
read *exactly* amt bytes from the socket sock .
train
false
40,649
@no_auto_transaction def meeting_hook(): message = ConferenceMessage() try: message.verify() except ConferenceError as error: logger.error(error) raise HTTPError(httplib.NOT_ACCEPTABLE) try: conference = Conference.get_by_endpoint(message.conference_name, active=False) except ConferenceError as error: logger.error(error) raise HTTPError(httplib.NOT_ACCEPTABLE) if (not conference.active): send_mail(message.sender_email, CONFERENCE_INACTIVE, fullname=message.sender_display, presentations_url=web_url_for('conference_view', _absolute=True)) raise HTTPError(httplib.NOT_ACCEPTABLE) add_poster_by_email(conference=conference, message=message)
[ "@", "no_auto_transaction", "def", "meeting_hook", "(", ")", ":", "message", "=", "ConferenceMessage", "(", ")", "try", ":", "message", ".", "verify", "(", ")", "except", "ConferenceError", "as", "error", ":", "logger", ".", "error", "(", "error", ")", "raise", "HTTPError", "(", "httplib", ".", "NOT_ACCEPTABLE", ")", "try", ":", "conference", "=", "Conference", ".", "get_by_endpoint", "(", "message", ".", "conference_name", ",", "active", "=", "False", ")", "except", "ConferenceError", "as", "error", ":", "logger", ".", "error", "(", "error", ")", "raise", "HTTPError", "(", "httplib", ".", "NOT_ACCEPTABLE", ")", "if", "(", "not", "conference", ".", "active", ")", ":", "send_mail", "(", "message", ".", "sender_email", ",", "CONFERENCE_INACTIVE", ",", "fullname", "=", "message", ".", "sender_display", ",", "presentations_url", "=", "web_url_for", "(", "'conference_view'", ",", "_absolute", "=", "True", ")", ")", "raise", "HTTPError", "(", "httplib", ".", "NOT_ACCEPTABLE", ")", "add_poster_by_email", "(", "conference", "=", "conference", ",", "message", "=", "message", ")" ]
view function for email conference submission .
train
false
40,650
def is_device(device_name, allow_virtual): device_name = re.sub('/', '!', device_name) if allow_virtual: devicename = (('/sys/block/' + device_name) + '/device') else: devicename = ('/sys/block/' + device_name) return os.access(devicename, os.F_OK)
[ "def", "is_device", "(", "device_name", ",", "allow_virtual", ")", ":", "device_name", "=", "re", ".", "sub", "(", "'/'", ",", "'!'", ",", "device_name", ")", "if", "allow_virtual", ":", "devicename", "=", "(", "(", "'/sys/block/'", "+", "device_name", ")", "+", "'/device'", ")", "else", ":", "devicename", "=", "(", "'/sys/block/'", "+", "device_name", ")", "return", "os", ".", "access", "(", "devicename", ",", "os", ".", "F_OK", ")" ]
test whether given name is a device or a partition .
train
false
40,651
def _incremental_mean_and_var(X, last_mean=0.0, last_variance=None, last_sample_count=0): last_sum = (last_mean * last_sample_count) new_sum = X.sum(axis=0) new_sample_count = X.shape[0] updated_sample_count = (last_sample_count + new_sample_count) updated_mean = ((last_sum + new_sum) / updated_sample_count) if (last_variance is None): updated_variance = None else: new_unnormalized_variance = (X.var(axis=0) * new_sample_count) if (last_sample_count == 0): updated_unnormalized_variance = new_unnormalized_variance else: last_over_new_count = (last_sample_count / new_sample_count) last_unnormalized_variance = (last_variance * last_sample_count) updated_unnormalized_variance = ((last_unnormalized_variance + new_unnormalized_variance) + ((last_over_new_count / updated_sample_count) * (((last_sum / last_over_new_count) - new_sum) ** 2))) updated_variance = (updated_unnormalized_variance / updated_sample_count) return (updated_mean, updated_variance, updated_sample_count)
[ "def", "_incremental_mean_and_var", "(", "X", ",", "last_mean", "=", "0.0", ",", "last_variance", "=", "None", ",", "last_sample_count", "=", "0", ")", ":", "last_sum", "=", "(", "last_mean", "*", "last_sample_count", ")", "new_sum", "=", "X", ".", "sum", "(", "axis", "=", "0", ")", "new_sample_count", "=", "X", ".", "shape", "[", "0", "]", "updated_sample_count", "=", "(", "last_sample_count", "+", "new_sample_count", ")", "updated_mean", "=", "(", "(", "last_sum", "+", "new_sum", ")", "/", "updated_sample_count", ")", "if", "(", "last_variance", "is", "None", ")", ":", "updated_variance", "=", "None", "else", ":", "new_unnormalized_variance", "=", "(", "X", ".", "var", "(", "axis", "=", "0", ")", "*", "new_sample_count", ")", "if", "(", "last_sample_count", "==", "0", ")", ":", "updated_unnormalized_variance", "=", "new_unnormalized_variance", "else", ":", "last_over_new_count", "=", "(", "last_sample_count", "/", "new_sample_count", ")", "last_unnormalized_variance", "=", "(", "last_variance", "*", "last_sample_count", ")", "updated_unnormalized_variance", "=", "(", "(", "last_unnormalized_variance", "+", "new_unnormalized_variance", ")", "+", "(", "(", "last_over_new_count", "/", "updated_sample_count", ")", "*", "(", "(", "(", "last_sum", "/", "last_over_new_count", ")", "-", "new_sum", ")", "**", "2", ")", ")", ")", "updated_variance", "=", "(", "updated_unnormalized_variance", "/", "updated_sample_count", ")", "return", "(", "updated_mean", ",", "updated_variance", ",", "updated_sample_count", ")" ]
calculate mean update and a youngs and cramer variance update .
train
false
40,652
def hdfs_uri_to_real_path(hdfs_uri, environ): components = urlparse(hdfs_uri) scheme = components.scheme path = components.path if ((not scheme) and (not path.startswith('/'))): path = ('/user/%s/%s' % (environ['USER'], path)) return os.path.join(get_mock_hdfs_root(environ=environ), path.lstrip('/'))
[ "def", "hdfs_uri_to_real_path", "(", "hdfs_uri", ",", "environ", ")", ":", "components", "=", "urlparse", "(", "hdfs_uri", ")", "scheme", "=", "components", ".", "scheme", "path", "=", "components", ".", "path", "if", "(", "(", "not", "scheme", ")", "and", "(", "not", "path", ".", "startswith", "(", "'/'", ")", ")", ")", ":", "path", "=", "(", "'/user/%s/%s'", "%", "(", "environ", "[", "'USER'", "]", ",", "path", ")", ")", "return", "os", ".", "path", ".", "join", "(", "get_mock_hdfs_root", "(", "environ", "=", "environ", ")", ",", "path", ".", "lstrip", "(", "'/'", ")", ")" ]
map an hdfs uri to a path on the filesystem .
train
false
40,653
def check_callbacks(bot, trigger, url, run=True): matched = any((regex.search(url) for regex in bot.memory[u'url_exclude'])) for (regex, function) in tools.iteritems(bot.memory[u'url_callbacks']): match = regex.search(url) if match: if (run or hasattr(function, u'url_regex')): function(bot, trigger, match) matched = True return matched
[ "def", "check_callbacks", "(", "bot", ",", "trigger", ",", "url", ",", "run", "=", "True", ")", ":", "matched", "=", "any", "(", "(", "regex", ".", "search", "(", "url", ")", "for", "regex", "in", "bot", ".", "memory", "[", "u'url_exclude'", "]", ")", ")", "for", "(", "regex", ",", "function", ")", "in", "tools", ".", "iteritems", "(", "bot", ".", "memory", "[", "u'url_callbacks'", "]", ")", ":", "match", "=", "regex", ".", "search", "(", "url", ")", "if", "match", ":", "if", "(", "run", "or", "hasattr", "(", "function", ",", "u'url_regex'", ")", ")", ":", "function", "(", "bot", ",", "trigger", ",", "match", ")", "matched", "=", "True", "return", "matched" ]
check the given url against the callbacks list .
train
false
40,654
@pytest.fixture(scope='session') def user_dir(tmpdir_factory): return tmpdir_factory.mktemp('user_dir')
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'session'", ")", "def", "user_dir", "(", "tmpdir_factory", ")", ":", "return", "tmpdir_factory", ".", "mktemp", "(", "'user_dir'", ")" ]
fixture that simulates the users home directory .
train
false
40,656
def get_selected_inferior(): return gdb.inferiors()[0] selected_thread = gdb.selected_thread() for inferior in gdb.inferiors(): for thread in inferior.threads(): if (thread == selected_thread): return inferior
[ "def", "get_selected_inferior", "(", ")", ":", "return", "gdb", ".", "inferiors", "(", ")", "[", "0", "]", "selected_thread", "=", "gdb", ".", "selected_thread", "(", ")", "for", "inferior", "in", "gdb", ".", "inferiors", "(", ")", ":", "for", "thread", "in", "inferior", ".", "threads", "(", ")", ":", "if", "(", "thread", "==", "selected_thread", ")", ":", "return", "inferior" ]
return the selected inferior in gdb .
train
false
40,659
def save_hdf5(filename, obj, compression=4): _check_available() with h5py.File(filename, 'w') as f: s = HDF5Serializer(f, compression=compression) s.save(obj)
[ "def", "save_hdf5", "(", "filename", ",", "obj", ",", "compression", "=", "4", ")", ":", "_check_available", "(", ")", "with", "h5py", ".", "File", "(", "filename", ",", "'w'", ")", "as", "f", ":", "s", "=", "HDF5Serializer", "(", "f", ",", "compression", "=", "compression", ")", "s", ".", "save", "(", "obj", ")" ]
saves an object to the file in hdf5 format .
train
false
40,660
def connectToRootNS(network, switch, ip, routes): root = Node('root', inNamespace=False) intf = network.addLink(root, switch).intf1 root.setIP(ip, intf=intf) network.start() for route in routes: root.cmd(((('route add -net ' + route) + ' dev ') + str(intf)))
[ "def", "connectToRootNS", "(", "network", ",", "switch", ",", "ip", ",", "routes", ")", ":", "root", "=", "Node", "(", "'root'", ",", "inNamespace", "=", "False", ")", "intf", "=", "network", ".", "addLink", "(", "root", ",", "switch", ")", ".", "intf1", "root", ".", "setIP", "(", "ip", ",", "intf", "=", "intf", ")", "network", ".", "start", "(", ")", "for", "route", "in", "routes", ":", "root", ".", "cmd", "(", "(", "(", "(", "'route add -net '", "+", "route", ")", "+", "' dev '", ")", "+", "str", "(", "intf", ")", ")", ")" ]
connect hosts to root namespace via switch .
train
false
40,661
def getOverhangSupportAngle(xmlElement): return math.radians(xmlElement.getCascadeFloat(45.0, 'overhangSupportAngle'))
[ "def", "getOverhangSupportAngle", "(", "xmlElement", ")", ":", "return", "math", ".", "radians", "(", "xmlElement", ".", "getCascadeFloat", "(", "45.0", ",", "'overhangSupportAngle'", ")", ")" ]
get the overhang support angle in radians .
train
false
40,663
def test_xdawn_apply_transform(): (raw, events, picks) = _get_data() raw.pick_types(eeg=True, meg=False) epochs = Epochs(raw, events, event_id, tmin, tmax, proj=False, preload=True, baseline=None, verbose=False) n_components = 2 xd = Xdawn(n_components=n_components, correct_overlap=False) xd.fit(epochs) for inst in [raw, epochs.average(), epochs]: denoise = xd.apply(inst) assert_raises(ValueError, xd.apply, 42) xd.transform(epochs) xd.transform(epochs._data) assert_raises(ValueError, xd.transform, 42) np.random.seed(0) idx = np.arange(len(epochs)) np.random.shuffle(idx) xd.fit(epochs[idx]) denoise_shfl = xd.apply(epochs) assert_array_almost_equal(denoise['cond2']._data, denoise_shfl['cond2']._data)
[ "def", "test_xdawn_apply_transform", "(", ")", ":", "(", "raw", ",", "events", ",", "picks", ")", "=", "_get_data", "(", ")", "raw", ".", "pick_types", "(", "eeg", "=", "True", ",", "meg", "=", "False", ")", "epochs", "=", "Epochs", "(", "raw", ",", "events", ",", "event_id", ",", "tmin", ",", "tmax", ",", "proj", "=", "False", ",", "preload", "=", "True", ",", "baseline", "=", "None", ",", "verbose", "=", "False", ")", "n_components", "=", "2", "xd", "=", "Xdawn", "(", "n_components", "=", "n_components", ",", "correct_overlap", "=", "False", ")", "xd", ".", "fit", "(", "epochs", ")", "for", "inst", "in", "[", "raw", ",", "epochs", ".", "average", "(", ")", ",", "epochs", "]", ":", "denoise", "=", "xd", ".", "apply", "(", "inst", ")", "assert_raises", "(", "ValueError", ",", "xd", ".", "apply", ",", "42", ")", "xd", ".", "transform", "(", "epochs", ")", "xd", ".", "transform", "(", "epochs", ".", "_data", ")", "assert_raises", "(", "ValueError", ",", "xd", ".", "transform", ",", "42", ")", "np", ".", "random", ".", "seed", "(", "0", ")", "idx", "=", "np", ".", "arange", "(", "len", "(", "epochs", ")", ")", "np", ".", "random", ".", "shuffle", "(", "idx", ")", "xd", ".", "fit", "(", "epochs", "[", "idx", "]", ")", "denoise_shfl", "=", "xd", ".", "apply", "(", "epochs", ")", "assert_array_almost_equal", "(", "denoise", "[", "'cond2'", "]", ".", "_data", ",", "denoise_shfl", "[", "'cond2'", "]", ".", "_data", ")" ]
test xdawn apply and transform .
train
false
40,664
def hdmedian(data, axis=(-1), var=False): result = hdquantiles(data, [0.5], axis=axis, var=var) return result.squeeze()
[ "def", "hdmedian", "(", "data", ",", "axis", "=", "(", "-", "1", ")", ",", "var", "=", "False", ")", ":", "result", "=", "hdquantiles", "(", "data", ",", "[", "0.5", "]", ",", "axis", "=", "axis", ",", "var", "=", "var", ")", "return", "result", ".", "squeeze", "(", ")" ]
returns the harrell-davis estimate of the median along the given axis .
train
false
40,666
def time_diff(): now = time_utcnow() diff = (now - timedelta(minutes=flaskbb_config['ONLINE_LAST_MINUTES'])) return diff
[ "def", "time_diff", "(", ")", ":", "now", "=", "time_utcnow", "(", ")", "diff", "=", "(", "now", "-", "timedelta", "(", "minutes", "=", "flaskbb_config", "[", "'ONLINE_LAST_MINUTES'", "]", ")", ")", "return", "diff" ]
calculates the time difference between now and the online_last_minutes variable from the configuration .
train
false