id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
30,286
def wait_for_machine_to_recover(host, hours_to_wait=HOURS_TO_WAIT): current_time = time.strftime('%b %d %H:%M:%S', time.localtime()) if host.is_up(): logging.info('%s already up, collecting crash info', host.hostname) return True if (hours_to_wait > 0): logging.info('Waiting %s hours for %s to come up (%s)', hours_to_wait, host.hostname, current_time) if (not host.wait_up(timeout=(hours_to_wait * 3600))): logging.warning('%s down, unable to collect crash info', host.hostname) return False else: logging.info('%s is back up, collecting crash info', host.hostname) return True else: logging.info('Skipping crash info collection') return False
[ "def", "wait_for_machine_to_recover", "(", "host", ",", "hours_to_wait", "=", "HOURS_TO_WAIT", ")", ":", "current_time", "=", "time", ".", "strftime", "(", "'%b %d %H:%M:%S'", ",", "time", ".", "localtime", "(", ")", ")", "if", "host", ".", "is_up", "(", ")"...
wait for a machine to become accessible again .
train
false
30,288
def guard_pause_on_pp(): if cfg.pause_on_post_processing(): pass else: Downloader.do.resume_from_postproc()
[ "def", "guard_pause_on_pp", "(", ")", ":", "if", "cfg", ".", "pause_on_post_processing", "(", ")", ":", "pass", "else", ":", "Downloader", ".", "do", ".", "resume_from_postproc", "(", ")" ]
callback for change of pause-download-on-pp .
train
false
30,290
def test_rus_fit_single_class(): rus = RandomUnderSampler(random_state=RND_SEED) y_single_class = np.zeros((X.shape[0],)) assert_warns(UserWarning, rus.fit, X, y_single_class)
[ "def", "test_rus_fit_single_class", "(", ")", ":", "rus", "=", "RandomUnderSampler", "(", "random_state", "=", "RND_SEED", ")", "y_single_class", "=", "np", ".", "zeros", "(", "(", "X", ".", "shape", "[", "0", "]", ",", ")", ")", "assert_warns", "(", "Us...
test either if an error when there is a single class .
train
false
30,293
def pred_probs(f_log_probs, options, worddict, prepare_data, data, iterator, verbose=False): n_samples = len(data[0]) probs = numpy.zeros((n_samples, 1)).astype('float32') n_done = 0 for (_, valid_index) in iterator: (x, mask, ctx) = prepare_data([data[0][t] for t in valid_index], data[1], worddict, maxlen=None, n_words=options['n_words']) pred_probs = f_log_probs(x, mask, ctx) probs[valid_index] = pred_probs[:, None] n_done += len(valid_index) if verbose: print ('%d/%d samples computed' % (n_done, n_samples)) return probs
[ "def", "pred_probs", "(", "f_log_probs", ",", "options", ",", "worddict", ",", "prepare_data", ",", "data", ",", "iterator", ",", "verbose", "=", "False", ")", ":", "n_samples", "=", "len", "(", "data", "[", "0", "]", ")", "probs", "=", "numpy", ".", ...
if you want to use a trained model .
train
false
30,294
def getPhoneNumber(): try: mContext = autoclass('android.content.Context') pythonActivity = autoclass('org.renpy.android.PythonService') telephonyManager = cast('android.telephony.TelephonyManager', pythonActivity.mService.getSystemService(mContext.TELEPHONY_SERVICE)) phoneNumber = telephonyManager.getLine1Number() return phoneNumber except Exception as e: return None
[ "def", "getPhoneNumber", "(", ")", ":", "try", ":", "mContext", "=", "autoclass", "(", "'android.content.Context'", ")", "pythonActivity", "=", "autoclass", "(", "'org.renpy.android.PythonService'", ")", "telephonyManager", "=", "cast", "(", "'android.telephony.Telephon...
returns none if an error .
train
false
30,295
def in_for_else_branch(parent, stmt): return (isinstance(parent, astroid.For) and any((else_stmt.parent_of(stmt) for else_stmt in parent.orelse)))
[ "def", "in_for_else_branch", "(", "parent", ",", "stmt", ")", ":", "return", "(", "isinstance", "(", "parent", ",", "astroid", ".", "For", ")", "and", "any", "(", "(", "else_stmt", ".", "parent_of", "(", "stmt", ")", "for", "else_stmt", "in", "parent", ...
returns true if stmt in inside the else branch for a parent for stmt .
train
true
30,297
def test_adagrad(): (cost, model, dataset, sgd, state) = prepare_adagrad_test() def adagrad_manual(model, state): rval = [] for (scale, param) in izip(scales, model.get_params()): pstate = state[param] param_val = param.get_value() pstate['sg2'] += (param_val ** 2) dx_t = (- (((scale * learning_rate) / np.sqrt(pstate['sg2'])) * param_val)) rval += [(param_val + dx_t)] return rval manual = adagrad_manual(model, state) sgd.train(dataset=dataset) assert all((np.allclose(manual_param, sgd_param.get_value()) for (manual_param, sgd_param) in izip(manual, model.get_params()))) manual = adagrad_manual(model, state) sgd.train(dataset=dataset) assert all((np.allclose(manual_param, sgd_param.get_value()) for (manual_param, sgd_param) in izip(manual, model.get_params())))
[ "def", "test_adagrad", "(", ")", ":", "(", "cost", ",", "model", ",", "dataset", ",", "sgd", ",", "state", ")", "=", "prepare_adagrad_test", "(", ")", "def", "adagrad_manual", "(", "model", ",", "state", ")", ":", "rval", "=", "[", "]", "for", "(", ...
make sure that learning_rule .
train
false
30,298
def _execute_pep8(pep8_options, source): class QuietReport(pep8.BaseReport, ): u'Version of checker that does not print.' def __init__(self, options): super(QuietReport, self).__init__(options) self.__full_error_results = [] def error(self, line_number, offset, text, check): u'Collect errors.' code = super(QuietReport, self).error(line_number, offset, text, check) if code: self.__full_error_results.append({u'id': code, u'line': line_number, u'column': (offset + 1), u'info': text}) def full_error_results(self): u"Return error results in detail.\n\n Results are in the form of a list of dictionaries. Each\n dictionary contains 'id', 'line', 'column', and 'info'.\n\n " return self.__full_error_results checker = pep8.Checker(u'', lines=source, reporter=QuietReport, **pep8_options) checker.check_all() return checker.report.full_error_results()
[ "def", "_execute_pep8", "(", "pep8_options", ",", "source", ")", ":", "class", "QuietReport", "(", "pep8", ".", "BaseReport", ",", ")", ":", "def", "__init__", "(", "self", ",", "options", ")", ":", "super", "(", "QuietReport", ",", "self", ")", ".", "...
execute pep8 via python method calls .
train
false
30,299
def publish_display_data(data, source='bokeh'): import IPython.core.displaypub as displaypub try: displaypub.publish_display_data(source, data) except TypeError: displaypub.publish_display_data(data)
[ "def", "publish_display_data", "(", "data", ",", "source", "=", "'bokeh'", ")", ":", "import", "IPython", ".", "core", ".", "displaypub", "as", "displaypub", "try", ":", "displaypub", ".", "publish_display_data", "(", "source", ",", "data", ")", "except", "T...
compatibility wrapper for ipython publish_display_data later versions of ipython remove the source argument .
train
false
30,300
def _get_sysv_services(): try: sysv_services = os.listdir(INITSCRIPT_PATH) except OSError as exc: if (exc.errno == errno.EEXIST): pass elif (exc.errno == errno.EACCES): log.error('Unable to check sysvinit scripts, permission denied to %s', INITSCRIPT_PATH) else: log.error('Error %d encountered trying to check sysvinit scripts: %s', exc.errno, exc.strerror) return [] systemd_services = _get_systemd_services() ret = [] for sysv_service in sysv_services: if os.access(os.path.join(INITSCRIPT_PATH, sysv_service), os.X_OK): if (sysv_service in systemd_services): log.debug("sysvinit script '%s' found, but systemd unit '%s.service' already exists", sysv_service, sysv_service) continue ret.append(sysv_service) return ret
[ "def", "_get_sysv_services", "(", ")", ":", "try", ":", "sysv_services", "=", "os", ".", "listdir", "(", "INITSCRIPT_PATH", ")", "except", "OSError", "as", "exc", ":", "if", "(", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ")", ":", "pass", "eli...
use os .
train
true
30,301
def wait_for_xmodules_to_load(): callback = '\n if (modules[0] && modules[0].done) {{\n modules[0].done(function () {{callback(true)}});\n }}\n ' return load_requrejs_modules(['xmodule'], callback)
[ "def", "wait_for_xmodules_to_load", "(", ")", ":", "callback", "=", "'\\n if (modules[0] && modules[0].done) {{\\n modules[0].done(function () {{callback(true)}});\\n }}\\n '", "return", "load_requrejs_modules", "(", "[", "'xmodule'", "]", ",", "callback", ...
if requirejs is loaded on the page .
train
false
30,304
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
create a new des cipher .
train
false
30,305
def growl(registry, xml_parent, data): growl = XML.SubElement(xml_parent, 'hudson.plugins.growl.GrowlPublisher') growl.set('plugin', 'growl') mapping = [('ip', 'IP', None), ('notify-only-on-fail-or-recovery', 'onlyOnFailureOrRecovery', False)] helpers.convert_mapping_to_xml(growl, data, mapping, fail_required=True)
[ "def", "growl", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "growl", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.growl.GrowlPublisher'", ")", "growl", ".", "set", "(", "'plugin'", ",", "'growl'", ")", "mapping", "="...
yaml: growl push notifications to growl client .
train
false
30,306
def homogenize(series_dict): index = None need_reindex = False for (_, series) in compat.iteritems(series_dict): if (not np.isnan(series.fill_value)): raise TypeError('this method is only valid with NaN fill values') if (index is None): index = series.sp_index elif (not series.sp_index.equals(index)): need_reindex = True index = index.intersect(series.sp_index) if need_reindex: output = {} for (name, series) in compat.iteritems(series_dict): if (not series.sp_index.equals(index)): series = series.sparse_reindex(index) output[name] = series else: output = series_dict return output
[ "def", "homogenize", "(", "series_dict", ")", ":", "index", "=", "None", "need_reindex", "=", "False", "for", "(", "_", ",", "series", ")", "in", "compat", ".", "iteritems", "(", "series_dict", ")", ":", "if", "(", "not", "np", ".", "isnan", "(", "se...
conform a set of sparseseries to a common sparseindex corresponding to the locations where they all have data parameters series_dict : dict or dataframe notes using the dumbest algorithm i could think of .
train
true
30,308
def read_fixture_files(): fixture_dict = {} current_dir = os.path.dirname(__file__) fixture_dir = os.path.join(current_dir, u'fixture_data/') for filename in os.listdir(fixture_dir): if (filename not in [u'.', u'..']): fullname = os.path.join(fixture_dir, filename) fixture_dict[filename] = read_fixture_lines(fullname) return fixture_dict
[ "def", "read_fixture_files", "(", ")", ":", "fixture_dict", "=", "{", "}", "current_dir", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "fixture_dir", "=", "os", ".", "path", ".", "join", "(", "current_dir", ",", "u'fixture_data/'", ")", ...
read all files inside fixture_data directory .
train
false
30,310
@contextmanager def exception_logging(logger, msg): try: (yield) except Exception: logger.exception(msg) raise
[ "@", "contextmanager", "def", "exception_logging", "(", "logger", ",", "msg", ")", ":", "try", ":", "(", "yield", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "msg", ")", "raise" ]
provides exception logging via logger .
train
false
30,311
def delete_serving_url_async(blob_key, rpc=None): if (not blob_key): raise BlobKeyRequiredError('A Blobkey is required for this operation.') request = images_service_pb.ImagesDeleteUrlBaseRequest() response = images_service_pb.ImagesDeleteUrlBaseResponse() request.set_blob_key(_extract_blob_key(blob_key)) def delete_serving_url_hook(rpc): 'Checks success, handles exceptions and returns the converted RPC result.\n\n Args:\n rpc: A UserRPC object.\n\n Raises:\n See docstring for delete_serving_url_async for more details.\n ' try: rpc.check_success() except apiproxy_errors.ApplicationError as e: raise _ToImagesError(e, blob_key) return _make_async_call(rpc, 'DeleteUrlBase', request, response, delete_serving_url_hook, None)
[ "def", "delete_serving_url_async", "(", "blob_key", ",", "rpc", "=", "None", ")", ":", "if", "(", "not", "blob_key", ")", ":", "raise", "BlobKeyRequiredError", "(", "'A Blobkey is required for this operation.'", ")", "request", "=", "images_service_pb", ".", "Images...
delete a serving url created using get_serving_url - async version .
train
false
30,312
def map_histogram(hist, min_val, max_val, n_pixels): out = np.cumsum(hist).astype(float) scale = (float((max_val - min_val)) / n_pixels) out *= scale out += min_val out[(out > max_val)] = max_val return out.astype(int)
[ "def", "map_histogram", "(", "hist", ",", "min_val", ",", "max_val", ",", "n_pixels", ")", ":", "out", "=", "np", ".", "cumsum", "(", "hist", ")", ".", "astype", "(", "float", ")", "scale", "=", "(", "float", "(", "(", "max_val", "-", "min_val", ")...
calculate the equalized lookup table .
train
false
30,313
def list_record_types(profile): conn = _get_driver(profile=profile) return conn.list_record_types()
[ "def", "list_record_types", "(", "profile", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "return", "conn", ".", "list_record_types", "(", ")" ]
list available record types for the given profile .
train
false
30,315
def addPyListings(document, dir): for node in domhelpers.findElementsWithAttribute(document, 'class', 'py-listing'): filename = node.getAttribute('href') outfile = cStringIO.StringIO() lines = map(string.rstrip, open(os.path.join(dir, filename)).readlines()) skip = (node.getAttribute('skipLines') or 0) lines = lines[int(skip):] howManyLines = len(lines) data = '\n'.join(lines) data = cStringIO.StringIO(text.removeLeadingTrailingBlanks(data)) htmlizer.filter(data, outfile, writer=htmlizer.SmallerHTMLWriter) sourceNode = dom.parseString(outfile.getvalue()).documentElement sourceNode.insertBefore(_makeLineNumbers(howManyLines), sourceNode.firstChild) _replaceWithListing(node, sourceNode.toxml(), filename, 'py-listing')
[ "def", "addPyListings", "(", "document", ",", "dir", ")", ":", "for", "node", "in", "domhelpers", ".", "findElementsWithAttribute", "(", "document", ",", "'class'", ",", "'py-listing'", ")", ":", "filename", "=", "node", ".", "getAttribute", "(", "'href'", "...
insert python source listings into the given document from files in the given directory based on c{py-listing} nodes .
train
false
30,316
def register_element_cls(tag, cls): (nspfx, tagroot) = tag.split(':') namespace = element_class_lookup.get_namespace(nsmap[nspfx]) namespace[tagroot] = cls
[ "def", "register_element_cls", "(", "tag", ",", "cls", ")", ":", "(", "nspfx", ",", "tagroot", ")", "=", "tag", ".", "split", "(", "':'", ")", "namespace", "=", "element_class_lookup", ".", "get_namespace", "(", "nsmap", "[", "nspfx", "]", ")", "namespac...
register *cls* to be constructed when the oxml parser encounters an element with matching *tag* .
train
true
30,317
def recarray_fromrecords(rec_list): nfields = len(rec_list[0]) obj = np.array(rec_list, dtype=object) array_list = [np.array(obj[..., i].tolist()) for i in range(nfields)] formats = [] for obj in array_list: formats.append(obj.dtype.str) formats = u','.join(formats) return np.rec.fromarrays(array_list, formats=formats)
[ "def", "recarray_fromrecords", "(", "rec_list", ")", ":", "nfields", "=", "len", "(", "rec_list", "[", "0", "]", ")", "obj", "=", "np", ".", "array", "(", "rec_list", ",", "dtype", "=", "object", ")", "array_list", "=", "[", "np", ".", "array", "(", ...
partial replacement for ~numpy .
train
false
30,318
def to_painting(image, saturation=1.4, black=0.006): edges = sobel(image.mean(axis=2)) darkening = (black * (255 * np.dstack((3 * [edges])))) painting = ((saturation * image) - darkening) return np.maximum(0, np.minimum(255, painting)).astype('uint8')
[ "def", "to_painting", "(", "image", ",", "saturation", "=", "1.4", ",", "black", "=", "0.006", ")", ":", "edges", "=", "sobel", "(", "image", ".", "mean", "(", "axis", "=", "2", ")", ")", "darkening", "=", "(", "black", "*", "(", "255", "*", "np"...
transforms any photo into some kind of painting .
train
false
30,319
def instance_topology_from_instance(instance): if isinstance(instance, obj_instance.Instance): instance_numa_topology = instance.numa_topology elif ('numa_topology' in instance): instance_numa_topology = instance['numa_topology'] elif ('uuid' in instance): try: instance_numa_topology = objects.InstanceNUMATopology.get_by_instance_uuid(context.get_admin_context(), instance['uuid']) except exception.NumaTopologyNotFound: instance_numa_topology = None else: instance_numa_topology = None if instance_numa_topology: if isinstance(instance_numa_topology, six.string_types): instance_numa_topology = objects.InstanceNUMATopology.obj_from_primitive(jsonutils.loads(instance_numa_topology)) elif isinstance(instance_numa_topology, dict): dict_cells = instance_numa_topology.get('cells') if dict_cells: cells = [objects.InstanceNUMACell(id=cell['id'], cpuset=set(cell['cpuset']), memory=cell['memory'], pagesize=cell.get('pagesize'), cpu_pinning=cell.get('cpu_pinning_raw'), cpu_policy=cell.get('cpu_policy'), cpu_thread_policy=cell.get('cpu_thread_policy')) for cell in dict_cells] instance_numa_topology = objects.InstanceNUMATopology(cells=cells) return instance_numa_topology
[ "def", "instance_topology_from_instance", "(", "instance", ")", ":", "if", "isinstance", "(", "instance", ",", "obj_instance", ".", "Instance", ")", ":", "instance_numa_topology", "=", "instance", ".", "numa_topology", "elif", "(", "'numa_topology'", "in", "instance...
extract numa topology from myriad instance representations .
train
false
30,323
def process_rst_and_summaries(content_generators): for generator in content_generators: if isinstance(generator, generators.ArticlesGenerator): for article in ((generator.articles + generator.translations) + generator.drafts): rst_add_mathjax(article) if (process_summary.mathjax_script is not None): process_summary(article) elif isinstance(generator, generators.PagesGenerator): for page in generator.pages: rst_add_mathjax(page)
[ "def", "process_rst_and_summaries", "(", "content_generators", ")", ":", "for", "generator", "in", "content_generators", ":", "if", "isinstance", "(", "generator", ",", "generators", ".", "ArticlesGenerator", ")", ":", "for", "article", "in", "(", "(", "generator"...
ensure mathjax script is applied to rst and summaries are corrected if specified in user settings .
train
true
30,324
@frappe.whitelist() def apply_pricing_rule(args): if isinstance(args, basestring): args = json.loads(args) args = frappe._dict(args) if (not args.transaction_type): set_transaction_type(args) out = [] if (args.get(u'doctype') == u'Material Request'): return out item_list = args.get(u'items') args.pop(u'items') set_serial_nos_based_on_fifo = frappe.db.get_single_value(u'Stock Settings', u'automatically_set_serial_nos_based_on_fifo') for item in item_list: args_copy = copy.deepcopy(args) args_copy.update(item) out.append(get_pricing_rule_for_item(args_copy)) if set_serial_nos_based_on_fifo: out.append(get_serial_no_for_item(args_copy)) return out
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "apply_pricing_rule", "(", "args", ")", ":", "if", "isinstance", "(", "args", ",", "basestring", ")", ":", "args", "=", "json", ".", "loads", "(", "args", ")", "args", "=", "frappe", ".", "_dict", "("...
args = { "items": [{"doctype": "" .
train
false
30,325
def verify_length_and_trunc_password(password): max_length = CONF.identity.max_password_length try: if (len(password) > max_length): if CONF.strict_password_check: raise exception.PasswordVerificationError(size=max_length) else: msg = _LW('Truncating user password to %d characters.') LOG.warning(msg, max_length) return password[:max_length] else: return password except TypeError: raise exception.ValidationError(attribute='string', target='password')
[ "def", "verify_length_and_trunc_password", "(", "password", ")", ":", "max_length", "=", "CONF", ".", "identity", ".", "max_password_length", "try", ":", "if", "(", "len", "(", "password", ")", ">", "max_length", ")", ":", "if", "CONF", ".", "strict_password_c...
verify and truncate the provided password to the max_password_length .
train
false
30,326
def inject_enable(output): if ('form' in output): id = 'layer_enable' label = LABEL(('%s:' % T('Enable in Default Config?')), _for='enable') widget = INPUT(_name='enable', _type='checkbox', _value='on', _id='layer_enable', _class='boolean') comment = '' if (s3_formstyle == 'bootstrap'): _controls = DIV(widget, comment, _class='controls') row = DIV(label, _controls, _class='control-group', _id=('%s__row' % id)) elif callable(s3_formstyle): row = s3_formstyle(id, label, widget, comment) else: raise output['form'][0][(-2)].append(row)
[ "def", "inject_enable", "(", "output", ")", ":", "if", "(", "'form'", "in", "output", ")", ":", "id", "=", "'layer_enable'", "label", "=", "LABEL", "(", "(", "'%s:'", "%", "T", "(", "'Enable in Default Config?'", ")", ")", ",", "_for", "=", "'enable'", ...
inject an enable in default config? checkbox into the form .
train
false
30,327
def decode_htmlentities(text): def substitute_entity(match): try: ent = match.group(3) if (match.group(1) == '#'): if (match.group(2) == ''): return safe_unichr(int(ent)) elif (match.group(2) in ['x', 'X']): return safe_unichr(int(ent, 16)) else: cp = n2cp.get(ent) if cp: return safe_unichr(cp) else: return match.group() except: return match.group() return RE_HTML_ENTITY.sub(substitute_entity, text)
[ "def", "decode_htmlentities", "(", "text", ")", ":", "def", "substitute_entity", "(", "match", ")", ":", "try", ":", "ent", "=", "match", ".", "group", "(", "3", ")", "if", "(", "match", ".", "group", "(", "1", ")", "==", "'#'", ")", ":", "if", "...
decode html entities in text .
train
false
30,328
def ror(n, k, word_size=None): return rol(n, (- k), word_size)
[ "def", "ror", "(", "n", ",", "k", ",", "word_size", "=", "None", ")", ":", "return", "rol", "(", "n", ",", "(", "-", "k", ")", ",", "word_size", ")" ]
a simple wrapper around :func:rol .
train
false
30,329
def env_default(key): return os.environ.get('prawtest_{}'.format(key), 'placeholder_{}'.format(key))
[ "def", "env_default", "(", "key", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "'prawtest_{}'", ".", "format", "(", "key", ")", ",", "'placeholder_{}'", ".", "format", "(", "key", ")", ")" ]
return environment variable or placeholder string .
train
false
30,330
def execute_query(request, design_id=None, query_history_id=None): action = 'query' if query_history_id: query_history = authorized_get_query_history(request, query_history_id, must_exist=True) design = query_history.design try: if (query_history.server_id and query_history.server_guid): (handle, state) = _get_query_handle_and_state(query_history) if ('on_success_url' in request.GET): if (request.GET.get('on_success_url') and any([regexp.match(request.GET.get('on_success_url')) for regexp in REDIRECT_WHITELIST.get()])): action = 'watch-redirect' else: action = 'watch-results' else: action = 'editor-results' except QueryServerException as e: if (('Invalid query handle' in e.message) or ('Invalid OperationHandle' in e.message)): query_history.save_state(QueryHistory.STATE.expired) LOG.warn('Invalid query handle', exc_info=sys.exc_info()) action = 'editor-expired-results' else: raise e else: authorized_get_design(request, design_id) app_name = get_app_name(request) query_type = SavedQuery.TYPES_MAPPING[app_name] design = safe_get_design(request, query_type, design_id) query_history = None doc = (design and design.id and design.doc.get()) context = {'design': design, 'query': query_history, 'query_history': query_history, 'autocomplete_base_url': reverse((get_app_name(request) + ':api_autocomplete_databases'), kwargs={}), 'autocomplete_base_url_hive': reverse('beeswax:api_autocomplete_databases', kwargs={}), 'can_edit_name': (design and design.id and (not design.is_auto)), 'doc_id': ((doc and doc.id) or (-1)), 'can_edit': (doc and doc.can_write(request.user)), 'action': action, 'on_success_url': request.GET.get('on_success_url'), 'has_metastore': ('metastore' in get_apps_dict(request.user))} return render('execute.mako', request, context)
[ "def", "execute_query", "(", "request", ",", "design_id", "=", "None", ",", "query_history_id", "=", "None", ")", ":", "action", "=", "'query'", "if", "query_history_id", ":", "query_history", "=", "authorized_get_query_history", "(", "request", ",", "query_histor...
view function for executing an arbitrary synchronously query .
train
false
30,331
def getDurationString(seconds): secondsRounded = int(round(seconds)) durationString = getPluralString((secondsRounded % 60), 'second') if (seconds < 60): return durationString durationString = ('%s %s' % (getPluralString(((secondsRounded / 60) % 60), 'minute'), durationString)) if (seconds < 3600): return durationString return ('%s %s' % (getPluralString((secondsRounded / 3600), 'hour'), durationString))
[ "def", "getDurationString", "(", "seconds", ")", ":", "secondsRounded", "=", "int", "(", "round", "(", "seconds", ")", ")", "durationString", "=", "getPluralString", "(", "(", "secondsRounded", "%", "60", ")", ",", "'second'", ")", "if", "(", "seconds", "<...
get the duration string .
train
false
30,332
def rgb2gray(rgb): if (rgb.ndim == 2): return np.ascontiguousarray(rgb) rgb = _prepare_colorarray(rgb[..., :3]) gray = (0.2125 * rgb[..., 0]) gray[:] += (0.7154 * rgb[..., 1]) gray[:] += (0.0721 * rgb[..., 2]) return gray
[ "def", "rgb2gray", "(", "rgb", ")", ":", "if", "(", "rgb", ".", "ndim", "==", "2", ")", ":", "return", "np", ".", "ascontiguousarray", "(", "rgb", ")", "rgb", "=", "_prepare_colorarray", "(", "rgb", "[", "...", ",", ":", "3", "]", ")", "gray", "=...
compute luminance of an rgb image .
train
false
30,333
def isNoneValue(value): if isinstance(value, basestring): return (value in ('None', '')) elif isListLike(value): return all((isNoneValue(_) for _ in value)) elif isinstance(value, dict): return (not any(value)) else: return (value is None)
[ "def", "isNoneValue", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "return", "(", "value", "in", "(", "'None'", ",", "''", ")", ")", "elif", "isListLike", "(", "value", ")", ":", "return", "all", "(", "(", ...
returns whether the value is unusable .
train
false
30,335
def scroll_half_page_up(event): scroll_backward(event, half=True)
[ "def", "scroll_half_page_up", "(", "event", ")", ":", "scroll_backward", "(", "event", ",", "half", "=", "True", ")" ]
same as controlb .
train
false
30,336
def ComputeOutputDir(params): generator_dir = os.path.relpath((params['options'].generator_output or '.')) output_dir = params.get('generator_flags', {}).get('output_dir', 'out') return os.path.normpath(os.path.join(generator_dir, output_dir))
[ "def", "ComputeOutputDir", "(", "params", ")", ":", "generator_dir", "=", "os", ".", "path", ".", "relpath", "(", "(", "params", "[", "'options'", "]", ".", "generator_output", "or", "'.'", ")", ")", "output_dir", "=", "params", ".", "get", "(", "'genera...
returns the path from the toplevel_dir to the build output directory .
train
false
30,339
def is_binary_string(obj): if PY2: return isinstance(obj, str) else: return isinstance(obj, bytes)
[ "def", "is_binary_string", "(", "obj", ")", ":", "if", "PY2", ":", "return", "isinstance", "(", "obj", ",", "str", ")", "else", ":", "return", "isinstance", "(", "obj", ",", "bytes", ")" ]
return true if obj is a binary string .
train
false
30,340
def _justify_and_indent(text, level, munge=0, width=72): indent = (' ' * level) if munge: lines = [] line = indent text = text.split() for word in text: line = ' '.join([line, word]) if (len(line) > width): lines.append(line) line = indent else: lines.append(line) return '\n'.join(lines) else: return (indent + text.strip().replace('\r\n', '\n').replace('\n', ('\n' + indent)))
[ "def", "_justify_and_indent", "(", "text", ",", "level", ",", "munge", "=", "0", ",", "width", "=", "72", ")", ":", "indent", "=", "(", "' '", "*", "level", ")", "if", "munge", ":", "lines", "=", "[", "]", "line", "=", "indent", "text", "=", "tex...
indent and justify text .
train
false
30,341
def is_loaded(): global _lib return (_lib is not None)
[ "def", "is_loaded", "(", ")", ":", "global", "_lib", "return", "(", "_lib", "is", "not", "None", ")" ]
check to see if the specified kernel module is loaded cli example: .
train
false
30,342
def reshape_t(x, shape): if (shape != ()): return x.reshape(shape) else: return x[0]
[ "def", "reshape_t", "(", "x", ",", "shape", ")", ":", "if", "(", "shape", "!=", "(", ")", ")", ":", "return", "x", ".", "reshape", "(", "shape", ")", "else", ":", "return", "x", "[", "0", "]" ]
work around fact that x .
train
false
30,344
def test_aware_datetime(settings): datetime_object = aware_datetime(2016, 1, 2, 21, 52, 25) assert timezone.is_aware(datetime_object) assert (datetime_object.tzinfo.zone == settings.TIME_ZONE)
[ "def", "test_aware_datetime", "(", "settings", ")", ":", "datetime_object", "=", "aware_datetime", "(", "2016", ",", "1", ",", "2", ",", "21", ",", "52", ",", "25", ")", "assert", "timezone", ".", "is_aware", "(", "datetime_object", ")", "assert", "(", "...
tests the creation of a timezone-aware datetime .
train
false
30,345
def test_none(value): return (value is None)
[ "def", "test_none", "(", "value", ")", ":", "return", "(", "value", "is", "None", ")" ]
return true if the variable is none .
train
false
30,347
def download_manifest_files(filepath): validate_manifest(filepath) manifest_data = return_json(filepath) dependencies = manifest_data['dependencies'] for (data, dependency) in dependencies.items(): for (_, dependency_contents) in dependency.items(): dependency_rev = dependency_contents['version'] dependency_url = dependency_contents['url'] download_format = dependency_contents['downloadFormat'] if (download_format == _DOWNLOAD_FORMAT_FILES): dependency_files = dependency_contents['files'] target_dirname = (dependency_contents['targetDirPrefix'] + dependency_rev) dependency_dst = os.path.join(TARGET_DOWNLOAD_DIRS[data], target_dirname) download_files(dependency_url, dependency_dst, dependency_files) elif (download_format == _DOWNLOAD_FORMAT_ZIP): if ('rootDir' in dependency_contents): dependency_zip_root_name = dependency_contents['rootDir'] else: dependency_zip_root_name = (dependency_contents['rootDirPrefix'] + dependency_rev) if ('targetDir' in dependency_contents): dependency_target_root_name = dependency_contents['targetDir'] else: dependency_target_root_name = (dependency_contents['targetDirPrefix'] + dependency_rev) download_and_unzip_files(dependency_url, TARGET_DOWNLOAD_DIRS[data], dependency_zip_root_name, dependency_target_root_name) elif (download_format == _DOWNLOAD_FORMAT_TAR): dependency_tar_root_name = (dependency_contents['tarRootDirPrefix'] + dependency_rev) dependency_target_root_name = (dependency_contents['targetDirPrefix'] + dependency_rev) download_and_untar_files(dependency_url, TARGET_DOWNLOAD_DIRS[data], dependency_tar_root_name, dependency_target_root_name)
[ "def", "download_manifest_files", "(", "filepath", ")", ":", "validate_manifest", "(", "filepath", ")", "manifest_data", "=", "return_json", "(", "filepath", ")", "dependencies", "=", "manifest_data", "[", "'dependencies'", "]", "for", "(", "data", ",", "dependenc...
this download all files to the required folders args: filepath: the path to the json file .
train
false
30,348
def ping_listener(dbapi_conn, connection_rec, connection_proxy): try: dbapi_conn.cursor().execute('select 1') except dbapi_conn.OperationalError as ex: if (ex.args[0] in (2006, 2013, 2014, 2045, 2055)): msg = ('Got mysql server has gone away: %s' % ex) LOG.warn(msg) raise sqlalchemy.exc.DisconnectionError(msg) else: raise
[ "def", "ping_listener", "(", "dbapi_conn", ",", "connection_rec", ",", "connection_proxy", ")", ":", "try", ":", "dbapi_conn", ".", "cursor", "(", ")", ".", "execute", "(", "'select 1'", ")", "except", "dbapi_conn", ".", "OperationalError", "as", "ex", ":", ...
ensures that mysql connections checked out of the pool are alive .
train
false
30,349
def jit_graph(data): G = nx.Graph() for node in data: G.add_node(node['id'], **node['data']) if (node.get('adjacencies') is not None): for adj in node['adjacencies']: G.add_edge(node['id'], adj['nodeTo'], **adj['data']) return G
[ "def", "jit_graph", "(", "data", ")", ":", "G", "=", "nx", ".", "Graph", "(", ")", "for", "node", "in", "data", ":", "G", ".", "add_node", "(", "node", "[", "'id'", "]", ",", "**", "node", "[", "'data'", "]", ")", "if", "(", "node", ".", "get...
read a graph from jit json .
train
false
30,350
def mapped_collection(keyfunc): return (lambda : MappedCollection(keyfunc))
[ "def", "mapped_collection", "(", "keyfunc", ")", ":", "return", "(", "lambda", ":", "MappedCollection", "(", "keyfunc", ")", ")" ]
a dictionary-based collection type with arbitrary keying .
train
false
30,351
@pytest.fixture def make_fake_project_dir(request): os.makedirs('fake-project')
[ "@", "pytest", ".", "fixture", "def", "make_fake_project_dir", "(", "request", ")", ":", "os", ".", "makedirs", "(", "'fake-project'", ")" ]
create a fake project to be overwritten in the according tests .
train
false
30,354
def device_pointer_type(devmem): ptrtype = c_int(0) _device_pointer_attr(devmem, enums.CU_POINTER_ATTRIBUTE_MEMORY_TYPE, ptrtype) map = {enums.CU_MEMORYTYPE_HOST: 'host', enums.CU_MEMORYTYPE_DEVICE: 'device', enums.CU_MEMORYTYPE_ARRAY: 'array', enums.CU_MEMORYTYPE_UNIFIED: 'unified'} return map[ptrtype.value]
[ "def", "device_pointer_type", "(", "devmem", ")", ":", "ptrtype", "=", "c_int", "(", "0", ")", "_device_pointer_attr", "(", "devmem", ",", "enums", ".", "CU_POINTER_ATTRIBUTE_MEMORY_TYPE", ",", "ptrtype", ")", "map", "=", "{", "enums", ".", "CU_MEMORYTYPE_HOST",...
query the device pointer type: host .
train
false
30,356
def server(host, port, func): def handler(conn): try: (yield func(conn)) finally: conn.close() listener = Listener(host, port) try: while True: conn = (yield listener.accept()) (yield spawn(handler(conn))) except KeyboardInterrupt: pass finally: listener.close()
[ "def", "server", "(", "host", ",", "port", ",", "func", ")", ":", "def", "handler", "(", "conn", ")", ":", "try", ":", "(", "yield", "func", "(", "conn", ")", ")", "finally", ":", "conn", ".", "close", "(", ")", "listener", "=", "Listener", "(", ...
require the nginx web server to be installed and running .
train
false
30,357
def _convert_datetime_to_stata_type(fmt): if (fmt in ['tc', '%tc', 'td', '%td', 'tw', '%tw', 'tm', '%tm', 'tq', '%tq', 'th', '%th', 'ty', '%ty']): return np.float64 else: raise ValueError(('fmt %s not understood' % fmt))
[ "def", "_convert_datetime_to_stata_type", "(", "fmt", ")", ":", "if", "(", "fmt", "in", "[", "'tc'", ",", "'%tc'", ",", "'td'", ",", "'%td'", ",", "'tw'", ",", "'%tw'", ",", "'tm'", ",", "'%tm'", ",", "'tq'", ",", "'%tq'", ",", "'th'", ",", "'%th'", ...
converts from one of the stata date formats to a type in type_map .
train
false
30,358
def crop(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG, quality=None, correct_orientation=UNCHANGED_ORIENTATION, rpc=None, transparent_substitution_rgb=None): rpc = crop_async(image_data, left_x, top_y, right_x, bottom_y, output_encoding=output_encoding, quality=quality, correct_orientation=correct_orientation, rpc=rpc, transparent_substitution_rgb=transparent_substitution_rgb) return rpc.get_result()
[ "def", "crop", "(", "image_data", ",", "left_x", ",", "top_y", ",", "right_x", ",", "bottom_y", ",", "output_encoding", "=", "PNG", ",", "quality", "=", "None", ",", "correct_orientation", "=", "UNCHANGED_ORIENTATION", ",", "rpc", "=", "None", ",", "transpar...
randomly or centrally crop an image .
train
false
30,360
@with_setup(prepare_stdout, registry.clear) def test_xunit_output_with_no_steps(): called = [] def assert_correct_xml(filename, content): print filename print content called.append(True) assert_xsd_valid(filename, content) root = etree.fromstring(content) assert_equals(root.get('tests'), '1') assert_equals(root.find('testcase').get('name'), 'Given I do nothing') assert_equals(len(root.getchildren()), 1) assert_equals(root.find('testcase/skipped').get('type'), 'UndefinedStep(Given I do nothing)') assert_equals(float(root.find('testcase').get('time')), 0) old = xunit_output.wrt_output xunit_output.wrt_output = assert_correct_xml runner = Runner(feature_name('no_steps_defined'), enable_xunit=True) runner.run() assert_equals(1, len(called), 'Function not called') xunit_output.wrt_output = old
[ "@", "with_setup", "(", "prepare_stdout", ",", "registry", ".", "clear", ")", "def", "test_xunit_output_with_no_steps", "(", ")", ":", "called", "=", "[", "]", "def", "assert_correct_xml", "(", "filename", ",", "content", ")", ":", "print", "filename", "print"...
test xunit output with no steps .
train
false
30,361
def params_transform_univariate(params, cov_params, link=None, transform=None, row_labels=None): from statsmodels.genmod.families import links if ((link is None) and (transform is None)): link = links.Log() if ((row_labels is None) and hasattr(params, 'index')): row_labels = params.index params = np.asarray(params) predicted_mean = link.inverse(params) link_deriv = link.inverse_deriv(params) var_pred_mean = ((link_deriv ** 2) * np.diag(cov_params)) dist = stats.norm linpred = PredictionResults(params, np.diag(cov_params), dist=dist, row_labels=row_labels, link=links.identity()) res = PredictionResults(predicted_mean, var_pred_mean, dist=dist, row_labels=row_labels, linpred=linpred, link=link) return res
[ "def", "params_transform_univariate", "(", "params", ",", "cov_params", ",", "link", "=", "None", ",", "transform", "=", "None", ",", "row_labels", "=", "None", ")", ":", "from", "statsmodels", ".", "genmod", ".", "families", "import", "links", "if", "(", ...
results for univariate .
train
false
30,362
def _sprinkle(config_str): parts = [x for sub in config_str.split('{') for x in sub.split('}')] for i in range(1, len(parts), 2): parts[i] = str(__grains__.get(parts[i], '')) return ''.join(parts)
[ "def", "_sprinkle", "(", "config_str", ")", ":", "parts", "=", "[", "x", "for", "sub", "in", "config_str", ".", "split", "(", "'{'", ")", "for", "x", "in", "sub", ".", "split", "(", "'}'", ")", "]", "for", "i", "in", "range", "(", "1", ",", "le...
sprinkle with grains of salt .
train
true
30,363
def verify_hostname(ssl_sock, server_hostname): if isinstance(server_hostname, bytes): server_hostname = server_hostname.decode('ascii') return _verify(ssl_sock._conn, server_hostname)
[ "def", "verify_hostname", "(", "ssl_sock", ",", "server_hostname", ")", ":", "if", "isinstance", "(", "server_hostname", ",", "bytes", ")", ":", "server_hostname", "=", "server_hostname", ".", "decode", "(", "'ascii'", ")", "return", "_verify", "(", "ssl_sock", ...
a method nearly compatible with the stdlibs match_hostname .
train
false
30,364
def is_root(directory): return (os.path.dirname(directory) == directory)
[ "def", "is_root", "(", "directory", ")", ":", "return", "(", "os", ".", "path", ".", "dirname", "(", "directory", ")", "==", "directory", ")" ]
check if uri is s3 root .
train
false
30,368
def iscsi_target_count_by_host(context, host): return IMPL.iscsi_target_count_by_host(context, host)
[ "def", "iscsi_target_count_by_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "iscsi_target_count_by_host", "(", "context", ",", "host", ")" ]
return count of export devices .
train
false
30,369
def mutator_stage(func): def coro(*args): task = None while True: task = (yield task) func(*(args + (task,))) return coro
[ "def", "mutator_stage", "(", "func", ")", ":", "def", "coro", "(", "*", "args", ")", ":", "task", "=", "None", "while", "True", ":", "task", "=", "(", "yield", "task", ")", "func", "(", "*", "(", "args", "+", "(", "task", ",", ")", ")", ")", ...
decorate a function that manipulates items in a coroutine to become a simple stage .
train
false
30,372
def has_tasks(dsk, x): if istask(x): return True try: if (x in dsk): return True except: pass if isinstance(x, list): for i in x: if has_tasks(dsk, i): return True return False
[ "def", "has_tasks", "(", "dsk", ",", "x", ")", ":", "if", "istask", "(", "x", ")", ":", "return", "True", "try", ":", "if", "(", "x", "in", "dsk", ")", ":", "return", "True", "except", ":", "pass", "if", "isinstance", "(", "x", ",", "list", ")"...
whether x has anything to compute .
train
false
30,373
def _repo_changes(realrepo, repocmp): for k in repocmp: if (repocmp[k] and (k not in realrepo)): return True for (k, v) in realrepo.items(): if ((k in repocmp) and repocmp[k]): valold = str((repocmp[k] or '')) valnew = (v or '') if (k == 'url'): (valold, valnew) = (valold.rstrip('/'), valnew.rstrip('/')) if (valold != valnew): return True return False
[ "def", "_repo_changes", "(", "realrepo", ",", "repocmp", ")", ":", "for", "k", "in", "repocmp", ":", "if", "(", "repocmp", "[", "k", "]", "and", "(", "k", "not", "in", "realrepo", ")", ")", ":", "return", "True", "for", "(", "k", ",", "v", ")", ...
check whether the 2 given repos have different settings .
train
false
30,374
def init_notebook_mode(connected=False): if (not ipython): raise ImportError('`iplot` can only run inside an IPython Notebook.') global __PLOTLY_OFFLINE_INITIALIZED if connected: script_inject = "<script>requirejs.config({paths: { 'plotly': ['https://cdn.plot.ly/plotly-latest.min']},});if(!window.Plotly) {{require(['plotly'],function(plotly) {window.Plotly=plotly;});}}</script>" else: script_inject = "<script type='text/javascript'>if(!window.Plotly){{define('plotly', function(require, exports, module) {{{script}}});require(['plotly'], function(Plotly) {{window.Plotly = Plotly;}});}}</script>".format(script=get_plotlyjs()) ipython_display.display(ipython_display.HTML(script_inject)) __PLOTLY_OFFLINE_INITIALIZED = True
[ "def", "init_notebook_mode", "(", "connected", "=", "False", ")", ":", "if", "(", "not", "ipython", ")", ":", "raise", "ImportError", "(", "'`iplot` can only run inside an IPython Notebook.'", ")", "global", "__PLOTLY_OFFLINE_INITIALIZED", "if", "connected", ":", "scr...
initialize plotly .
train
false
30,375
def load_fabfile(path, importer=None): if (importer is None): importer = __import__ (directory, fabfile) = os.path.split(path) added_to_path = False index = None if (directory not in sys.path): sys.path.insert(0, directory) added_to_path = True else: i = sys.path.index(directory) if (i != 0): index = i sys.path.insert(0, directory) del sys.path[(i + 1)] imported = importer(os.path.splitext(fabfile)[0]) if added_to_path: del sys.path[0] if (index is not None): sys.path.insert((index + 1), directory) del sys.path[0] (docstring, new_style, classic, default) = load_tasks_from_module(imported) tasks = (new_style if state.env.new_style_tasks else classic) _seen.clear() return (docstring, tasks, default)
[ "def", "load_fabfile", "(", "path", ",", "importer", "=", "None", ")", ":", "if", "(", "importer", "is", "None", ")", ":", "importer", "=", "__import__", "(", "directory", ",", "fabfile", ")", "=", "os", ".", "path", ".", "split", "(", "path", ")", ...
import given fabfile path and return .
train
false
30,376
def _walk(top, topdown=True, onerror=None, followlinks=False): dirs = [] nondirs = [] try: scandir_it = scandir(top) except OSError as error: if (onerror is not None): onerror(error) return while True: try: try: entry = next(scandir_it) except StopIteration: break except OSError as error: if (onerror is not None): onerror(error) return try: is_dir = entry.is_dir() except OSError: is_dir = False if is_dir: dirs.append(entry.name) else: nondirs.append(entry.name) if ((not topdown) and is_dir): if followlinks: walk_into = True else: try: is_symlink = entry.is_symlink() except OSError: is_symlink = False walk_into = (not is_symlink) if walk_into: for entry in walk(entry.path, topdown, onerror, followlinks): (yield entry) if topdown: (yield (top, dirs, nondirs)) for name in dirs: new_path = join(top, name) if (followlinks or (not islink(new_path))): for entry in walk(new_path, topdown, onerror, followlinks): (yield entry) else: (yield (top, dirs, nondirs))
[ "def", "_walk", "(", "top", ",", "topdown", "=", "True", ",", "onerror", "=", "None", ",", "followlinks", "=", "False", ")", ":", "dirs", "=", "[", "]", "nondirs", "=", "[", "]", "try", ":", "scandir_it", "=", "scandir", "(", "top", ")", "except", ...
recursively include metrics from *value* .
train
true
30,378
def sshd(network, cmd='/usr/sbin/sshd', opts='-D', ip='10.123.123.1/32', routes=None, switch=None): if (not switch): switch = network['s1'] if (not routes): routes = ['10.0.0.0/24'] connectToRootNS(network, switch, ip, routes) for host in network.hosts: host.cmd((((cmd + ' ') + opts) + '&')) info('*** Waiting for ssh daemons to start\n') for server in network.hosts: waitListening(server=server, port=22, timeout=5) info('\n*** Hosts are running sshd at the following addresses:\n') for host in network.hosts: info(host.name, host.IP(), '\n') info("\n*** Type 'exit' or control-D to shut down network\n") CLI(network) for host in network.hosts: host.cmd(('kill %' + cmd)) network.stop()
[ "def", "sshd", "(", "network", ",", "cmd", "=", "'/usr/sbin/sshd'", ",", "opts", "=", "'-D'", ",", "ip", "=", "'10.123.123.1/32'", ",", "routes", "=", "None", ",", "switch", "=", "None", ")", ":", "if", "(", "not", "switch", ")", ":", "switch", "=", ...
start a network .
train
false
30,379
def join_domain(name, username=None, password=None, account_ou=None, account_exists=False, restart=False): ret = {'name': name, 'changes': {}, 'result': True, 'comment': "Computer already added to '{0}'".format(name)} domain = name current_domain_dic = __salt__['system.get_domain_workgroup']() if ('Domain' in current_domain_dic): current_domain = current_domain_dic['Domain'] elif ('Workgroup' in current_domain_dic): current_domain = 'Workgroup' else: current_domain = None if (domain == current_domain): ret['comment'] = "Computer already added to '{0}'".format(name) return ret if __opts__['test']: ret['result'] = None ret['comment'] = "Computer will be added to '{0}'".format(name) return ret result = __salt__['system.join_domain'](domain, username, password, account_ou, account_exists, restart) if (result is not False): ret['comment'] = "Computer added to '{0}'".format(name) else: ret['comment'] = "Computer failed to join '{0}'".format(name) ret['result'] = False return ret
[ "def", "join_domain", "(", "name", ",", "username", "=", "None", ",", "password", "=", "None", ",", "account_ou", "=", "None", ",", "account_exists", "=", "False", ",", "restart", "=", "False", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "...
checks if a computer is joined to the domain .
train
true
30,380
def _tx_resource_for_name(name): if (name == 'core'): return 'django.core' else: return ('django.contrib-%s' % name)
[ "def", "_tx_resource_for_name", "(", "name", ")", ":", "if", "(", "name", "==", "'core'", ")", ":", "return", "'django.core'", "else", ":", "return", "(", "'django.contrib-%s'", "%", "name", ")" ]
return the transifex resource name .
train
false
30,381
def dmp_mul(f, g, u, K): if (not u): return dup_mul(f, g, K) if (f == g): return dmp_sqr(f, u, K) df = dmp_degree(f, u) if (df < 0): return f dg = dmp_degree(g, u) if (dg < 0): return g (h, v) = ([], (u - 1)) for i in range(0, ((df + dg) + 1)): coeff = dmp_zero(v) for j in range(max(0, (i - dg)), (min(df, i) + 1)): coeff = dmp_add(coeff, dmp_mul(f[j], g[(i - j)], v, K), v, K) h.append(coeff) return dmp_strip(h, u)
[ "def", "dmp_mul", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_mul", "(", "f", ",", "g", ",", "K", ")", "if", "(", "f", "==", "g", ")", ":", "return", "dmp_sqr", "(", "f", ",", "u", "...
multiply dense polynomials in k[x] .
train
false
30,382
@library.global_function def group_avatar(group_profile): if group_profile.avatar: return group_profile.avatar.url else: return (settings.STATIC_URL + settings.DEFAULT_AVATAR)
[ "@", "library", ".", "global_function", "def", "group_avatar", "(", "group_profile", ")", ":", "if", "group_profile", ".", "avatar", ":", "return", "group_profile", ".", "avatar", ".", "url", "else", ":", "return", "(", "settings", ".", "STATIC_URL", "+", "s...
return a url to the groups avatar .
train
false
30,383
def _is_auth_info_available(): return (((_ENV_AUTH_EMAIL in os.environ) and (_ENV_AUTH_DOMAIN in os.environ)) or (_ENV_USE_OAUTH_SCOPE in os.environ))
[ "def", "_is_auth_info_available", "(", ")", ":", "return", "(", "(", "(", "_ENV_AUTH_EMAIL", "in", "os", ".", "environ", ")", "and", "(", "_ENV_AUTH_DOMAIN", "in", "os", ".", "environ", ")", ")", "or", "(", "_ENV_USE_OAUTH_SCOPE", "in", "os", ".", "environ...
check if user auth info has been set in environment variables .
train
true
30,384
def valid_name(name): if isinstance(name, bytes): name = name.decode('ascii') if (not Definitions.COOKIE_NAME_RE.match(name)): return False if (name[0] == '$'): return False return True
[ "def", "valid_name", "(", "name", ")", ":", "if", "isinstance", "(", "name", ",", "bytes", ")", ":", "name", "=", "name", ".", "decode", "(", "'ascii'", ")", "if", "(", "not", "Definitions", ".", "COOKIE_NAME_RE", ".", "match", "(", "name", ")", ")",...
validate a cookie name string .
train
true
30,385
def remove_mappings(quickstart_xml, webdefault_xml): tags_to_examine = ['filter-mapping', 'servlet-mapping'] default_urls = set() default_root = xml.etree.ElementTree.fromstring(webdefault_xml) for child in _children_with_tags(default_root, tags_to_examine): for grandchild in _children_with_tag(child, 'url-pattern'): url = grandchild.text.strip() if url.startswith('/'): default_urls.add(url) to_remove = [] quickstart_root = xml.etree.ElementTree.fromstring(quickstart_xml) for child in _children_with_tags(quickstart_root, tags_to_examine): for grandchild in _children_with_tag(child, 'url-pattern'): if (grandchild.text.strip() in default_urls): to_remove.append(child) for child in to_remove: quickstart_root.remove(child) output = cStringIO.StringIO() xml.etree.ElementTree.ElementTree(quickstart_root).write(output) return output.getvalue()
[ "def", "remove_mappings", "(", "quickstart_xml", ",", "webdefault_xml", ")", ":", "tags_to_examine", "=", "[", "'filter-mapping'", ",", "'servlet-mapping'", "]", "default_urls", "=", "set", "(", ")", "default_root", "=", "xml", ".", "etree", ".", "ElementTree", ...
removes mappings from quickstart-web .
train
false
30,387
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
returns a form for a new genericdatasetjob .
train
false
30,388
def Grouper(iterable, n): items = [] for (count, item) in enumerate(iterable): items.append(item) if (((count + 1) % n) == 0): (yield items) items = [] if items: (yield items)
[ "def", "Grouper", "(", "iterable", ",", "n", ")", ":", "items", "=", "[", "]", "for", "(", "count", ",", "item", ")", "in", "enumerate", "(", "iterable", ")", ":", "items", ".", "append", "(", "item", ")", "if", "(", "(", "(", "count", "+", "1"...
group iterable into lists of size n .
train
false
30,389
def _fix_clear_tags(x): _fix = _fix_clear_tags if isinstance(x, list): return [_fix(_strip_clear_tag(item)) for item in x] elif isinstance(x, dict): d = dict(((_fix(k), _fix(v)) for (k, v) in x.items())) for (k, v) in list(d.items()): if isinstance(k, ClearedValue): del d[k] d[_strip_clear_tag(k)] = ClearedValue(_strip_clear_tag(v)) return d elif isinstance(x, ClearedValue): return ClearedValue(_fix(x.value)) else: return x
[ "def", "_fix_clear_tags", "(", "x", ")", ":", "_fix", "=", "_fix_clear_tags", "if", "isinstance", "(", "x", ",", "list", ")", ":", "return", "[", "_fix", "(", "_strip_clear_tag", "(", "item", ")", ")", "for", "item", "in", "x", "]", "elif", "isinstance...
recursively resolve :py:class:clearedvalue wrappers so that clearedvalue can only wrap values in dicts .
train
false
30,390
@image_comparison(baseline_images=[u'legend_stackplot'], extensions=[u'png']) def test_legend_stackplot(): fig = plt.figure() ax = fig.add_subplot(111) x = np.linspace(0, 10, 10) y1 = (1.0 * x) y2 = ((2.0 * x) + 1) y3 = ((3.0 * x) + 2) ax.stackplot(x, y1, y2, y3, labels=[u'y1', u'y2', u'y3']) ax.set_xlim((0, 10)) ax.set_ylim((0, 70)) ax.legend(loc=0)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'legend_stackplot'", "]", ",", "extensions", "=", "[", "u'png'", "]", ")", "def", "test_legend_stackplot", "(", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add...
test legend for polycollection using stackplot .
train
false
30,392
def get_site_packages(venv): bin_path = _verify_virtualenv(venv) ret = __salt__['cmd.exec_code_all'](bin_path, 'from distutils import sysconfig; print sysconfig.get_python_lib()') if (ret['retcode'] != 0): raise CommandExecutionError('{stdout}\n{stderr}'.format(**ret)) return ret['stdout']
[ "def", "get_site_packages", "(", "venv", ")", ":", "bin_path", "=", "_verify_virtualenv", "(", "venv", ")", "ret", "=", "__salt__", "[", "'cmd.exec_code_all'", "]", "(", "bin_path", ",", "'from distutils import sysconfig; print sysconfig.get_python_lib()'", ")", "if", ...
return the path to the site-packages directory of a virtualenv venv path to the virtualenv .
train
true
30,393
def set_state(name, backend, state, socket='/var/run/haproxy.sock'): class setServerState(haproxy.cmds.Cmd, ): 'Set server state command.' cmdTxt = 'set server %(backend)s/%(server)s state %(value)s\r\n' p_args = ['backend', 'server', 'value'] helpTxt = "Force a server's administrative state to a new state." ha_conn = _get_conn(socket) ha_cmd = setServerState(server=name, backend=backend, value=state) return ha_conn.sendCmd(ha_cmd)
[ "def", "set_state", "(", "name", ",", "backend", ",", "state", ",", "socket", "=", "'/var/run/haproxy.sock'", ")", ":", "class", "setServerState", "(", "haproxy", ".", "cmds", ".", "Cmd", ",", ")", ":", "cmdTxt", "=", "'set server %(backend)s/%(server)s state %(...
force a servers administrative state to a new state .
train
true
30,394
def image_get(request, image_id): return glanceclient(request).images.get(image_id)
[ "def", "image_get", "(", "request", ",", "image_id", ")", ":", "return", "glanceclient", "(", "request", ")", ".", "images", ".", "get", "(", "image_id", ")" ]
returns an image object populated with metadata for image with supplied identifier .
train
false
30,397
def get_properties_for_a_collection_of_objects(vim, type, obj_list, properties): client_factory = vim.client.factory if (len(obj_list) == 0): return [] prop_spec = get_prop_spec(client_factory, type, properties) lst_obj_specs = [] for obj in obj_list: lst_obj_specs.append(get_obj_spec(client_factory, obj)) prop_filter_spec = get_prop_filter_spec(client_factory, lst_obj_specs, [prop_spec]) options = client_factory.create('ns0:RetrieveOptions') options.maxObjects = CONF.vmware.maximum_objects return vim.RetrievePropertiesEx(vim.service_content.propertyCollector, specSet=[prop_filter_spec], options=options)
[ "def", "get_properties_for_a_collection_of_objects", "(", "vim", ",", "type", ",", "obj_list", ",", "properties", ")", ":", "client_factory", "=", "vim", ".", "client", ".", "factory", "if", "(", "len", "(", "obj_list", ")", "==", "0", ")", ":", "return", ...
gets the list of properties for the collection of objects of the type specified .
train
false
30,399
@task @needs('pavelib.i18n.i18n_extract', 'pavelib.i18n.i18n_transifex_push') @timed def i18n_robot_push(): pass
[ "@", "task", "@", "needs", "(", "'pavelib.i18n.i18n_extract'", ",", "'pavelib.i18n.i18n_transifex_push'", ")", "@", "timed", "def", "i18n_robot_push", "(", ")", ":", "pass" ]
extract new strings .
train
false
30,403
def gettempprefixb(): return _os.fsencode(gettempprefix())
[ "def", "gettempprefixb", "(", ")", ":", "return", "_os", ".", "fsencode", "(", "gettempprefix", "(", ")", ")" ]
the default prefix for temporary directories as bytes .
train
false
30,404
def getExperimentDescriptionInterfaceFromModule(module): result = module.descriptionInterface assert isinstance(result, expdescriptionapi.DescriptionIface), ('expected DescriptionIface-based instance, but got %s' % type(result)) return result
[ "def", "getExperimentDescriptionInterfaceFromModule", "(", "module", ")", ":", "result", "=", "module", ".", "descriptionInterface", "assert", "isinstance", "(", "result", ",", "expdescriptionapi", ".", "DescriptionIface", ")", ",", "(", "'expected DescriptionIface-based ...
module: imported description .
train
false
30,405
def x509_extract_rsa_public_key(cert, binary=False): x509 = x509_parse_cert(cert, binary) return x509.get_pubkey().get_rsa().as_pem()
[ "def", "x509_extract_rsa_public_key", "(", "cert", ",", "binary", "=", "False", ")", ":", "x509", "=", "x509_parse_cert", "(", "cert", ",", "binary", ")", "return", "x509", ".", "get_pubkey", "(", ")", ".", "get_rsa", "(", ")", ".", "as_pem", "(", ")" ]
return the public key from a x509 certificate .
train
false
30,406
def get_filter(doctype, f): from frappe.model import default_fields, optional_fields if isinstance(f, dict): (key, value) = f.items()[0] f = make_filter_tuple(doctype, key, value) if (not isinstance(f, (list, tuple))): frappe.throw(u'Filter must be a tuple or list (in a list)') if (len(f) == 3): f = (doctype, f[0], f[1], f[2]) elif (len(f) != 4): frappe.throw(u'Filter must have 4 values (doctype, fieldname, operator, value): {0}'.format(str(f))) f = frappe._dict(doctype=f[0], fieldname=f[1], operator=f[2], value=f[3]) if (not f.operator): f.operator = u'=' valid_operators = (u'=', u'!=', u'>', u'<', u'>=', u'<=', u'like', u'not like', u'in', u'not in', u'Between') if (f.operator not in valid_operators): frappe.throw(u'Operator must be one of {0}'.format(u', '.join(valid_operators))) if (f.doctype and (f.fieldname not in (default_fields + optional_fields))): meta = frappe.get_meta(f.doctype) if (not meta.has_field(f.fieldname)): for df in meta.get_table_fields(): if frappe.get_meta(df.options).has_field(f.fieldname): f.doctype = df.options break return f
[ "def", "get_filter", "(", "doctype", ",", "f", ")", ":", "from", "frappe", ".", "model", "import", "default_fields", ",", "optional_fields", "if", "isinstance", "(", "f", ",", "dict", ")", ":", "(", "key", ",", "value", ")", "=", "f", ".", "items", "...
resolves f to a filter instance .
train
false
30,407
def star(a, dtype=np.uint8): from . import convex_hull_image if (a == 1): bfilter = np.zeros((3, 3), dtype) bfilter[:] = 1 return bfilter m = ((2 * a) + 1) n = (a // 2) selem_square = np.zeros(((m + (2 * n)), (m + (2 * n)))) selem_square[n:(m + n), n:(m + n)] = 1 c = (((m + (2 * n)) - 1) // 2) selem_rotated = np.zeros(((m + (2 * n)), (m + (2 * n)))) selem_rotated[(0, c)] = selem_rotated[((-1), c)] = 1 selem_rotated[(c, 0)] = selem_rotated[(c, (-1))] = 1 selem_rotated = convex_hull_image(selem_rotated).astype(int) selem = (selem_square + selem_rotated) selem[(selem > 0)] = 1 return selem.astype(dtype)
[ "def", "star", "(", "a", ",", "dtype", "=", "np", ".", "uint8", ")", ":", "from", ".", "import", "convex_hull_image", "if", "(", "a", "==", "1", ")", ":", "bfilter", "=", "np", ".", "zeros", "(", "(", "3", ",", "3", ")", ",", "dtype", ")", "b...
renders the code for displaying a star used for starring items .
train
false
30,408
def _layout_to_graph(layout): result = [([0] * len(layout)) for i in range(len(layout))] G = nx.Graph() stack = [] for i in range(len(layout)): i_level = layout[i] if stack: j = stack[(-1)] j_level = layout[j] while (j_level >= i_level): stack.pop() j = stack[(-1)] j_level = layout[j] G.add_edge(i, j) stack.append(i) return G
[ "def", "_layout_to_graph", "(", "layout", ")", ":", "result", "=", "[", "(", "[", "0", "]", "*", "len", "(", "layout", ")", ")", "for", "i", "in", "range", "(", "len", "(", "layout", ")", ")", "]", "G", "=", "nx", ".", "Graph", "(", ")", "sta...
create a networkx graph for the tree specified by the given layout .
train
false
30,410
def is_loaded(): global _lib return (_lib is not None)
[ "def", "is_loaded", "(", ")", ":", "global", "_lib", "return", "(", "_lib", "is", "not", "None", ")" ]
function to check if opus lib is successfully loaded either via the ctypes .
train
false
30,411
def count_cpus(): return os.sysconf('SC_NPROCESSORS_ONLN')
[ "def", "count_cpus", "(", ")", ":", "return", "os", ".", "sysconf", "(", "'SC_NPROCESSORS_ONLN'", ")" ]
total number of online cpus in the local machine .
train
false
30,412
@task def bootstrap_production(): pass
[ "@", "task", "def", "bootstrap_production", "(", ")", ":", "pass" ]
bootstrap in production server .
train
false
30,413
def test_cleanup_req_satisifed_no_name(script, data): dist = data.packages.join('parent-0.1.tar.gz') script.pip('install', dist) script.pip('install', dist) build = (script.venv_path / 'build') assert (not exists(build)), ('unexpected build/ dir exists: %s' % build) script.assert_no_temp()
[ "def", "test_cleanup_req_satisifed_no_name", "(", "script", ",", "data", ")", ":", "dist", "=", "data", ".", "packages", ".", "join", "(", "'parent-0.1.tar.gz'", ")", "script", ".", "pip", "(", "'install'", ",", "dist", ")", "script", ".", "pip", "(", "'in...
test cleanup when req is already satisfied .
train
false
30,416
def MakeNewResponse(): reference = json.loads(kVerifyResponseRenewedExpired) return json.dumps({'status': 0, 'receipt': reference['receipt']})
[ "def", "MakeNewResponse", "(", ")", ":", "reference", "=", "json", ".", "loads", "(", "kVerifyResponseRenewedExpired", ")", "return", "json", ".", "dumps", "(", "{", "'status'", ":", "0", ",", "'receipt'", ":", "reference", "[", "'receipt'", "]", "}", ")" ...
returns a response for a subscription which has not been renewed or expired .
train
false
30,418
def list_jobs_filter(count, filter_find_job=True, ext_source=None, outputter=None, display_progress=False): returner = _get_returner((__opts__['ext_job_cache'], ext_source, __opts__['master_job_cache'])) if display_progress: __jid_event__.fire_event({'message': 'Querying returner {0} for jobs.'.format(returner)}, 'progress') mminion = salt.minion.MasterMinion(__opts__) fun = '{0}.get_jids_filter'.format(returner) if (fun not in mminion.returners): raise NotImplementedError("'{0}' returner function not implemented yet.".format(fun)) ret = mminion.returners[fun](count, filter_find_job) if outputter: return {'outputter': outputter, 'data': ret} else: return ret
[ "def", "list_jobs_filter", "(", "count", ",", "filter_find_job", "=", "True", ",", "ext_source", "=", "None", ",", "outputter", "=", "None", ",", "display_progress", "=", "False", ")", ":", "returner", "=", "_get_returner", "(", "(", "__opts__", "[", "'ext_j...
list all detectable jobs and associated functions ext_source the external job cache to use .
train
true
30,420
def _is_hidden_dot(path): return os.path.basename(path).startswith('.')
[ "def", "_is_hidden_dot", "(", "path", ")", ":", "return", "os", ".", "path", ".", "basename", "(", "path", ")", ".", "startswith", "(", "'.'", ")" ]
return whether or not a file starts with a dot .
train
false
30,421
def single_selection(): return selection_model().single_selection()
[ "def", "single_selection", "(", ")", ":", "return", "selection_model", "(", ")", ".", "single_selection", "(", ")" ]
scan across staged .
train
false
30,422
def JumpToPreviousWindow():
    """Jump the Vim cursor back to the window that previously had focus."""
    # silent! swallows any error if no previous window exists.
    previous_window_command = u'silent! wincmd p'
    vim.command(previous_window_command)
[ "def", "JumpToPreviousWindow", "(", ")", ":", "vim", ".", "command", "(", "u'silent! wincmd p'", ")" ]
jump the vim cursor to its previous window position .
train
false
30,423
@shared_task()
def subscribe_user_to_basket(instance_id, newsletters=None):
    """Subscribe a UserProfile's user to the given basket newsletters.

    instance_id -- primary key of the UserProfile to subscribe
    newsletters -- iterable of newsletter identifiers; empty/None is a no-op

    Silently returns without doing anything when basket is disabled, the
    profile does not exist, no newsletters were given, or the waffle switch
    BASKET_SWITCH_ENABLED is off.
    """
    from mozillians.users.models import UserProfile
    # Fix: the original used a mutable default argument (newsletters=[]),
    # which is shared across calls; use a None sentinel instead.
    if newsletters is None:
        newsletters = []
    try:
        instance = UserProfile.objects.get(pk=instance_id)
    except UserProfile.DoesNotExist:
        instance = None
    if ((not BASKET_ENABLED) or (not instance) or (not newsletters) or
            (not waffle.switch_is_active('BASKET_SWITCH_ENABLED'))):
        return
    lookup_subtask = lookup_user_task.subtask((instance.user.email,))
    subscribe_subtask = subscribe_user_task.subtask((instance.user.email, newsletters))
    # NOTE(review): the chain runs lookup before subscribe; presumably the
    # lookup resolves the basket user first -- confirm against basket docs.
    chain((lookup_subtask | subscribe_subtask))()
[ "@", "shared_task", "(", ")", "def", "subscribe_user_to_basket", "(", "instance_id", ",", "newsletters", "=", "[", "]", ")", ":", "from", "mozillians", ".", "users", ".", "models", "import", "UserProfile", "try", ":", "instance", "=", "UserProfile", ".", "ob...
subscribe a user to basket .
train
false
30,424
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
def do_pause(cs, args):
    """Pause a server."""
    target_server = _find_server(cs, args.server)
    target_server.pause()
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_pause", "(", "cs", ",", "args", ")", ":", "_find_server", "(", "cs", ",", "args", ".", "server", ...
pause a server .
train
false
30,425
def has_installed(app):
    """Return True when *app* appears in Django's INSTALLED_APPS setting."""
    installed_apps = getattr(settings, 'INSTALLED_APPS', [])
    return app in installed_apps
[ "def", "has_installed", "(", "app", ")", ":", "return", "(", "app", "in", "getattr", "(", "settings", ",", "'INSTALLED_APPS'", ",", "[", "]", ")", ")" ]
returns whether the app is installed in django .
train
false
30,426
@pytest.mark.django_db
def test_data_store_checks(tp0):
    """Check the repr of StoreChecksData created for a fresh store."""
    new_store = StoreDBFactory(name='foo.po', parent=tp0.directory, translation_project=tp0)
    checks_data = StoreChecksData.objects.create(store=new_store)
    expected_repr = '<StoreChecksData: %s>' % new_store.pootle_path
    assert repr(checks_data) == expected_repr
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_data_store_checks", "(", "tp0", ")", ":", "store", "=", "StoreDBFactory", "(", "name", "=", "'foo.po'", ",", "parent", "=", "tp0", ".", "directory", ",", "translation_project", "=", "tp0", ")", "ch...
test the repr of store checks data created for a new store .
train
false