Dataset schema (column name, type, observed range):

id_within_dataset     int64    1 to 55.5k
snippet               string   lengths 19 to 14.2k
tokens                list     lengths 6 to 1.63k
nl                    string   lengths 6 to 352
split_within_dataset  string   1 distinct value
is_duplicated         bool     2 classes
7,547
def _remove_whitespace(s, regex=_RE_WHITESPACE): return regex.sub(' ', s.strip())
[ "def", "_remove_whitespace", "(", "s", ",", "regex", "=", "_RE_WHITESPACE", ")", ":", "return", "regex", ".", "sub", "(", "' '", ",", "s", ".", "strip", "(", ")", ")" ]
replace extra whitespace inside of a string with a single space .
train
false
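A quick usage sketch for the record above. The module-level _RE_WHITESPACE is not shown in the snippet, so the compiled pattern below is an assumption (any run of whitespace):

import re

# assumed definition of the module-level pattern (not shown in the snippet)
_RE_WHITESPACE = re.compile(r'\s+')

def _remove_whitespace(s, regex=_RE_WHITESPACE):
    return regex.sub(' ', s.strip())

print(_remove_whitespace('  a\t b\n  c  '))  # -> 'a b c'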
7,548
def read_gitfile(f):
    cs = f.read()
    if (not cs.startswith('gitdir: ')):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len('gitdir: '):].rstrip('\n')
[ "def", "read_gitfile", "(", "f", ")", ":", "cs", "=", "f", ".", "read", "(", ")", "if", "(", "not", "cs", ".", "startswith", "(", "'gitdir: '", ")", ")", ":", "raise", "ValueError", "(", "\"Expected file to start with 'gitdir: '\"", ")", "return", "cs", "[", "len", "(", "'gitdir: '", ")", ":", "]", ".", "rstrip", "(", "'\\n'", ")" ]
read a .git file and return the gitdir path it points to .
train
false
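A hedged usage sketch: the function takes any file-like object whose contents start with 'gitdir: ', so an in-memory file is enough to exercise it:

from io import StringIO

def read_gitfile(f):
    cs = f.read()
    if not cs.startswith('gitdir: '):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len('gitdir: '):].rstrip('\n')

print(read_gitfile(StringIO('gitdir: ../.git/modules/sub\n')))  # -> '../.git/modules/sub'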
7,549
def set_project_type_handler(project_type, project_type_handler):
    global PROJECT_TYPES
    PROJECT_TYPES[project_type] = project_type_handler
[ "def", "set_project_type_handler", "(", "project_type", ",", "project_type_handler", ")", ":", "global", "PROJECT_TYPES", "PROJECT_TYPES", "[", "project_type", "]", "=", "project_type_handler" ]
set a project type handler for the given project_type .
train
false
7,550
@not_implemented_for('directed')
@not_implemented_for('multigraph')
def modularity_matrix(G, nodelist=None, weight=None):
    if (nodelist is None):
        nodelist = list(G)
    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format='csr')
    k = A.sum(axis=1)
    m = (k.sum() * 0.5)
    X = ((k * k.transpose()) / (2 * m))
    return (A - X)
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "modularity_matrix", "(", "G", ",", "nodelist", "=", "None", ",", "weight", "=", "None", ")", ":", "if", "(", "nodelist", "is", "None", ")", ":", "nodelist", "=", "list", "(", "G", ")", "A", "=", "nx", ".", "to_scipy_sparse_matrix", "(", "G", ",", "nodelist", "=", "nodelist", ",", "weight", "=", "weight", ",", "format", "=", "'csr'", ")", "k", "=", "A", ".", "sum", "(", "axis", "=", "1", ")", "m", "=", "(", "k", ".", "sum", "(", ")", "*", "0.5", ")", "X", "=", "(", "(", "k", "*", "k", ".", "transpose", "(", ")", ")", "/", "(", "2", "*", "m", ")", ")", "return", "(", "A", "-", "X", ")" ]
return the modularity matrix of g .
train
false
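The record above computes the modularity matrix B = A - k k^T / (2m), where A is the adjacency matrix, k the degree vector, and m the number of edges; every row and column of B sums to zero. NetworkX exposes this as the public nx.modularity_matrix, so a minimal sanity check might look like:

import networkx as nx

G = nx.karate_club_graph()
B = nx.modularity_matrix(G)   # B = A - k k^T / (2m)
print(B.shape)                # (34, 34)
print(round(B.sum(), 10))     # ~0: rows and columns of B sum to zero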
7,551
def temp_install(html=False, del_module=None):
    if html:
        Checker = LHTMLOutputChecker
    else:
        Checker = LXMLOutputChecker
    frame = _find_doctest_frame()
    dt_self = frame.f_locals['self']
    checker = Checker()
    old_checker = dt_self._checker
    dt_self._checker = checker
    if _IS_PYTHON_3:
        check_func = frame.f_locals['check'].__func__
        checker_check_func = checker.check_output.__func__
    else:
        check_func = frame.f_locals['check'].im_func
        checker_check_func = checker.check_output.im_func
    doctest.etree = etree
    _RestoreChecker(dt_self, old_checker, checker, check_func, checker_check_func, del_module)
[ "def", "temp_install", "(", "html", "=", "False", ",", "del_module", "=", "None", ")", ":", "if", "html", ":", "Checker", "=", "LHTMLOutputChecker", "else", ":", "Checker", "=", "LXMLOutputChecker", "frame", "=", "_find_doctest_frame", "(", ")", "dt_self", "=", "frame", ".", "f_locals", "[", "'self'", "]", "checker", "=", "Checker", "(", ")", "old_checker", "=", "dt_self", ".", "_checker", "dt_self", ".", "_checker", "=", "checker", "if", "_IS_PYTHON_3", ":", "check_func", "=", "frame", ".", "f_locals", "[", "'check'", "]", ".", "__func__", "checker_check_func", "=", "checker", ".", "check_output", ".", "__func__", "else", ":", "check_func", "=", "frame", ".", "f_locals", "[", "'check'", "]", ".", "im_func", "checker_check_func", "=", "checker", ".", "check_output", ".", "im_func", "doctest", ".", "etree", "=", "etree", "_RestoreChecker", "(", "dt_self", ",", "old_checker", ",", "checker", ",", "check_func", ",", "checker_check_func", ",", "del_module", ")" ]
temporary install for convenience .
train
false
7,552
@register.filter()
def getlist(value, arg):
    return value.getlist(arg)
[ "@", "register", ".", "filter", "(", ")", "def", "getlist", "(", "value", ",", "arg", ")", ":", "return", "value", ".", "getlist", "(", "arg", ")" ]
return all values of a querydict key .
train
false
7,554
def _gf_pow_pnm1d2(f, n, g, b, p, K):
    f = gf_rem(f, g, p, K)
    h = f
    r = f
    for i in range(1, n):
        h = gf_frobenius_map(h, g, b, p, K)
        r = gf_mul(r, h, p, K)
        r = gf_rem(r, g, p, K)
    res = gf_pow_mod(r, ((p - 1) // 2), g, p, K)
    return res
[ "def", "_gf_pow_pnm1d2", "(", "f", ",", "n", ",", "g", ",", "b", ",", "p", ",", "K", ")", ":", "f", "=", "gf_rem", "(", "f", ",", "g", ",", "p", ",", "K", ")", "h", "=", "f", "r", "=", "f", "for", "i", "in", "range", "(", "1", ",", "n", ")", ":", "h", "=", "gf_frobenius_map", "(", "h", ",", "g", ",", "b", ",", "p", ",", "K", ")", "r", "=", "gf_mul", "(", "r", ",", "h", ",", "p", ",", "K", ")", "r", "=", "gf_rem", "(", "r", ",", "g", ",", "p", ",", "K", ")", "res", "=", "gf_pow_mod", "(", "r", ",", "(", "(", "p", "-", "1", ")", "//", "2", ")", ",", "g", ",", "p", ",", "K", ")", "return", "res" ]
utility function for gf_edf_zassenhaus : compute f**((p**n - 1) // 2) in gf(p)[x]/(g) , using f**((p**n - 1) // 2) = (f * f**p * ... * f**(p**(n - 1)))**((p - 1) // 2) .
train
false
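The reconstructed formula above rests on the factorization p**n - 1 = (1 + p + ... + p**(n-1)) * (p - 1), which is exactly what the Frobenius loop exploits. The same exponent identity can be checked with plain modular arithmetic over integers:

# check the exponent identity behind _gf_pow_pnm1d2 over the integers mod p:
# a**((p**n - 1)//2) == (a**(1 + p + ... + p**(n-1)))**((p - 1)//2)  (mod p)
p, n, a = 7, 3, 5
lhs = pow(a, (p ** n - 1) // 2, p)
rhs = pow(pow(a, sum(p ** i for i in range(n)), p), (p - 1) // 2, p)
print(lhs, rhs)  # both 6 -- the two exponents are equal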
7,556
def asanyarray(a, dtype=None): return cupy.asarray(a, dtype)
[ "def", "asanyarray", "(", "a", ",", "dtype", "=", "None", ")", ":", "return", "cupy", ".", "asarray", "(", "a", ",", "dtype", ")" ]
converts an object to array .
train
false
7,558
def _concatenate_coils(coils):
    rmags = np.concatenate([coil['rmag'] for coil in coils])
    cosmags = np.concatenate([coil['cosmag'] for coil in coils])
    ws = np.concatenate([coil['w'] for coil in coils])
    n_int = np.array([len(coil['rmag']) for coil in coils])
    if (n_int[(-1)] == 0):
        raise RuntimeError('not supported')
    bins = np.repeat(np.arange(len(n_int)), n_int)
    return (rmags, cosmags, ws, bins)
[ "def", "_concatenate_coils", "(", "coils", ")", ":", "rmags", "=", "np", ".", "concatenate", "(", "[", "coil", "[", "'rmag'", "]", "for", "coil", "in", "coils", "]", ")", "cosmags", "=", "np", ".", "concatenate", "(", "[", "coil", "[", "'cosmag'", "]", "for", "coil", "in", "coils", "]", ")", "ws", "=", "np", ".", "concatenate", "(", "[", "coil", "[", "'w'", "]", "for", "coil", "in", "coils", "]", ")", "n_int", "=", "np", ".", "array", "(", "[", "len", "(", "coil", "[", "'rmag'", "]", ")", "for", "coil", "in", "coils", "]", ")", "if", "(", "n_int", "[", "(", "-", "1", ")", "]", "==", "0", ")", ":", "raise", "RuntimeError", "(", "'not supported'", ")", "bins", "=", "np", ".", "repeat", "(", "np", ".", "arange", "(", "len", "(", "n_int", ")", ")", ",", "n_int", ")", "return", "(", "rmags", ",", "cosmags", ",", "ws", ",", "bins", ")" ]
helper to concatenate meg coil parameters .
train
false
7,559
def bulk_upload():
    s3.stylesheets.append('plugins/fileuploader.css')
    return dict()
[ "def", "bulk_upload", "(", ")", ":", "s3", ".", "stylesheets", ".", "append", "(", "'plugins/fileuploader.css'", ")", "return", "dict", "(", ")" ]
custom view to allow bulk uploading of photos @todo: allow creation of a gis feature layer to view on the map @todo: allow uploading of associated gpx track for timestamp correlation .
train
false
7,560
def MakeFreshResponse():
    reference = json.loads(kVerifyResponseRenewedExpired)
    new = {'status': 0, 'receipt': reference['receipt']}
    new['receipt']['expires_date'] = (1000.0 * (time.time() + datetime.timedelta(days=28).total_seconds()))
    return json.dumps(new)
[ "def", "MakeFreshResponse", "(", ")", ":", "reference", "=", "json", ".", "loads", "(", "kVerifyResponseRenewedExpired", ")", "new", "=", "{", "'status'", ":", "0", ",", "'receipt'", ":", "reference", "[", "'receipt'", "]", "}", "new", "[", "'receipt'", "]", "[", "'expires_date'", "]", "=", "(", "1000.0", "*", "(", "time", ".", "time", "(", ")", "+", "datetime", ".", "timedelta", "(", "days", "=", "28", ")", ".", "total_seconds", "(", ")", ")", ")", "return", "json", ".", "dumps", "(", "new", ")" ]
returns a response for a subscription which has an expiration date in the future .
train
false
7,561
@ckan.logic.validate(ckan.logic.schema.job_clear_schema)
def job_clear(context, data_dict):
    _check_access(u'job_clear', context, data_dict)
    queues = data_dict.get(u'queues')
    if queues:
        queues = [jobs.get_queue(q) for q in queues]
    else:
        queues = jobs.get_all_queues()
    names = [jobs.remove_queue_name_prefix(queue.name) for queue in queues]
    for (queue, name) in zip(queues, names):
        queue.empty()
        log.info(u'Cleared background job queue "{}"'.format(name))
    return names
[ "@", "ckan", ".", "logic", ".", "validate", "(", "ckan", ".", "logic", ".", "schema", ".", "job_clear_schema", ")", "def", "job_clear", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "u'job_clear'", ",", "context", ",", "data_dict", ")", "queues", "=", "data_dict", ".", "get", "(", "u'queues'", ")", "if", "queues", ":", "queues", "=", "[", "jobs", ".", "get_queue", "(", "q", ")", "for", "q", "in", "queues", "]", "else", ":", "queues", "=", "jobs", ".", "get_all_queues", "(", ")", "names", "=", "[", "jobs", ".", "remove_queue_name_prefix", "(", "queue", ".", "name", ")", "for", "queue", "in", "queues", "]", "for", "(", "queue", ",", "name", ")", "in", "zip", "(", "queues", ",", "names", ")", ":", "queue", ".", "empty", "(", ")", "log", ".", "info", "(", "u'Cleared background job queue \"{}\"'", ".", "format", "(", "name", ")", ")", "return", "names" ]
clear background jobs .
train
false
7,562
@snippet
def sink_pubsub(client, to_delete):
    topic = _sink_pubsub_setup(client)
    to_delete.append(topic)
    SINK_NAME = ('robots-pubsub-%d' % (_millis(),))
    FILTER = 'logName:apache-access AND textPayload:robot'
    UPDATED_FILTER = 'textPayload:robot'
    DESTINATION = ('pubsub.googleapis.com/%s' % (topic.full_name,))
    sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION)
    assert (not sink.exists())
    sink.create()
    assert sink.exists()
    to_delete.insert(0, sink)
    for sink in client.list_sinks():
        do_something_with(sink)
    existing_sink = client.sink(SINK_NAME)
    existing_sink.reload()
    assert (existing_sink.filter_ == FILTER)
    assert (existing_sink.destination == DESTINATION)
    existing_sink.filter_ = UPDATED_FILTER
    existing_sink.update()
    existing_sink.reload()
    assert (existing_sink.filter_ == UPDATED_FILTER)
    sink.delete()
    to_delete.pop(0)
[ "@", "snippet", "def", "sink_pubsub", "(", "client", ",", "to_delete", ")", ":", "topic", "=", "_sink_pubsub_setup", "(", "client", ")", "to_delete", ".", "append", "(", "topic", ")", "SINK_NAME", "=", "(", "'robots-pubsub-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "FILTER", "=", "'logName:apache-access AND textPayload:robot'", "UPDATED_FILTER", "=", "'textPayload:robot'", "DESTINATION", "=", "(", "'pubsub.googleapis.com/%s'", "%", "(", "topic", ".", "full_name", ",", ")", ")", "sink", "=", "client", ".", "sink", "(", "SINK_NAME", ",", "filter_", "=", "FILTER", ",", "destination", "=", "DESTINATION", ")", "assert", "(", "not", "sink", ".", "exists", "(", ")", ")", "sink", ".", "create", "(", ")", "assert", "sink", ".", "exists", "(", ")", "to_delete", ".", "insert", "(", "0", ",", "sink", ")", "for", "sink", "in", "client", ".", "list_sinks", "(", ")", ":", "do_something_with", "(", "sink", ")", "existing_sink", "=", "client", ".", "sink", "(", "SINK_NAME", ")", "existing_sink", ".", "reload", "(", ")", "assert", "(", "existing_sink", ".", "filter_", "==", "FILTER", ")", "assert", "(", "existing_sink", ".", "destination", "==", "DESTINATION", ")", "existing_sink", ".", "filter_", "=", "UPDATED_FILTER", "existing_sink", ".", "update", "(", ")", "existing_sink", ".", "reload", "(", ")", "assert", "(", "existing_sink", ".", "filter_", "==", "UPDATED_FILTER", ")", "sink", ".", "delete", "(", ")", "to_delete", ".", "pop", "(", "0", ")" ]
sink log entries to pubsub .
train
true
7,563
@gen.engine
def _Run(callback):
    job_set = options.options.job_set
    assert (job_set in kJobSets.keys()), ('--job_set must be one of %r' % kJobSets.keys())
    jobs = kJobSets[job_set]
    client = db_client.DBClient.Instance()
    job = Job(client, job_set)
    got_lock = (yield gen.Task(job.AcquireLock))
    if (not got_lock):
        logging.warning('Failed to acquire job lock: exiting.')
        callback()
        return
    try:
        for (title, args) in jobs:
            logging.info(('[%s] running %s' % (title, ' '.join(args))))
            proc = process.Subprocess(args, io_loop=ioloop.IOLoop.instance())
            code = (yield gen.Task(proc.set_exit_callback))
            logging.info(('[%s] finished with code: %r' % (title, code)))
    except:
        logging.error(traceback.format_exc())
    finally:
        (yield gen.Task(job.ReleaseLock))
        callback()
[ "@", "gen", ".", "engine", "def", "_Run", "(", "callback", ")", ":", "job_set", "=", "options", ".", "options", ".", "job_set", "assert", "(", "job_set", "in", "kJobSets", ".", "keys", "(", ")", ")", ",", "(", "'--job_set must be one of %r'", "%", "kJobSets", ".", "keys", "(", ")", ")", "jobs", "=", "kJobSets", "[", "job_set", "]", "client", "=", "db_client", ".", "DBClient", ".", "Instance", "(", ")", "job", "=", "Job", "(", "client", ",", "job_set", ")", "got_lock", "=", "(", "yield", "gen", ".", "Task", "(", "job", ".", "AcquireLock", ")", ")", "if", "(", "not", "got_lock", ")", ":", "logging", ".", "warning", "(", "'Failed to acquire job lock: exiting.'", ")", "callback", "(", ")", "return", "try", ":", "for", "(", "title", ",", "args", ")", "in", "jobs", ":", "logging", ".", "info", "(", "(", "'[%s] running %s'", "%", "(", "title", ",", "' '", ".", "join", "(", "args", ")", ")", ")", ")", "proc", "=", "process", ".", "Subprocess", "(", "args", ",", "io_loop", "=", "ioloop", ".", "IOLoop", ".", "instance", "(", ")", ")", "code", "=", "(", "yield", "gen", ".", "Task", "(", "proc", ".", "set_exit_callback", ")", ")", "logging", ".", "info", "(", "(", "'[%s] finished with code: %r'", "%", "(", "title", ",", "code", ")", ")", ")", "except", ":", "logging", ".", "error", "(", "traceback", ".", "format_exc", "(", ")", ")", "finally", ":", "(", "yield", "gen", ".", "Task", "(", "job", ".", "ReleaseLock", ")", ")", "callback", "(", ")" ]
grab the lock and run all commands as subprocesses .
train
false
7,567
@synchronized(DIR_LOCK)
def get_unique_path(dirpath, n=0, create_dir=True):
    if (not check_mount(dirpath)):
        return dirpath
    path = dirpath
    if n:
        path = ('%s.%s' % (dirpath, n))
    if (not os.path.exists(path)):
        if create_dir:
            return create_dirs(path)
        else:
            return path
    else:
        return get_unique_path(dirpath, n=(n + 1), create_dir=create_dir)
[ "@", "synchronized", "(", "DIR_LOCK", ")", "def", "get_unique_path", "(", "dirpath", ",", "n", "=", "0", ",", "create_dir", "=", "True", ")", ":", "if", "(", "not", "check_mount", "(", "dirpath", ")", ")", ":", "return", "dirpath", "path", "=", "dirpath", "if", "n", ":", "path", "=", "(", "'%s.%s'", "%", "(", "dirpath", ",", "n", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "if", "create_dir", ":", "return", "create_dirs", "(", "path", ")", "else", ":", "return", "path", "else", ":", "return", "get_unique_path", "(", "dirpath", ",", "n", "=", "(", "n", "+", "1", ")", ",", "create_dir", "=", "create_dir", ")" ]
determine a unique folder or filename .
train
false
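The recursion above just probes dirpath, dirpath.1, dirpath.2, ... until a free name is found. A minimal iterative sketch of the same scheme, without the locking, mount check, or directory creation:

import os

def unique_path_sketch(dirpath):
    # probe dirpath, dirpath.1, dirpath.2, ... until one does not exist
    n = 0
    path = dirpath
    while os.path.exists(path):
        n += 1
        path = '%s.%s' % (dirpath, n)
    return path

print(unique_path_sketch('/tmp/job'))  # e.g. '/tmp/job', or '/tmp/job.1' if taken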
7,568
def process_args(args, defaults, description):
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-r', '--rom', dest='rom', default=defaults.ROM, help='ROM to run (default: %(default)s)')
    parser.add_argument('-e', '--epochs', dest='epochs', type=int, default=defaults.EPOCHS, help='Number of training epochs (default: %(default)s)')
    parser.add_argument('-s', '--steps-per-epoch', dest='steps_per_epoch', type=int, default=defaults.STEPS_PER_EPOCH, help='Number of steps per epoch (default: %(default)s)')
    parser.add_argument('-t', '--test-length', dest='steps_per_test', type=int, default=defaults.STEPS_PER_TEST, help='Number of steps per test (default: %(default)s)')
    parser.add_argument('--display-screen', dest='display_screen', action='store_true', default=False, help='Show the game screen.')
    parser.add_argument('--experiment-prefix', dest='experiment_prefix', default=None, help='Experiment name prefix (default is the name of the game)')
    parser.add_argument('--frame-skip', dest='frame_skip', default=defaults.FRAME_SKIP, type=int, help='Every how many frames to process (default: %(default)s)')
    parser.add_argument('--repeat-action-probability', dest='repeat_action_probability', default=defaults.REPEAT_ACTION_PROBABILITY, type=float, help=('Probability that action choice will be ' + 'ignored (default: %(default)s)'))
    parser.add_argument('--update-rule', dest='update_rule', type=str, default=defaults.UPDATE_RULE, help=('deepmind_rmsprop|rmsprop|sgd ' + '(default: %(default)s)'))
    parser.add_argument('--batch-accumulator', dest='batch_accumulator', type=str, default=defaults.BATCH_ACCUMULATOR, help='sum|mean (default: %(default)s)')
    parser.add_argument('--learning-rate', dest='learning_rate', type=float, default=defaults.LEARNING_RATE, help='Learning rate (default: %(default)s)')
    parser.add_argument('--rms-decay', dest='rms_decay', type=float, default=defaults.RMS_DECAY, help='Decay rate for rms_prop (default: %(default)s)')
    parser.add_argument('--rms-epsilon', dest='rms_epsilon', type=float, default=defaults.RMS_EPSILON, help=('Denominator epsilson for rms_prop ' + '(default: %(default)s)'))
    parser.add_argument('--momentum', type=float, default=defaults.MOMENTUM, help=('Momentum term for Nesterov momentum. ' + '(default: %(default)s)'))
    parser.add_argument('--clip-delta', dest='clip_delta', type=float, default=defaults.CLIP_DELTA, help=('Max absolute value for Q-update delta value. ' + '(default: %(default)s)'))
    parser.add_argument('--discount', type=float, default=defaults.DISCOUNT, help='Discount rate')
    parser.add_argument('--epsilon-start', dest='epsilon_start', type=float, default=defaults.EPSILON_START, help=('Starting value for epsilon. ' + '(default: %(default)s)'))
    parser.add_argument('--epsilon-min', dest='epsilon_min', type=float, default=defaults.EPSILON_MIN, help='Minimum epsilon. (default: %(default)s)')
    parser.add_argument('--epsilon-decay', dest='epsilon_decay', type=float, default=defaults.EPSILON_DECAY, help=('Number of steps to minimum epsilon. ' + '(default: %(default)s)'))
    parser.add_argument('--phi-length', dest='phi_length', type=int, default=defaults.PHI_LENGTH, help=('Number of recent frames used to represent ' + 'state. (default: %(default)s)'))
    parser.add_argument('--max-history', dest='replay_memory_size', type=int, default=defaults.REPLAY_MEMORY_SIZE, help=('Maximum number of steps stored in replay ' + 'memory. (default: %(default)s)'))
    parser.add_argument('--batch-size', dest='batch_size', type=int, default=defaults.BATCH_SIZE, help='Batch size. (default: %(default)s)')
    parser.add_argument('--network-type', dest='network_type', type=str, default=defaults.NETWORK_TYPE, help=('nips_cuda|nips_dnn|nature_cuda|nature_dnn' + '|linear (default: %(default)s)'))
    parser.add_argument('--freeze-interval', dest='freeze_interval', type=int, default=defaults.FREEZE_INTERVAL, help=('Interval between target freezes. ' + '(default: %(default)s)'))
    parser.add_argument('--update-frequency', dest='update_frequency', type=int, default=defaults.UPDATE_FREQUENCY, help=('Number of actions before each SGD update. ' + '(default: %(default)s)'))
    parser.add_argument('--replay-start-size', dest='replay_start_size', type=int, default=defaults.REPLAY_START_SIZE, help=('Number of random steps before training. ' + '(default: %(default)s)'))
    parser.add_argument('--resize-method', dest='resize_method', type=str, default=defaults.RESIZE_METHOD, help='crop|scale (default: %(default)s)')
    parser.add_argument('--nn-file', dest='nn_file', type=str, default=None, help='Pickle file containing trained net.')
    parser.add_argument('--death-ends-episode', dest='death_ends_episode', type=str, default=defaults.DEATH_ENDS_EPISODE, help='true|false (default: %(default)s)')
    parser.add_argument('--max-start-nullops', dest='max_start_nullops', type=int, default=defaults.MAX_START_NULLOPS, help=('Maximum number of null-ops at the start ' + 'of games. (default: %(default)s)'))
    parser.add_argument('--deterministic', dest='deterministic', type=bool, default=defaults.DETERMINISTIC, help=('Whether to use deterministic parameters ' + 'for learning. (default: %(default)s)'))
    parser.add_argument('--cudnn_deterministic', dest='cudnn_deterministic', type=bool, default=defaults.CUDNN_DETERMINISTIC, help=('Whether to use deterministic backprop. ' + '(default: %(default)s)'))
    parameters = parser.parse_args(args)
    if (parameters.experiment_prefix is None):
        name = os.path.splitext(os.path.basename(parameters.rom))[0]
        parameters.experiment_prefix = name
    if (parameters.death_ends_episode == 'true'):
        parameters.death_ends_episode = True
    elif (parameters.death_ends_episode == 'false'):
        parameters.death_ends_episode = False
    else:
        raise ValueError('--death-ends-episode must be true or false')
    if (parameters.freeze_interval > 0):
        parameters.freeze_interval = (parameters.freeze_interval // parameters.update_frequency)
    return parameters
[ "def", "process_args", "(", "args", ",", "defaults", ",", "description", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "description", ")", "parser", ".", "add_argument", "(", "'-r'", ",", "'--rom'", ",", "dest", "=", "'rom'", ",", "default", "=", "defaults", ".", "ROM", ",", "help", "=", "'ROM to run (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'-e'", ",", "'--epochs'", ",", "dest", "=", "'epochs'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "EPOCHS", ",", "help", "=", "'Number of training epochs (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--steps-per-epoch'", ",", "dest", "=", "'steps_per_epoch'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "STEPS_PER_EPOCH", ",", "help", "=", "'Number of steps per epoch (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "'--test-length'", ",", "dest", "=", "'steps_per_test'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "STEPS_PER_TEST", ",", "help", "=", "'Number of steps per test (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--display-screen'", ",", "dest", "=", "'display_screen'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'Show the game screen.'", ")", "parser", ".", "add_argument", "(", "'--experiment-prefix'", ",", "dest", "=", "'experiment_prefix'", ",", "default", "=", "None", ",", "help", "=", "'Experiment name prefix (default is the name of the game)'", ")", "parser", ".", "add_argument", "(", "'--frame-skip'", ",", "dest", "=", "'frame_skip'", ",", "default", "=", "defaults", ".", "FRAME_SKIP", ",", "type", "=", "int", ",", "help", "=", "'Every how many frames to process (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--repeat-action-probability'", ",", "dest", "=", "'repeat_action_probability'", ",", "default", "=", "defaults", ".", "REPEAT_ACTION_PROBABILITY", ",", "type", "=", "float", ",", "help", "=", "(", "'Probability that action choice will be '", "+", "'ignored (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--update-rule'", ",", "dest", "=", "'update_rule'", ",", "type", "=", "str", ",", "default", "=", "defaults", ".", "UPDATE_RULE", ",", "help", "=", "(", "'deepmind_rmsprop|rmsprop|sgd '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--batch-accumulator'", ",", "dest", "=", "'batch_accumulator'", ",", "type", "=", "str", ",", "default", "=", "defaults", ".", "BATCH_ACCUMULATOR", ",", "help", "=", "'sum|mean (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--learning-rate'", ",", "dest", "=", "'learning_rate'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "LEARNING_RATE", ",", "help", "=", "'Learning rate (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--rms-decay'", ",", "dest", "=", "'rms_decay'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "RMS_DECAY", ",", "help", "=", "'Decay rate for rms_prop (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--rms-epsilon'", ",", "dest", "=", "'rms_epsilon'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "RMS_EPSILON", ",", "help", "=", "(", "'Denominator epsilson for rms_prop '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--momentum'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", 
"MOMENTUM", ",", "help", "=", "(", "'Momentum term for Nesterov momentum. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--clip-delta'", ",", "dest", "=", "'clip_delta'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "CLIP_DELTA", ",", "help", "=", "(", "'Max absolute value for Q-update delta value. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--discount'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "DISCOUNT", ",", "help", "=", "'Discount rate'", ")", "parser", ".", "add_argument", "(", "'--epsilon-start'", ",", "dest", "=", "'epsilon_start'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "EPSILON_START", ",", "help", "=", "(", "'Starting value for epsilon. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--epsilon-min'", ",", "dest", "=", "'epsilon_min'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "EPSILON_MIN", ",", "help", "=", "'Minimum epsilon. (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--epsilon-decay'", ",", "dest", "=", "'epsilon_decay'", ",", "type", "=", "float", ",", "default", "=", "defaults", ".", "EPSILON_DECAY", ",", "help", "=", "(", "'Number of steps to minimum epsilon. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--phi-length'", ",", "dest", "=", "'phi_length'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "PHI_LENGTH", ",", "help", "=", "(", "'Number of recent frames used to represent '", "+", "'state. (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--max-history'", ",", "dest", "=", "'replay_memory_size'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "REPLAY_MEMORY_SIZE", ",", "help", "=", "(", "'Maximum number of steps stored in replay '", "+", "'memory. (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--batch-size'", ",", "dest", "=", "'batch_size'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "BATCH_SIZE", ",", "help", "=", "'Batch size. (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--network-type'", ",", "dest", "=", "'network_type'", ",", "type", "=", "str", ",", "default", "=", "defaults", ".", "NETWORK_TYPE", ",", "help", "=", "(", "'nips_cuda|nips_dnn|nature_cuda|nature_dnn'", "+", "'|linear (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--freeze-interval'", ",", "dest", "=", "'freeze_interval'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "FREEZE_INTERVAL", ",", "help", "=", "(", "'Interval between target freezes. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--update-frequency'", ",", "dest", "=", "'update_frequency'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "UPDATE_FREQUENCY", ",", "help", "=", "(", "'Number of actions before each SGD update. '", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--replay-start-size'", ",", "dest", "=", "'replay_start_size'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "REPLAY_START_SIZE", ",", "help", "=", "(", "'Number of random steps before training. 
'", "+", "'(default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--resize-method'", ",", "dest", "=", "'resize_method'", ",", "type", "=", "str", ",", "default", "=", "defaults", ".", "RESIZE_METHOD", ",", "help", "=", "'crop|scale (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--nn-file'", ",", "dest", "=", "'nn_file'", ",", "type", "=", "str", ",", "default", "=", "None", ",", "help", "=", "'Pickle file containing trained net.'", ")", "parser", ".", "add_argument", "(", "'--death-ends-episode'", ",", "dest", "=", "'death_ends_episode'", ",", "type", "=", "str", ",", "default", "=", "defaults", ".", "DEATH_ENDS_EPISODE", ",", "help", "=", "'true|false (default: %(default)s)'", ")", "parser", ".", "add_argument", "(", "'--max-start-nullops'", ",", "dest", "=", "'max_start_nullops'", ",", "type", "=", "int", ",", "default", "=", "defaults", ".", "MAX_START_NULLOPS", ",", "help", "=", "(", "'Maximum number of null-ops at the start '", "+", "'of games. (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--deterministic'", ",", "dest", "=", "'deterministic'", ",", "type", "=", "bool", ",", "default", "=", "defaults", ".", "DETERMINISTIC", ",", "help", "=", "(", "'Whether to use deterministic parameters '", "+", "'for learning. (default: %(default)s)'", ")", ")", "parser", ".", "add_argument", "(", "'--cudnn_deterministic'", ",", "dest", "=", "'cudnn_deterministic'", ",", "type", "=", "bool", ",", "default", "=", "defaults", ".", "CUDNN_DETERMINISTIC", ",", "help", "=", "(", "'Whether to use deterministic backprop. '", "+", "'(default: %(default)s)'", ")", ")", "parameters", "=", "parser", ".", "parse_args", "(", "args", ")", "if", "(", "parameters", ".", "experiment_prefix", "is", "None", ")", ":", "name", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "parameters", ".", "rom", ")", ")", "[", "0", "]", "parameters", ".", "experiment_prefix", "=", "name", "if", "(", "parameters", ".", "death_ends_episode", "==", "'true'", ")", ":", "parameters", ".", "death_ends_episode", "=", "True", "elif", "(", "parameters", ".", "death_ends_episode", "==", "'false'", ")", ":", "parameters", ".", "death_ends_episode", "=", "False", "else", ":", "raise", "ValueError", "(", "'--death-ends-episode must be true or false'", ")", "if", "(", "parameters", ".", "freeze_interval", ">", "0", ")", ":", "parameters", ".", "freeze_interval", "=", "(", "parameters", ".", "freeze_interval", "//", "parameters", ".", "update_frequency", ")", "return", "parameters" ]
parse a list of command-line argument strings with argparse , filling in the given defaults .
train
false
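One pitfall in the record above: argparse's type=bool (used for --deterministic and --cudnn_deterministic) does not actually parse booleans; any non-empty string, including 'False', is truthy. That is presumably why --death-ends-episode is declared as a string and converted by hand. A quick demonstration:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--deterministic', type=bool, default=True)
args = parser.parse_args(['--deterministic', 'False'])
print(args.deterministic)  # True -- bool('False') is True for any non-empty string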
7,569
def take_action(name=None, call=None, command=None, data=None, method='GET', location=DEFAULT_LOCATION):
    caller = inspect.stack()[1][3]
    if (call != 'action'):
        raise SaltCloudSystemExit('This action must be called with -a or --action.')
    if data:
        data = json.dumps(data)
    ret = []
    try:
        ret = query(command=command, data=data, method=method, location=location)
        log.info('Success {0} for node {1}'.format(caller, name))
    except Exception as exc:
        if ('InvalidState' in str(exc)):
            ret = [200, {}]
        else:
            log.error('Failed to invoke {0} node {1}: {2}'.format(caller, name, exc), exc_info_on_loglevel=logging.DEBUG)
            ret = [100, {}]
    return ret
[ "def", "take_action", "(", "name", "=", "None", ",", "call", "=", "None", ",", "command", "=", "None", ",", "data", "=", "None", ",", "method", "=", "'GET'", ",", "location", "=", "DEFAULT_LOCATION", ")", ":", "caller", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "[", "3", "]", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'This action must be called with -a or --action.'", ")", "if", "data", ":", "data", "=", "json", ".", "dumps", "(", "data", ")", "ret", "=", "[", "]", "try", ":", "ret", "=", "query", "(", "command", "=", "command", ",", "data", "=", "data", ",", "method", "=", "method", ",", "location", "=", "location", ")", "log", ".", "info", "(", "'Success {0} for node {1}'", ".", "format", "(", "caller", ",", "name", ")", ")", "except", "Exception", "as", "exc", ":", "if", "(", "'InvalidState'", "in", "str", "(", "exc", ")", ")", ":", "ret", "=", "[", "200", ",", "{", "}", "]", "else", ":", "log", ".", "error", "(", "'Failed to invoke {0} node {1}: {2}'", ".", "format", "(", "caller", ",", "name", ",", "exc", ")", ",", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")", "ret", "=", "[", "100", ",", "{", "}", "]", "return", "ret" ]
take action call used by start .
train
true
7,571
def decomposeGlyph(font, glyphName):
    glyph = font[glyphName]
    deepCopyContours(font, glyph, glyph, (0, 0), (1, 1))
    glyph.clearComponents()
[ "def", "decomposeGlyph", "(", "font", ",", "glyphName", ")", ":", "glyph", "=", "font", "[", "glyphName", "]", "deepCopyContours", "(", "font", ",", "glyph", ",", "glyph", ",", "(", "0", ",", "0", ")", ",", "(", "1", ",", "1", ")", ")", "glyph", ".", "clearComponents", "(", ")" ]
moves the components of a glyph to its outline .
train
false
7,572
def cross_entropy_bernoulli(mat, p, target=None, tiny=1e-10):
    if (not target):
        target = mat
    if isinstance(p, CUDAMatrix):
        err_code = _cudamat.compute_cross_entropy_bernoulli(mat.p_mat, p.p_mat, target.p_mat, ct.c_float(tiny))
    else:
        raise ValueError, 'Value must be of type CUDAMatrix.'
    if err_code:
        raise generate_exception(err_code)
    return target
[ "def", "cross_entropy_bernoulli", "(", "mat", ",", "p", ",", "target", "=", "None", ",", "tiny", "=", "1e-10", ")", ":", "if", "(", "not", "target", ")", ":", "target", "=", "mat", "if", "isinstance", "(", "p", ",", "CUDAMatrix", ")", ":", "err_code", "=", "_cudamat", ".", "compute_cross_entropy_bernoulli", "(", "mat", ".", "p_mat", ",", "p", ".", "p_mat", ",", "target", ".", "p_mat", ",", "ct", ".", "c_float", "(", "tiny", ")", ")", "else", ":", "raise", "ValueError", ",", "'Value must be of type CUDAMatrix.'", "if", "err_code", ":", "raise", "generate_exception", "(", "err_code", ")", "return", "target" ]
compute -mat*log(p) - (1 - mat)*log(1 - p) .
train
false
7,573
def patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs):
    (getter, attribute) = _get_target(target)
    return _patch(getter, attribute, new, spec, create, spec_set, autospec, new_callable, kwargs)
[ "def", "patch", "(", "target", ",", "new", "=", "DEFAULT", ",", "spec", "=", "None", ",", "create", "=", "False", ",", "spec_set", "=", "None", ",", "autospec", "=", "None", ",", "new_callable", "=", "None", ",", "**", "kwargs", ")", ":", "(", "getter", ",", "attribute", ")", "=", "_get_target", "(", "target", ")", "return", "_patch", "(", "getter", ",", "attribute", ",", "new", ",", "spec", ",", "create", ",", "spec_set", ",", "autospec", ",", "new_callable", ",", "kwargs", ")" ]
patch the given target with a new object ; usable as a decorator or context manager .
train
true
7,574
def load_module_sections(trans):
    module_sections = {}
    module_sections['inputs'] = {
        'name': 'inputs',
        'title': 'Inputs',
        'modules': [
            {'name': 'data_input', 'title': 'Input Dataset', 'description': 'Input dataset'},
            {'name': 'data_collection_input', 'title': 'Input Dataset Collection', 'description': 'Input dataset collection'},
        ],
    }
    if trans.app.config.enable_beta_workflow_modules:
        module_sections['experimental'] = {
            'name': 'experimental',
            'title': 'Experimental',
            'modules': [
                {'name': 'pause', 'title': 'Pause Workflow for Dataset Review', 'description': 'Pause for Review'},
                {'name': 'parameter_input', 'title': 'Parameter Input', 'description': 'Simple inputs used for workflow logic'},
            ],
        }
    return module_sections
[ "def", "load_module_sections", "(", "trans", ")", ":", "module_sections", "=", "{", "}", "module_sections", "[", "'inputs'", "]", "=", "{", "'name'", ":", "'inputs'", ",", "'title'", ":", "'Inputs'", ",", "'modules'", ":", "[", "{", "'name'", ":", "'data_input'", ",", "'title'", ":", "'Input Dataset'", ",", "'description'", ":", "'Input dataset'", "}", ",", "{", "'name'", ":", "'data_collection_input'", ",", "'title'", ":", "'Input Dataset Collection'", ",", "'description'", ":", "'Input dataset collection'", "}", "]", "}", "if", "trans", ".", "app", ".", "config", ".", "enable_beta_workflow_modules", ":", "module_sections", "[", "'experimental'", "]", "=", "{", "'name'", ":", "'experimental'", ",", "'title'", ":", "'Experimental'", ",", "'modules'", ":", "[", "{", "'name'", ":", "'pause'", ",", "'title'", ":", "'Pause Workflow for Dataset Review'", ",", "'description'", ":", "'Pause for Review'", "}", ",", "{", "'name'", ":", "'parameter_input'", ",", "'title'", ":", "'Parameter Input'", ",", "'description'", ":", "'Simple inputs used for workflow logic'", "}", "]", "}", "return", "module_sections" ]
get abstract description of the workflow modules this galaxy instance is configured with .
train
false
7,575
@register.tag
def ifhasperm(parser, token):
    return PermissionComparisonNode.handle_token(parser, token)
[ "@", "register", ".", "tag", "def", "ifhasperm", "(", "parser", ",", "token", ")", ":", "return", "PermissionComparisonNode", ".", "handle_token", "(", "parser", ",", "token", ")" ]
this function provides functionality for the ifhasperm template tag syntax:: {% ifhasperm permission_label .
train
false
7,576
def geocode_r():
    vars = request.post_vars
    lat = vars.get('lat', None)
    lon = vars.get('lon', None)
    results = gis.geocode_r(lat, lon)
    results = json.dumps(results, separators=SEPARATORS)
    response.headers['Content-Type'] = 'application/json'
    return results
[ "def", "geocode_r", "(", ")", ":", "vars", "=", "request", ".", "post_vars", "lat", "=", "vars", ".", "get", "(", "'lat'", ",", "None", ")", "lon", "=", "vars", ".", "get", "(", "'lon'", ",", "None", ")", "results", "=", "gis", ".", "geocode_r", "(", "lat", ",", "lon", ")", "results", "=", "json", ".", "dumps", "(", "results", ",", "separators", "=", "SEPARATORS", ")", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "return", "results" ]
reverse-geocode a location - designed to be called via ajax post looks up lx in our own database @todo: if not found then calls out to 3rd party services .
train
false
7,577
def network_entry(is_metal, interface, bridge=None, net_type=None, net_mtu=None):
    if is_metal:
        _network = dict()
    else:
        _network = {'interface': interface}
    if bridge:
        _network['bridge'] = bridge
    if net_type:
        _network['type'] = net_type
    if net_mtu:
        _network['mtu'] = net_mtu
    return _network
[ "def", "network_entry", "(", "is_metal", ",", "interface", ",", "bridge", "=", "None", ",", "net_type", "=", "None", ",", "net_mtu", "=", "None", ")", ":", "if", "is_metal", ":", "_network", "=", "dict", "(", ")", "else", ":", "_network", "=", "{", "'interface'", ":", "interface", "}", "if", "bridge", ":", "_network", "[", "'bridge'", "]", "=", "bridge", "if", "net_type", ":", "_network", "[", "'type'", "]", "=", "net_type", "if", "net_mtu", ":", "_network", "[", "'mtu'", "]", "=", "net_mtu", "return", "_network" ]
return a network entry for a container .
train
false
7,578
def prepare_path_for_serialization(path): return [(unicode(path_item.usage_key), path_item.display_name) for path_item in path]
[ "def", "prepare_path_for_serialization", "(", "path", ")", ":", "return", "[", "(", "unicode", "(", "path_item", ".", "usage_key", ")", ",", "path_item", ".", "display_name", ")", "for", "path_item", "in", "path", "]" ]
return the data from a list of pathitems ready for serialization to json .
train
false
7,579
@transaction.atomic
def override_field_for_ccx(ccx, block, name, value):
    field = block.fields[name]
    value_json = field.to_json(value)
    serialized_value = json.dumps(value_json)
    override_has_changes = False
    clean_ccx_key = _clean_ccx_key(block.location)
    override = get_override_for_ccx(ccx, block, (name + '_instance'))
    if override:
        override_has_changes = (serialized_value != override.value)
    if (not override):
        (override, created) = CcxFieldOverride.objects.get_or_create(ccx=ccx, location=block.location, field=name, defaults={'value': serialized_value})
        if created:
            _get_overrides_for_ccx(ccx).setdefault(clean_ccx_key, {})[(name + '_id')] = override.id
        else:
            override_has_changes = (serialized_value != override.value)
    if override_has_changes:
        override.value = serialized_value
        override.save()
    _get_overrides_for_ccx(ccx).setdefault(clean_ccx_key, {})[name] = value_json
    _get_overrides_for_ccx(ccx).setdefault(clean_ccx_key, {})[(name + '_instance')] = override
[ "@", "transaction", ".", "atomic", "def", "override_field_for_ccx", "(", "ccx", ",", "block", ",", "name", ",", "value", ")", ":", "field", "=", "block", ".", "fields", "[", "name", "]", "value_json", "=", "field", ".", "to_json", "(", "value", ")", "serialized_value", "=", "json", ".", "dumps", "(", "value_json", ")", "override_has_changes", "=", "False", "clean_ccx_key", "=", "_clean_ccx_key", "(", "block", ".", "location", ")", "override", "=", "get_override_for_ccx", "(", "ccx", ",", "block", ",", "(", "name", "+", "'_instance'", ")", ")", "if", "override", ":", "override_has_changes", "=", "(", "serialized_value", "!=", "override", ".", "value", ")", "if", "(", "not", "override", ")", ":", "(", "override", ",", "created", ")", "=", "CcxFieldOverride", ".", "objects", ".", "get_or_create", "(", "ccx", "=", "ccx", ",", "location", "=", "block", ".", "location", ",", "field", "=", "name", ",", "defaults", "=", "{", "'value'", ":", "serialized_value", "}", ")", "if", "created", ":", "_get_overrides_for_ccx", "(", "ccx", ")", ".", "setdefault", "(", "clean_ccx_key", ",", "{", "}", ")", "[", "(", "name", "+", "'_id'", ")", "]", "=", "override", ".", "id", "else", ":", "override_has_changes", "=", "(", "serialized_value", "!=", "override", ".", "value", ")", "if", "override_has_changes", ":", "override", ".", "value", "=", "serialized_value", "override", ".", "save", "(", ")", "_get_overrides_for_ccx", "(", "ccx", ")", ".", "setdefault", "(", "clean_ccx_key", ",", "{", "}", ")", "[", "name", "]", "=", "value_json", "_get_overrides_for_ccx", "(", "ccx", ")", ".", "setdefault", "(", "clean_ccx_key", ",", "{", "}", ")", "[", "(", "name", "+", "'_instance'", ")", "]", "=", "override" ]
overrides a field for the ccx .
train
false
7,580
def test_uninstall_as_egg(script, data):
    to_install = data.packages.join('FSPkg')
    result = script.pip('install', to_install, '--egg', expect_error=True)
    fspkg_folder = (script.site_packages / 'fspkg')
    egg_folder = ((script.site_packages / 'FSPkg-0.1.dev0-py%s.egg') % pyversion)
    assert (fspkg_folder not in result.files_created), str(result.stdout)
    assert (egg_folder in result.files_created), str(result)
    result2 = script.pip('uninstall', 'FSPkg', '-y')
    assert_all_changes(result, result2, [(script.venv / 'build'), 'cache', (script.site_packages / 'easy-install.pth')])
[ "def", "test_uninstall_as_egg", "(", "script", ",", "data", ")", ":", "to_install", "=", "data", ".", "packages", ".", "join", "(", "'FSPkg'", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "to_install", ",", "'--egg'", ",", "expect_error", "=", "True", ")", "fspkg_folder", "=", "(", "script", ".", "site_packages", "/", "'fspkg'", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'FSPkg-0.1.dev0-py%s.egg'", ")", "%", "pyversion", ")", "assert", "(", "fspkg_folder", "not", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ".", "stdout", ")", "assert", "(", "egg_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")", "result2", "=", "script", ".", "pip", "(", "'uninstall'", ",", "'FSPkg'", ",", "'-y'", ")", "assert_all_changes", "(", "result", ",", "result2", ",", "[", "(", "script", ".", "venv", "/", "'build'", ")", ",", "'cache'", ",", "(", "script", ".", "site_packages", "/", "'easy-install.pth'", ")", "]", ")" ]
test uninstall package installed as egg .
train
false
7,581
def multinomial_logpmf(x, n, p): return ((gammaln((n + 1.0)) - np.sum(gammaln((x + 1.0)))) + np.sum((x * np.log(p))))
[ "def", "multinomial_logpmf", "(", "x", ",", "n", ",", "p", ")", ":", "return", "(", "(", "gammaln", "(", "(", "n", "+", "1.0", ")", ")", "-", "np", ".", "sum", "(", "gammaln", "(", "(", "x", "+", "1.0", ")", ")", ")", ")", "+", "np", ".", "sum", "(", "(", "x", "*", "np", ".", "log", "(", "p", ")", ")", ")", ")" ]
log pmf of multinomial .
train
false
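The record above implements log n! - sum(log x_i!) + sum(x_i log p_i) via gammaln. A quick cross-check against scipy.stats.multinomial (assuming scipy is available):

import numpy as np
from scipy.special import gammaln
from scipy.stats import multinomial

def multinomial_logpmf(x, n, p):
    return gammaln(n + 1.0) - np.sum(gammaln(x + 1.0)) + np.sum(x * np.log(p))

x = np.array([2, 3, 5])
p = np.array([0.2, 0.3, 0.5])
print(multinomial_logpmf(x, 10, p))    # matches scipy's reference value below
print(multinomial.logpmf(x, n=10, p=p))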
7,582
@status('Fixing whitespace', info=(lambda x: ('%s files' % x)))
def normalize_whitespace(file_paths):
    reindent.makebackup = False
    result = map(reindent.check, (x for x in file_paths if x.endswith('.py')))
    return sum(result)
[ "@", "status", "(", "'Fixing whitespace'", ",", "info", "=", "(", "lambda", "x", ":", "(", "'%s files'", "%", "x", ")", ")", ")", "def", "normalize_whitespace", "(", "file_paths", ")", ":", "reindent", ".", "makebackup", "=", "False", "result", "=", "map", "(", "reindent", ".", "check", ",", "(", "x", "for", "x", "in", "file_paths", "if", "x", ".", "endswith", "(", "'.py'", ")", ")", ")", "return", "sum", "(", "result", ")" ]
make sure that the whitespace for .py files has been normalized .
train
false
7,585
@library.filter
def selector_content_find(document, selector):
    content = ''
    try:
        page = pq(document.rendered_html)
    except ValueError:
        pass
    try:
        content = page.find(selector).text()
    except SelectorSyntaxError:
        pass
    return content
[ "@", "library", ".", "filter", "def", "selector_content_find", "(", "document", ",", "selector", ")", ":", "content", "=", "''", "try", ":", "page", "=", "pq", "(", "document", ".", "rendered_html", ")", "except", "ValueError", ":", "pass", "try", ":", "content", "=", "page", ".", "find", "(", "selector", ")", ".", "text", "(", ")", "except", "SelectorSyntaxError", ":", "pass", "return", "content" ]
provided a selector , return the text content of the matching nodes in the document's rendered html .
train
false
7,587
def check_or_raise_extra(value, message=u'WAMP message invalid'):
    if (type(value) != dict):
        raise ProtocolError(u'{0}: invalid type {1}'.format(message, type(value)))
    for k in value.keys():
        if (type(k) != six.text_type):
            raise ProtocolError(u"{0}: invalid type {1} for key '{2}'".format(message, type(k), k))
    return value
[ "def", "check_or_raise_extra", "(", "value", ",", "message", "=", "u'WAMP message invalid'", ")", ":", "if", "(", "type", "(", "value", ")", "!=", "dict", ")", ":", "raise", "ProtocolError", "(", "u'{0}: invalid type {1}'", ".", "format", "(", "message", ",", "type", "(", "value", ")", ")", ")", "for", "k", "in", "value", ".", "keys", "(", ")", ":", "if", "(", "type", "(", "k", ")", "!=", "six", ".", "text_type", ")", ":", "raise", "ProtocolError", "(", "u\"{0}: invalid type {1} for key '{2}'\"", ".", "format", "(", "message", ",", "type", "(", "k", ")", ",", "k", ")", ")", "return", "value" ]
check a value for being a valid wamp extra dictionary .
train
false
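A minimal Python 3 sketch of the same validation, with str standing in for six.text_type and a stand-in ProtocolError (both assumptions, not the real autobahn types), to show what passes and what raises:

class ProtocolError(Exception):
    pass

def check_or_raise_extra(value, message='WAMP message invalid'):
    if type(value) != dict:
        raise ProtocolError('{0}: invalid type {1}'.format(message, type(value)))
    for k in value.keys():
        if type(k) != str:
            raise ProtocolError("{0}: invalid type {1} for key '{2}'".format(message, type(k), k))
    return value

print(check_or_raise_extra({'timeout': 10}))  # ok: {'timeout': 10}
try:
    check_or_raise_extra({1: 'x'})            # non-string key
except ProtocolError as e:
    print(e)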
7,589
def enable_pretty_logging(options=None, logger=None):
    if (options is None):
        import tornado.options
        options = tornado.options.options
    if ((options.logging is None) or (options.logging.lower() == 'none')):
        return
    if (logger is None):
        logger = logging.getLogger()
    logger.setLevel(getattr(logging, options.logging.upper()))
    if options.log_file_prefix:
        rotate_mode = options.log_rotate_mode
        if (rotate_mode == 'size'):
            channel = logging.handlers.RotatingFileHandler(filename=options.log_file_prefix, maxBytes=options.log_file_max_size, backupCount=options.log_file_num_backups)
        elif (rotate_mode == 'time'):
            channel = logging.handlers.TimedRotatingFileHandler(filename=options.log_file_prefix, when=options.log_rotate_when, interval=options.log_rotate_interval, backupCount=options.log_file_num_backups)
        else:
            error_message = ('The value of log_rotate_mode option should be ' + ('"size" or "time", not "%s".' % rotate_mode))
            raise ValueError(error_message)
        channel.setFormatter(LogFormatter(color=False))
        logger.addHandler(channel)
    if (options.log_to_stderr or ((options.log_to_stderr is None) and (not logger.handlers))):
        channel = logging.StreamHandler()
        channel.setFormatter(LogFormatter())
        logger.addHandler(channel)
[ "def", "enable_pretty_logging", "(", "options", "=", "None", ",", "logger", "=", "None", ")", ":", "if", "(", "options", "is", "None", ")", ":", "import", "tornado", ".", "options", "options", "=", "tornado", ".", "options", ".", "options", "if", "(", "(", "options", ".", "logging", "is", "None", ")", "or", "(", "options", ".", "logging", ".", "lower", "(", ")", "==", "'none'", ")", ")", ":", "return", "if", "(", "logger", "is", "None", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "logger", ".", "setLevel", "(", "getattr", "(", "logging", ",", "options", ".", "logging", ".", "upper", "(", ")", ")", ")", "if", "options", ".", "log_file_prefix", ":", "rotate_mode", "=", "options", ".", "log_rotate_mode", "if", "(", "rotate_mode", "==", "'size'", ")", ":", "channel", "=", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "filename", "=", "options", ".", "log_file_prefix", ",", "maxBytes", "=", "options", ".", "log_file_max_size", ",", "backupCount", "=", "options", ".", "log_file_num_backups", ")", "elif", "(", "rotate_mode", "==", "'time'", ")", ":", "channel", "=", "logging", ".", "handlers", ".", "TimedRotatingFileHandler", "(", "filename", "=", "options", ".", "log_file_prefix", ",", "when", "=", "options", ".", "log_rotate_when", ",", "interval", "=", "options", ".", "log_rotate_interval", ",", "backupCount", "=", "options", ".", "log_file_num_backups", ")", "else", ":", "error_message", "=", "(", "'The value of log_rotate_mode option should be '", "+", "(", "'\"size\" or \"time\", not \"%s\".'", "%", "rotate_mode", ")", ")", "raise", "ValueError", "(", "error_message", ")", "channel", ".", "setFormatter", "(", "LogFormatter", "(", "color", "=", "False", ")", ")", "logger", ".", "addHandler", "(", "channel", ")", "if", "(", "options", ".", "log_to_stderr", "or", "(", "(", "options", ".", "log_to_stderr", "is", "None", ")", "and", "(", "not", "logger", ".", "handlers", ")", ")", ")", ":", "channel", "=", "logging", ".", "StreamHandler", "(", ")", "channel", ".", "setFormatter", "(", "LogFormatter", "(", ")", ")", "logger", ".", "addHandler", "(", "channel", ")" ]
turns on formatted logging output as configured .
train
true
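A hedged usage note: in Tornado this function is normally triggered implicitly by tornado.options.parse_command_line(); calling it directly might look like the sketch below (the default log level option is 'info'):

import logging
from tornado.log import enable_pretty_logging
from tornado.options import options

options.logging = 'debug'
enable_pretty_logging(options=options)
logging.getLogger().debug('formatted, possibly colorized output')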
7,590
def memoize_in_request_cache(request_cache_attr_name=None):
    def _decorator(func):
        'Outer method decorator.'
        @functools.wraps(func)
        def _wrapper(self, *args, **kwargs):
            'Wraps a method to memoize results.'
            request_cache = getattr(self, request_cache_attr_name, None)
            if request_cache:
                cache_key = '&'.join([hashvalue(arg) for arg in args])
                if (cache_key in request_cache.data.setdefault(func.__name__, {})):
                    return request_cache.data[func.__name__][cache_key]
                result = func(self, *args, **kwargs)
                request_cache.data[func.__name__][cache_key] = result
                return result
            else:
                return func(self, *args, **kwargs)
        return _wrapper
    return _decorator
[ "def", "memoize_in_request_cache", "(", "request_cache_attr_name", "=", "None", ")", ":", "def", "_decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "request_cache", "=", "getattr", "(", "self", ",", "request_cache_attr_name", ",", "None", ")", "if", "request_cache", ":", "cache_key", "=", "'&'", ".", "join", "(", "[", "hashvalue", "(", "arg", ")", "for", "arg", "in", "args", "]", ")", "if", "(", "cache_key", "in", "request_cache", ".", "data", ".", "setdefault", "(", "func", ".", "__name__", ",", "{", "}", ")", ")", ":", "return", "request_cache", ".", "data", "[", "func", ".", "__name__", "]", "[", "cache_key", "]", "result", "=", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "request_cache", ".", "data", "[", "func", ".", "__name__", "]", "[", "cache_key", "]", "=", "result", "return", "result", "else", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "_wrapper", "return", "_decorator" ]
memoize a method call's results in the request_cache , if there is one .
train
false
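A self-contained toy sketch of how the decorator is applied. RequestCache and hashvalue below are stand-ins for the real edx-platform helpers (assumptions, not the actual API); the decorator itself is copied from the record above:

import functools

hashvalue = repr  # stand-in for the real hashing helper the decorator expects

def memoize_in_request_cache(request_cache_attr_name=None):
    def _decorator(func):
        @functools.wraps(func)
        def _wrapper(self, *args, **kwargs):
            request_cache = getattr(self, request_cache_attr_name, None)
            if request_cache:
                cache_key = '&'.join([hashvalue(arg) for arg in args])
                if cache_key in request_cache.data.setdefault(func.__name__, {}):
                    return request_cache.data[func.__name__][cache_key]
                result = func(self, *args, **kwargs)
                request_cache.data[func.__name__][cache_key] = result
                return result
            return func(self, *args, **kwargs)
        return _wrapper
    return _decorator

class RequestCache(object):  # stand-in exposing the .data dict the decorator uses
    def __init__(self):
        self.data = {}

class Service(object):
    def __init__(self):
        self.request_cache = RequestCache()
        self.calls = 0

    @memoize_in_request_cache('request_cache')
    def square(self, x):
        self.calls += 1
        return x * x

s = Service()
print(s.square(4), s.square(4), s.calls)  # 16 16 1 -- the second call hits the cache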
7,591
def StartTcpServer(context, identity=None, address=None, console=False, **kwargs):
    from twisted.internet import reactor
    address = (address or ('', Defaults.Port))
    framer = ModbusSocketFramer
    factory = ModbusServerFactory(context, framer, identity, **kwargs)
    if console:
        from pymodbus.internal.ptwisted import InstallManagementConsole
        InstallManagementConsole({'factory': factory})
    _logger.info(('Starting Modbus TCP Server on %s:%s' % address))
    reactor.listenTCP(address[1], factory, interface=address[0])
    reactor.run()
[ "def", "StartTcpServer", "(", "context", ",", "identity", "=", "None", ",", "address", "=", "None", ",", "console", "=", "False", ",", "**", "kwargs", ")", ":", "from", "twisted", ".", "internet", "import", "reactor", "address", "=", "(", "address", "or", "(", "''", ",", "Defaults", ".", "Port", ")", ")", "framer", "=", "ModbusSocketFramer", "factory", "=", "ModbusServerFactory", "(", "context", ",", "framer", ",", "identity", ",", "**", "kwargs", ")", "if", "console", ":", "from", "pymodbus", ".", "internal", ".", "ptwisted", "import", "InstallManagementConsole", "InstallManagementConsole", "(", "{", "'factory'", ":", "factory", "}", ")", "_logger", ".", "info", "(", "(", "'Starting Modbus TCP Server on %s:%s'", "%", "address", ")", ")", "reactor", ".", "listenTCP", "(", "address", "[", "1", "]", ",", "factory", ",", "interface", "=", "address", "[", "0", "]", ")", "reactor", ".", "run", "(", ")" ]
a factory to start and run a tcp modbus server .
train
false
7,592
def number_for_course_location(location): return location.course
[ "def", "number_for_course_location", "(", "location", ")", ":", "return", "location", ".", "course" ]
given a course's block usage locator , return the course number .
train
false
7,593
def attr_le(accessing_obj, accessed_obj, *args, **kwargs): return attr(accessing_obj, accessed_obj, *args, **{'compare': 'le'})
[ "def", "attr_le", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "attr", "(", "accessing_obj", ",", "accessed_obj", ",", "*", "args", ",", "**", "{", "'compare'", ":", "'le'", "}", ")" ]
usage: attr_le only true if access_obj's attribute <= the value given .
train
false
7,595
def grad_undefined(op, x_pos, x, comment=''): return NullType(('This variable is Null because the grad method for input %s (%s) of the %s op is mathematically undefined. %s' % (x_pos, x, op, comment)))()
[ "def", "grad_undefined", "(", "op", ",", "x_pos", ",", "x", ",", "comment", "=", "''", ")", ":", "return", "NullType", "(", "(", "'This variable is Null because the grad method for input %s (%s) of the %s op is mathematically undefined. %s'", "%", "(", "x_pos", ",", "x", ",", "op", ",", "comment", ")", ")", ")", "(", ")" ]
return an un-computable symbolic variable of type x .
train
false
7,596
def show_main(path=MAIN_CF):
    (pairs, conf_list) = _parse_main(path)
    return pairs
[ "def", "show_main", "(", "path", "=", "MAIN_CF", ")", ":", "(", "pairs", ",", "conf_list", ")", "=", "_parse_main", "(", "path", ")", "return", "pairs" ]
return a dict of active config values .
train
false
7,597
@task(queue='web')
def update_static_metadata(project_pk, path=None):
    project = Project.objects.get(pk=project_pk)
    if (not path):
        path = project.static_metadata_path()
    log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Updating static metadata'))
    translations = [trans.language for trans in project.translations.all()]
    languages = set(translations)
    metadata = {'version': project.default_version, 'language': project.language, 'languages': list(languages), 'single_version': project.single_version}
    try:
        fh = open(path, 'w+')
        json.dump(metadata, fh)
        fh.close()
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        log.debug(LOG_TEMPLATE.format(project=project.slug, version='', msg='Cannot write to metadata.json: {0}'.format(e)))
[ "@", "task", "(", "queue", "=", "'web'", ")", "def", "update_static_metadata", "(", "project_pk", ",", "path", "=", "None", ")", ":", "project", "=", "Project", ".", "objects", ".", "get", "(", "pk", "=", "project_pk", ")", "if", "(", "not", "path", ")", ":", "path", "=", "project", ".", "static_metadata_path", "(", ")", "log", ".", "info", "(", "LOG_TEMPLATE", ".", "format", "(", "project", "=", "project", ".", "slug", ",", "version", "=", "''", ",", "msg", "=", "'Updating static metadata'", ")", ")", "translations", "=", "[", "trans", ".", "language", "for", "trans", "in", "project", ".", "translations", ".", "all", "(", ")", "]", "languages", "=", "set", "(", "translations", ")", "metadata", "=", "{", "'version'", ":", "project", ".", "default_version", ",", "'language'", ":", "project", ".", "language", ",", "'languages'", ":", "list", "(", "languages", ")", ",", "'single_version'", ":", "project", ".", "single_version", "}", "try", ":", "fh", "=", "open", "(", "path", ",", "'w+'", ")", "json", ".", "dump", "(", "metadata", ",", "fh", ")", "fh", ".", "close", "(", ")", "Syncer", ".", "copy", "(", "path", ",", "path", ",", "host", "=", "socket", ".", "gethostname", "(", ")", ",", "file", "=", "True", ")", "except", "(", "AttributeError", ",", "IOError", ")", "as", "e", ":", "log", ".", "debug", "(", "LOG_TEMPLATE", ".", "format", "(", "project", "=", "project", ".", "slug", ",", "version", "=", "''", ",", "msg", "=", "'Cannot write to metadata.json: {0}'", ".", "format", "(", "e", ")", ")", ")" ]
update a project's static metadata json file ; this is here to avoid circular imports in models .
train
false
7,599
def _launchd_paths(): return ['/Library/LaunchAgents', '/Library/LaunchDaemons', '/System/Library/LaunchAgents', '/System/Library/LaunchDaemons']
[ "def", "_launchd_paths", "(", ")", ":", "return", "[", "'/Library/LaunchAgents'", ",", "'/Library/LaunchDaemons'", ",", "'/System/Library/LaunchAgents'", ",", "'/System/Library/LaunchDaemons'", "]" ]
paths where launchd services can be found .
train
false
7,601
def _check_h5py(): try: import h5py except ImportError: raise ImportError('the h5py module is required to use HDF5 I/O') return h5py
[ "def", "_check_h5py", "(", ")", ":", "try", ":", "import", "h5py", "except", "ImportError", ":", "raise", "ImportError", "(", "'the h5py module is required to use HDF5 I/O'", ")", "return", "h5py" ]
helper to check if h5py is installed .
train
false
7,603
def _convert_between_ucsc_and_ensemble_naming(chrom): if chrom.startswith('chr'): return chrom[3:] else: return ('chr' + chrom)
[ "def", "_convert_between_ucsc_and_ensemble_naming", "(", "chrom", ")", ":", "if", "chrom", ".", "startswith", "(", "'chr'", ")", ":", "return", "chrom", "[", "3", ":", "]", "else", ":", "return", "(", "'chr'", "+", "chrom", ")" ]
convert between ucsc chromosome naming conventions ('chr1') and ensembl naming conventions ('1') .
train
false
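A quick round-trip check of the naming switch above (a minimal sketch, assuming the function is in scope):

# each call flips the convention; applying it twice returns the input
assert _convert_between_ucsc_and_ensemble_naming('chr1') == '1'   # UCSC -> Ensembl
assert _convert_between_ucsc_and_ensemble_naming('1') == 'chr1'   # Ensembl -> UCSC
assert _convert_between_ucsc_and_ensemble_naming('chrX') == 'X'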
7,604
def _unmount(path, idempotent=False): try: run_process(['umount', '-l', path.path]) except CalledProcessError as e: if (idempotent and (e.returncode in (1, 32))): pass else: raise UnmountError(blockdevice=path.path, source_message=e.output)
[ "def", "_unmount", "(", "path", ",", "idempotent", "=", "False", ")", ":", "try", ":", "run_process", "(", "[", "'umount'", ",", "'-l'", ",", "path", ".", "path", "]", ")", "except", "CalledProcessError", "as", "e", ":", "if", "(", "idempotent", "and", "(", "e", ".", "returncode", "in", "(", "1", ",", "32", ")", ")", ")", ":", "pass", "else", ":", "raise", "UnmountError", "(", "blockdevice", "=", "path", ".", "path", ",", "source_message", "=", "e", ".", "output", ")" ]
unmount the path .
train
false
7,605
def iter_bad_values(): for (ctype, values) in sorted(BAD_VALUES.items()): for (value, repl) in values: (yield (ctype, value, repl))
[ "def", "iter_bad_values", "(", ")", ":", "for", "(", "ctype", ",", "values", ")", "in", "sorted", "(", "BAD_VALUES", ".", "items", "(", ")", ")", ":", "for", "(", "value", ",", "repl", ")", "in", "values", ":", "(", "yield", "(", "ctype", ",", "value", ",", "repl", ")", ")" ]
yield (ctype, value, replacement) triples of "bad" values .
train
false
7,606
def write_error_summary(error): fullpath = request.environ.get('FULLPATH', request.path) uid = (c.user._id if c.user_is_loggedin else '-') g.log.error('E: %s U: %s FP: %s', error, uid, fullpath)
[ "def", "write_error_summary", "(", "error", ")", ":", "fullpath", "=", "request", ".", "environ", ".", "get", "(", "'FULLPATH'", ",", "request", ".", "path", ")", "uid", "=", "(", "c", ".", "user", ".", "_id", "if", "c", ".", "user_is_loggedin", "else", "'-'", ")", "g", ".", "log", ".", "error", "(", "'E: %s U: %s FP: %s'", ",", "error", ",", "uid", ",", "fullpath", ")" ]
log a single-line summary of the error for easy log grepping .
train
false
7,608
@register.inclusion_tag(u'generic/includes/comments.html', takes_context=True) def comments_for(context, obj): form_class = import_dotted_path(settings.COMMENT_FORM_CLASS) form = form_class(context[u'request'], obj) context_form = context.get(u'posted_comment_form', form) context.update({u'posted_comment_form': (context_form if (context_form.target_object == obj) else form), u'unposted_comment_form': form, u'comment_url': reverse(u'comment'), u'object_for_comments': obj}) return context
[ "@", "register", ".", "inclusion_tag", "(", "u'generic/includes/comments.html'", ",", "takes_context", "=", "True", ")", "def", "comments_for", "(", "context", ",", "obj", ")", ":", "form_class", "=", "import_dotted_path", "(", "settings", ".", "COMMENT_FORM_CLASS", ")", "form", "=", "form_class", "(", "context", "[", "u'request'", "]", ",", "obj", ")", "context_form", "=", "context", ".", "get", "(", "u'posted_comment_form'", ",", "form", ")", "context", ".", "update", "(", "{", "u'posted_comment_form'", ":", "(", "context_form", "if", "(", "context_form", ".", "target_object", "==", "obj", ")", "else", "form", ")", ",", "u'unposted_comment_form'", ":", "form", ",", "u'comment_url'", ":", "reverse", "(", "u'comment'", ")", ",", "u'object_for_comments'", ":", "obj", "}", ")", "return", "context" ]
provides a generic context variable name for the object that comments are being rendered for .
train
false
7,609
def get_diff_chunk_generator(*args, **kwargs): return _generator(*args, **kwargs)
[ "def", "get_diff_chunk_generator", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "_generator", "(", "*", "args", ",", "**", "kwargs", ")" ]
returns a diffchunkgenerator instance used for generating chunks .
train
false
7,610
def task_pull_docker_images(images=ACCEPTANCE_IMAGES): return sequence([run_from_args(['docker', 'pull', image]) for image in images])
[ "def", "task_pull_docker_images", "(", "images", "=", "ACCEPTANCE_IMAGES", ")", ":", "return", "sequence", "(", "[", "run_from_args", "(", "[", "'docker'", ",", "'pull'", ",", "image", "]", ")", "for", "image", "in", "images", "]", ")" ]
pull docker images .
train
false
7,611
def transform_old_to_new(line, old_module, new_module, ignore_import_from=False): if (not ignore_import_from): import_from_statement = (IMPORT_FROM_TEMPLATE % (old_module,)) if line.startswith(import_from_statement): new_import_from_statement = (IMPORT_FROM_TEMPLATE % (new_module,)) return line.replace(import_from_statement, new_import_from_statement, 1) import_statement = (IMPORT_TEMPLATE % (old_module,)) if (import_statement in line): new_import_statement = (IMPORT_TEMPLATE % (new_module,)) return line.replace(import_statement, new_import_statement, 1) if line.lstrip().startswith(old_module): return line.replace(old_module, new_module, 1) if ((': ' + old_module) in line): return line.replace((': ' + old_module), (': ' + new_module), 1)
[ "def", "transform_old_to_new", "(", "line", ",", "old_module", ",", "new_module", ",", "ignore_import_from", "=", "False", ")", ":", "if", "(", "not", "ignore_import_from", ")", ":", "import_from_statement", "=", "(", "IMPORT_FROM_TEMPLATE", "%", "(", "old_module", ",", ")", ")", "if", "line", ".", "startswith", "(", "import_from_statement", ")", ":", "new_import_from_statement", "=", "(", "IMPORT_FROM_TEMPLATE", "%", "(", "new_module", ",", ")", ")", "return", "line", ".", "replace", "(", "import_from_statement", ",", "new_import_from_statement", ",", "1", ")", "import_statement", "=", "(", "IMPORT_TEMPLATE", "%", "(", "old_module", ",", ")", ")", "if", "(", "import_statement", "in", "line", ")", ":", "new_import_statement", "=", "(", "IMPORT_TEMPLATE", "%", "(", "new_module", ",", ")", ")", "return", "line", ".", "replace", "(", "import_statement", ",", "new_import_statement", ",", "1", ")", "if", "line", ".", "lstrip", "(", ")", ".", "startswith", "(", "old_module", ")", ":", "return", "line", ".", "replace", "(", "old_module", ",", "new_module", ",", "1", ")", "if", "(", "(", "': '", "+", "old_module", ")", "in", "line", ")", ":", "return", "line", ".", "replace", "(", "(", "': '", "+", "old_module", ")", ",", "(", "': '", "+", "new_module", ")", ",", "1", ")" ]
transforms from an old module to a new one .
train
false
7,613
def _theano_leapfrog_integrator(H, q, p, **theano_kwargs): epsilon = tt.dscalar('epsilon') epsilon.tag.test_value = 1 n_steps = tt.iscalar('n_steps') n_steps.tag.test_value = 2 (q_new, p_new) = leapfrog(H, q, p, epsilon, n_steps) energy_new = energy(H, q_new, p_new) f = theano.function([q, p, epsilon, n_steps], [q_new, p_new, energy_new], **theano_kwargs) f.trust_input = True return f
[ "def", "_theano_leapfrog_integrator", "(", "H", ",", "q", ",", "p", ",", "**", "theano_kwargs", ")", ":", "epsilon", "=", "tt", ".", "dscalar", "(", "'epsilon'", ")", "epsilon", ".", "tag", ".", "test_value", "=", "1", "n_steps", "=", "tt", ".", "iscalar", "(", "'n_steps'", ")", "n_steps", ".", "tag", ".", "test_value", "=", "2", "(", "q_new", ",", "p_new", ")", "=", "leapfrog", "(", "H", ",", "q", ",", "p", ",", "epsilon", ",", "n_steps", ")", "energy_new", "=", "energy", "(", "H", ",", "q_new", ",", "p_new", ")", "f", "=", "theano", ".", "function", "(", "[", "q", ",", "p", ",", "epsilon", ",", "n_steps", "]", ",", "[", "q_new", ",", "p_new", ",", "energy_new", "]", ",", "**", "theano_kwargs", ")", "f", ".", "trust_input", "=", "True", "return", "f" ]
computes a theano function that computes one leapfrog step and the energy at the end of the trajectory .
train
false
7,615
def port_standard(standard_annotations, sel, sample, extractors): new_annotations = [] for annotation in standard_annotations: if (not annotation.get('tagid')): continue element = find_element(annotation, sel) if (element is None): continue selector = find_generalized_css_selector(element, sel) if (not selector): continue annotation['accept_selectors'] = [selector] annotation['selector'] = selector annotation['reject_selectors'] = [] annotation = _add_annotation_data(annotation, sample, extractors) for (_id, data) in annotation.get('data', {}).items(): a = copy.deepcopy(annotation) a['id'] = gen_predictable_id(_id, a['id']) a['data'] = {gen_predictable_id(a['id'], 1): data} new_annotations.append(a) return new_annotations
[ "def", "port_standard", "(", "standard_annotations", ",", "sel", ",", "sample", ",", "extractors", ")", ":", "new_annotations", "=", "[", "]", "for", "annotation", "in", "standard_annotations", ":", "if", "(", "not", "annotation", ".", "get", "(", "'tagid'", ")", ")", ":", "continue", "element", "=", "find_element", "(", "annotation", ",", "sel", ")", "if", "(", "element", "is", "None", ")", ":", "continue", "selector", "=", "find_generalized_css_selector", "(", "element", ",", "sel", ")", "if", "(", "not", "selector", ")", ":", "continue", "annotation", "[", "'accept_selectors'", "]", "=", "[", "selector", "]", "annotation", "[", "'selector'", "]", "=", "selector", "annotation", "[", "'reject_selectors'", "]", "=", "[", "]", "annotation", "=", "_add_annotation_data", "(", "annotation", ",", "sample", ",", "extractors", ")", "for", "(", "_id", ",", "data", ")", "in", "annotation", ".", "get", "(", "'data'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "a", "=", "copy", ".", "deepcopy", "(", "annotation", ")", "a", "[", "'id'", "]", "=", "gen_predictable_id", "(", "_id", ",", "a", "[", "'id'", "]", ")", "a", "[", "'data'", "]", "=", "{", "gen_predictable_id", "(", "a", "[", "'id'", "]", ",", "1", ")", ":", "data", "}", "new_annotations", ".", "append", "(", "a", ")", "return", "new_annotations" ]
add accept selectors for existing annotations .
train
false
7,622
def set_vif_bandwidth_config(conf, inst_type): bandwidth_items = ['vif_inbound_average', 'vif_inbound_peak', 'vif_inbound_burst', 'vif_outbound_average', 'vif_outbound_peak', 'vif_outbound_burst'] for (key, value) in inst_type.get('extra_specs', {}).items(): scope = key.split(':') if ((len(scope) > 1) and (scope[0] == 'quota')): if (scope[1] in bandwidth_items): setattr(conf, scope[1], value)
[ "def", "set_vif_bandwidth_config", "(", "conf", ",", "inst_type", ")", ":", "bandwidth_items", "=", "[", "'vif_inbound_average'", ",", "'vif_inbound_peak'", ",", "'vif_inbound_burst'", ",", "'vif_outbound_average'", ",", "'vif_outbound_peak'", ",", "'vif_outbound_burst'", "]", "for", "(", "key", ",", "value", ")", "in", "inst_type", ".", "get", "(", "'extra_specs'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "scope", "=", "key", ".", "split", "(", "':'", ")", "if", "(", "(", "len", "(", "scope", ")", ">", "1", ")", "and", "(", "scope", "[", "0", "]", "==", "'quota'", ")", ")", ":", "if", "(", "scope", "[", "1", "]", "in", "bandwidth_items", ")", ":", "setattr", "(", "conf", ",", "scope", "[", "1", "]", ",", "value", ")" ]
config vif inbound/outbound bandwidth limit .
train
false
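A toy run of the snippet above; the Conf class and the extra_specs values are illustrative stand-ins, not taken from any real flavor:

class Conf(object):
    pass  # stand-in for the libvirt vif config object

conf = Conf()
inst_type = {'extra_specs': {'quota:vif_inbound_average': '256',   # picked up
                             'quota:cpu_shares': '1024',           # quota-scoped but not a bandwidth item: ignored
                             'hw:mem_page_size': 'large'}}         # wrong scope: ignored
set_vif_bandwidth_config(conf, inst_type)
print(conf.vif_inbound_average)  # '256'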
7,624
def generate_oauth_authorization_url(token, next=None, hd=DEFAULT_DOMAIN, hl=None, btmpl=None, auth_server=OAUTH_AUTHORIZE_URL): uri = atom.http_core.Uri.parse_uri(auth_server) uri.query['oauth_token'] = token uri.query['hd'] = hd if (next is not None): uri.query['oauth_callback'] = str(next) if (hl is not None): uri.query['hl'] = hl if (btmpl is not None): uri.query['btmpl'] = btmpl return uri
[ "def", "generate_oauth_authorization_url", "(", "token", ",", "next", "=", "None", ",", "hd", "=", "DEFAULT_DOMAIN", ",", "hl", "=", "None", ",", "btmpl", "=", "None", ",", "auth_server", "=", "OAUTH_AUTHORIZE_URL", ")", ":", "uri", "=", "atom", ".", "http_core", ".", "Uri", ".", "parse_uri", "(", "auth_server", ")", "uri", ".", "query", "[", "'oauth_token'", "]", "=", "token", "uri", ".", "query", "[", "'hd'", "]", "=", "hd", "if", "(", "next", "is", "not", "None", ")", ":", "uri", ".", "query", "[", "'oauth_callback'", "]", "=", "str", "(", "next", ")", "if", "(", "hl", "is", "not", "None", ")", ":", "uri", ".", "query", "[", "'hl'", "]", "=", "hl", "if", "(", "btmpl", "is", "not", "None", ")", ":", "uri", ".", "query", "[", "'btmpl'", "]", "=", "btmpl", "return", "uri" ]
creates a url for the page where the request token can be authorized .
train
false
7,626
def disk_partition_usage(all=False): result = disk_partitions(all) for partition in result: partition.update(disk_usage(partition['mountpoint'])) return result
[ "def", "disk_partition_usage", "(", "all", "=", "False", ")", ":", "result", "=", "disk_partitions", "(", "all", ")", "for", "partition", "in", "result", ":", "partition", ".", "update", "(", "disk_usage", "(", "partition", "[", "'mountpoint'", "]", ")", ")", "return", "result" ]
return a list of disk partitions plus the disk usage of each mount point .
train
true
7,628
@local_optimizer([GpuSparseBlockOuter], inplace=True) def local_inplace_gpu_sparse_block_outer(node): if (isinstance(node.op, GpuSparseBlockOuter) and (not node.op.inplace)): new_node = gpu_sparse_block_outer_inplace(*node.inputs) return [new_node] return False
[ "@", "local_optimizer", "(", "[", "GpuSparseBlockOuter", "]", ",", "inplace", "=", "True", ")", "def", "local_inplace_gpu_sparse_block_outer", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ".", "op", ",", "GpuSparseBlockOuter", ")", "and", "(", "not", "node", ".", "op", ".", "inplace", ")", ")", ":", "new_node", "=", "gpu_sparse_block_outer_inplace", "(", "*", "node", ".", "inputs", ")", "return", "[", "new_node", "]", "return", "False" ]
gpusparseblockouter -> inplace gpusparseblockouter .
train
false
7,629
def setCacheCapacity(capacity=DEFAULT_CACHE_CAPACITY): enableCache() _entityCache.setCapacity(capacity)
[ "def", "setCacheCapacity", "(", "capacity", "=", "DEFAULT_CACHE_CAPACITY", ")", ":", "enableCache", "(", ")", "_entityCache", ".", "setCapacity", "(", "capacity", ")" ]
set the capacity of the entity cache .
train
false
7,631
def parse_grid_id_args(grid, grid_url): if (grid is not None): id_from_grid = grid.id else: id_from_grid = None args = [id_from_grid, grid_url] arg_names = ('grid', 'grid_url') supplied_arg_names = [arg_name for (arg_name, arg) in zip(arg_names, args) if (arg is not None)] if (not supplied_arg_names): raise exceptions.InputError("One of the two keyword arguments is required:\n `grid` or `grid_url`\n\ngrid: a plotly.graph_objs.Grid object that has already\n been uploaded to Plotly.\n\ngrid_url: the url where the grid can be accessed on\n Plotly, e.g. 'https://plot.ly/~chris/3043'\n\n") elif (len(supplied_arg_names) > 1): raise exceptions.InputError('Only one of `grid` or `grid_url` is required. \nYou supplied both. \n') else: supplied_arg_name = supplied_arg_names.pop() if (supplied_arg_name == 'grid_url'): path = six.moves.urllib.parse.urlparse(grid_url).path (file_owner, file_id) = path.replace('/~', '').split('/')[0:2] return '{0}:{1}'.format(file_owner, file_id) else: return grid.id
[ "def", "parse_grid_id_args", "(", "grid", ",", "grid_url", ")", ":", "if", "(", "grid", "is", "not", "None", ")", ":", "id_from_grid", "=", "grid", ".", "id", "else", ":", "id_from_grid", "=", "None", "args", "=", "[", "id_from_grid", ",", "grid_url", "]", "arg_names", "=", "(", "'grid'", ",", "'grid_url'", ")", "supplied_arg_names", "=", "[", "arg_name", "for", "(", "arg_name", ",", "arg", ")", "in", "zip", "(", "arg_names", ",", "args", ")", "if", "(", "arg", "is", "not", "None", ")", "]", "if", "(", "not", "supplied_arg_names", ")", ":", "raise", "exceptions", ".", "InputError", "(", "\"One of the two keyword arguments is required:\\n `grid` or `grid_url`\\n\\ngrid: a plotly.graph_objs.Grid object that has already\\n been uploaded to Plotly.\\n\\ngrid_url: the url where the grid can be accessed on\\n Plotly, e.g. 'https://plot.ly/~chris/3043'\\n\\n\"", ")", "elif", "(", "len", "(", "supplied_arg_names", ")", ">", "1", ")", ":", "raise", "exceptions", ".", "InputError", "(", "'Only one of `grid` or `grid_url` is required. \\nYou supplied both. \\n'", ")", "else", ":", "supplied_arg_name", "=", "supplied_arg_names", ".", "pop", "(", ")", "if", "(", "supplied_arg_name", "==", "'grid_url'", ")", ":", "path", "=", "six", ".", "moves", ".", "urllib", ".", "parse", ".", "urlparse", "(", "grid_url", ")", ".", "path", "(", "file_owner", ",", "file_id", ")", "=", "path", ".", "replace", "(", "'/~'", ",", "''", ")", ".", "split", "(", "'/'", ")", "[", "0", ":", "2", "]", "return", "'{0}:{1}'", ".", "format", "(", "file_owner", ",", "file_id", ")", "else", ":", "return", "grid", ".", "id" ]
return the grid_id from the non-none input argument .
train
false
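The grid_url branch boils down to a few lines of URL surgery; here is just that step rewritten against the stdlib (the example URL comes from the snippet's own error message):

from urllib.parse import urlparse

grid_url = 'https://plot.ly/~chris/3043'
path = urlparse(grid_url).path                        # '/~chris/3043'
file_owner, file_id = path.replace('/~', '').split('/')[0:2]
print('{0}:{1}'.format(file_owner, file_id))          # 'chris:3043'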
7,633
@pytest.mark.parametrize('console_scripts', ['test_ = distutils_install', 'test_:test_ = distutils_install']) def test_uninstall_entry_point(script, console_scripts): pkg_name = 'ep_install' pkg_path = create_test_package_with_setup(script, name=pkg_name, version='0.1', entry_points={'console_scripts': [console_scripts], 'pip_test.ep': ['ep:name1 = distutils_install', 'ep:name2 = distutils_install']}) script_name = script.bin_path.join(console_scripts.split('=')[0].strip()) result = script.pip('install', pkg_path) assert script_name.exists result = script.pip('list', '--format=legacy') assert ('ep-install (0.1)' in result.stdout) script.pip('uninstall', 'ep_install', '-y') assert (not script_name.exists) result2 = script.pip('list', '--format=legacy') assert ('ep-install (0.1)' not in result2.stdout)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'console_scripts'", ",", "[", "'test_ = distutils_install'", ",", "'test_:test_ = distutils_install'", "]", ")", "def", "test_uninstall_entry_point", "(", "script", ",", "console_scripts", ")", ":", "pkg_name", "=", "'ep_install'", "pkg_path", "=", "create_test_package_with_setup", "(", "script", ",", "name", "=", "pkg_name", ",", "version", "=", "'0.1'", ",", "entry_points", "=", "{", "'console_scripts'", ":", "[", "console_scripts", "]", ",", "'pip_test.ep'", ":", "[", "'ep:name1 = distutils_install'", ",", "'ep:name2 = distutils_install'", "]", "}", ")", "script_name", "=", "script", ".", "bin_path", ".", "join", "(", "console_scripts", ".", "split", "(", "'='", ")", "[", "0", "]", ".", "strip", "(", ")", ")", "result", "=", "script", ".", "pip", "(", "'install'", ",", "pkg_path", ")", "assert", "script_name", ".", "exists", "result", "=", "script", ".", "pip", "(", "'list'", ",", "'--format=legacy'", ")", "assert", "(", "'ep-install (0.1)'", "in", "result", ".", "stdout", ")", "script", ".", "pip", "(", "'uninstall'", ",", "'ep_install'", ",", "'-y'", ")", "assert", "(", "not", "script_name", ".", "exists", ")", "result2", "=", "script", ".", "pip", "(", "'list'", ",", "'--format=legacy'", ")", "assert", "(", "'ep-install (0.1)'", "not", "in", "result2", ".", "stdout", ")" ]
test uninstall package with two or more entry points in the same section .
train
false
7,635
def repeat_last_axis(array, count): return as_strided(array, (array.shape + (count,)), (array.strides + (0,)))
[ "def", "repeat_last_axis", "(", "array", ",", "count", ")", ":", "return", "as_strided", "(", "array", ",", "(", "array", ".", "shape", "+", "(", "count", ",", ")", ")", ",", "(", "array", ".", "strides", "+", "(", "0", ",", ")", ")", ")" ]
restride array to repeat count times along the last axis .
train
true
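The zero stride on the new axis is what makes this a no-copy repeat; a small self-contained demo with the same function body as above:

import numpy as np
from numpy.lib.stride_tricks import as_strided

def repeat_last_axis(array, count):
    # add a trailing axis whose stride is 0, so the same data is read `count` times
    return as_strided(array, array.shape + (count,), array.strides + (0,))

a = np.arange(3)
r = repeat_last_axis(a, 4)
print(r)           # [[0 0 0 0] [1 1 1 1] [2 2 2 2]]
print(r.strides)   # (8, 0) for int64 -- the 0 is the repeat, nothing was copied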
7,637
def get_language_name(lang_code, native=None, error_on_missing=False): lang_code = lcode_to_ietf(lang_code) language_entry = get_code2lang_map(lang_code) if (not language_entry): if error_on_missing: raise LanguageNotFoundError(("We don't have language code '%s' saved in our lookup dictionary (location: %s). Please manually add it before re-running this command." % (lang_code, settings.LANG_LOOKUP_FILEPATH))) else: language_entry = {'name': lang_code, 'native_name': lang_code} if (not isinstance(language_entry, dict)): return language_entry elif (native is None): return (language_entry.get('native_name') or language_entry.get('ka_name') or language_entry.get('name')) elif (not native): return language_entry.get('name') else: return language_entry.get('native_name')
[ "def", "get_language_name", "(", "lang_code", ",", "native", "=", "None", ",", "error_on_missing", "=", "False", ")", ":", "lang_code", "=", "lcode_to_ietf", "(", "lang_code", ")", "language_entry", "=", "get_code2lang_map", "(", "lang_code", ")", "if", "(", "not", "language_entry", ")", ":", "if", "error_on_missing", ":", "raise", "LanguageNotFoundError", "(", "(", "\"We don't have language code '%s' saved in our lookup dictionary (location: %s). Please manually add it before re-running this command.\"", "%", "(", "lang_code", ",", "settings", ".", "LANG_LOOKUP_FILEPATH", ")", ")", ")", "else", ":", "language_entry", "=", "{", "'name'", ":", "lang_code", ",", "'native_name'", ":", "lang_code", "}", "if", "(", "not", "isinstance", "(", "language_entry", ",", "dict", ")", ")", ":", "return", "language_entry", "elif", "(", "native", "is", "None", ")", ":", "return", "(", "language_entry", ".", "get", "(", "'native_name'", ")", "or", "language_entry", ".", "get", "(", "'ka_name'", ")", "or", "language_entry", ".", "get", "(", "'name'", ")", ")", "elif", "(", "not", "native", ")", ":", "return", "language_entry", ".", "get", "(", "'name'", ")", "else", ":", "return", "language_entry", ".", "get", "(", "'native_name'", ")" ]
get a language's name in the currently active locale .
train
false
7,638
def get_suggestions(request, user, project_ids): base = Translation.objects.prefetch().filter(subproject__project_id__in=project_ids).exclude(total=F('translated')).order_by('-translated') all_matching = base.none() if (user.is_authenticated() and user.profile.languages.exists()): all_matching = base.filter(language__in=user.profile.languages.all()).exclude(subproject__project__in=user.profile.subscriptions.all()) else: session_lang = translation.get_language() if (session_lang and (session_lang != 'en')): all_matching = base.filter(language__code=session_lang) if (not all_matching): all_matching = base.exclude(language__code='en') return all_matching[:10]
[ "def", "get_suggestions", "(", "request", ",", "user", ",", "project_ids", ")", ":", "base", "=", "Translation", ".", "objects", ".", "prefetch", "(", ")", ".", "filter", "(", "subproject__project_id__in", "=", "project_ids", ")", ".", "exclude", "(", "total", "=", "F", "(", "'translated'", ")", ")", ".", "order_by", "(", "'-translated'", ")", "all_matching", "=", "base", ".", "none", "(", ")", "if", "(", "user", ".", "is_authenticated", "(", ")", "and", "user", ".", "profile", ".", "languages", ".", "exists", "(", ")", ")", ":", "all_matching", "=", "base", ".", "filter", "(", "language__in", "=", "user", ".", "profile", ".", "languages", ".", "all", "(", ")", ")", ".", "exclude", "(", "subproject__project__in", "=", "user", ".", "profile", ".", "subscriptions", ".", "all", "(", ")", ")", "else", ":", "session_lang", "=", "translation", ".", "get_language", "(", ")", "if", "(", "session_lang", "and", "(", "session_lang", "!=", "'en'", ")", ")", ":", "all_matching", "=", "base", ".", "filter", "(", "language__code", "=", "session_lang", ")", "if", "(", "not", "all_matching", ")", ":", "all_matching", "=", "base", ".", "exclude", "(", "language__code", "=", "'en'", ")", "return", "all_matching", "[", ":", "10", "]" ]
return up to ten suggested translations with untranslated strings, based on the user's languages .
train
false
7,639
def get_dir_time_suffix(): dirfmt = '%4d-%02d-%02d_%02d%02d%02d' now = time.localtime()[0:6] dirname = (dirfmt % now) return dirname
[ "def", "get_dir_time_suffix", "(", ")", ":", "dirfmt", "=", "'%4d-%02d-%02d_%02d%02d%02d'", "now", "=", "time", ".", "localtime", "(", ")", "[", "0", ":", "6", "]", "dirname", "=", "(", "dirfmt", "%", "now", ")", "return", "dirname" ]
returns the name of a folder with the current time as suffix .
train
false
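The same format string, runnable on its own:

import time

dirname = '%4d-%02d-%02d_%02d%02d%02d' % time.localtime()[0:6]
print(dirname)  # e.g. '2016-03-01_142755'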
7,640
@event(u'manager.before_config_validate') def process_variables(config, manager): env_params = {u'block_start_string': u'^^disabled^^', u'block_end_string': u'^^disabled^^', u'variable_start_string': u'{?', u'variable_end_string': u'?}'} if ((u'variables' not in config) or (config.get(u'variables') is False)): return env = Environment(**env_params) if isinstance(config[u'variables'], bool): log.debug(u'trying to load variables from DB') variables = variables_from_db() else: log.debug(u'trying to load variables from file') variables = variables_from_file(manager.config_base, config[u'variables']) log.debug(u'updating DB with variable file contents') variables_to_db(variables) env.globals = variables _process(config, env) return config
[ "@", "event", "(", "u'manager.before_config_validate'", ")", "def", "process_variables", "(", "config", ",", "manager", ")", ":", "env_params", "=", "{", "u'block_start_string'", ":", "u'^^disabled^^'", ",", "u'block_end_string'", ":", "u'^^disabled^^'", ",", "u'variable_start_string'", ":", "u'{?'", ",", "u'variable_end_string'", ":", "u'?}'", "}", "if", "(", "(", "u'variables'", "not", "in", "config", ")", "or", "(", "config", ".", "get", "(", "u'variables'", ")", "is", "False", ")", ")", ":", "return", "env", "=", "Environment", "(", "**", "env_params", ")", "if", "isinstance", "(", "config", "[", "u'variables'", "]", ",", "bool", ")", ":", "log", ".", "debug", "(", "u'trying to load variables from DB'", ")", "variables", "=", "variables_from_db", "(", ")", "else", ":", "log", ".", "debug", "(", "u'trying to load variables from file'", ")", "variables", "=", "variables_from_file", "(", "manager", ".", "config_base", ",", "config", "[", "u'variables'", "]", ")", "log", ".", "debug", "(", "u'updating DB with variable file contents'", ")", "variables_to_db", "(", "variables", ")", "env", ".", "globals", "=", "variables", "_process", "(", "config", ",", "env", ")", "return", "config" ]
render all string elements of the config against defined variables .
train
false
7,642
def drop_missing(Y, X=None, axis=1): Y = np.asarray(Y) if (Y.ndim == 1): Y = Y[:, None] if (X is not None): X = np.array(X) if (X.ndim == 1): X = X[:, None] keepidx = np.logical_and((~ np.isnan(Y).any(axis)), (~ np.isnan(X).any(axis))) return (Y[keepidx], X[keepidx]) else: keepidx = (~ np.isnan(Y).any(axis)) return Y[keepidx]
[ "def", "drop_missing", "(", "Y", ",", "X", "=", "None", ",", "axis", "=", "1", ")", ":", "Y", "=", "np", ".", "asarray", "(", "Y", ")", "if", "(", "Y", ".", "ndim", "==", "1", ")", ":", "Y", "=", "Y", "[", ":", ",", "None", "]", "if", "(", "X", "is", "not", "None", ")", ":", "X", "=", "np", ".", "array", "(", "X", ")", "if", "(", "X", ".", "ndim", "==", "1", ")", ":", "X", "=", "X", "[", ":", ",", "None", "]", "keepidx", "=", "np", ".", "logical_and", "(", "(", "~", "np", ".", "isnan", "(", "Y", ")", ".", "any", "(", "axis", ")", ")", ",", "(", "~", "np", ".", "isnan", "(", "X", ")", ".", "any", "(", "axis", ")", ")", ")", "return", "(", "Y", "[", "keepidx", "]", ",", "X", "[", "keepidx", "]", ")", "else", ":", "keepidx", "=", "(", "~", "np", ".", "isnan", "(", "Y", ")", ".", "any", "(", "axis", ")", ")", "return", "Y", "[", "keepidx", "]" ]
returns views on the arrays y and x where missing observations are dropped .
train
false
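A small sketch of the paired-drop behavior, assuming drop_missing as defined above is in scope; a row is kept only if it is nan-free in both Y and X:

import numpy as np

Y = np.array([1.0, np.nan, 3.0, 4.0])
X = np.array([[1.0, 2.0], [3.0, 4.0], [np.nan, 6.0], [7.0, 8.0]])

Y2, X2 = drop_missing(Y, X)   # row 1 (nan in Y) and row 2 (nan in X) both go
print(Y2.ravel())             # [1. 4.]
print(X2)                     # [[1. 2.] [7. 8.]]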
7,644
def _diff_replication_group(current, desired): if (current.get('AutomaticFailover') is not None): current['AutomaticFailoverEnabled'] = (True if (current['AutomaticFailover'] in ('enabled', 'enabling')) else False) modifiable = {'AutomaticFailoverEnabled': 'AutomaticFailoverEnabled', 'AutoMinorVersionUpgrade': None, 'CacheNodeType': None, 'CacheParameterGroupName': None, 'CacheSecurityGroupNames': None, 'EngineVersion': None, 'NotificationTopicArn': None, 'NotificationTopicStatus': None, 'PreferredMaintenanceWindow': None, 'PrimaryClusterId': None, 'ReplicationGroupDescription': 'Description', 'SecurityGroupIds': None, 'SnapshotRetentionLimit': 'SnapshotRetentionLimit', 'SnapshottingClusterId': 'SnapshottingClusterId', 'SnapshotWindow': 'SnapshotWindow'} need_update = {} for (m, o) in modifiable.items(): if (m in desired): if (not o): need_update[m] = desired[m] elif (m in current): if (current[m] != desired[m]): need_update[m] = desired[m] return need_update
[ "def", "_diff_replication_group", "(", "current", ",", "desired", ")", ":", "if", "(", "current", ".", "get", "(", "'AutomaticFailover'", ")", "is", "not", "None", ")", ":", "current", "[", "'AutomaticFailoverEnabled'", "]", "=", "(", "True", "if", "(", "current", "[", "'AutomaticFailover'", "]", "in", "(", "'enabled'", ",", "'enabling'", ")", ")", "else", "False", ")", "modifiable", "=", "{", "'AutomaticFailoverEnabled'", ":", "'AutomaticFailoverEnabled'", ",", "'AutoMinorVersionUpgrade'", ":", "None", ",", "'CacheNodeType'", ":", "None", ",", "'CacheParameterGroupName'", ":", "None", ",", "'CacheSecurityGroupNames'", ":", "None", ",", "'EngineVersion'", ":", "None", ",", "'NotificationTopicArn'", ":", "None", ",", "'NotificationTopicStatus'", ":", "None", ",", "'PreferredMaintenanceWindow'", ":", "None", ",", "'PrimaryClusterId'", ":", "None", ",", "'ReplicationGroupDescription'", ":", "'Description'", ",", "'SecurityGroupIds'", ":", "None", ",", "'SnapshotRetentionLimit'", ":", "'SnapshotRetentionLimit'", ",", "'SnapshottingClusterId'", ":", "'SnapshottingClusterId'", ",", "'SnapshotWindow'", ":", "'SnapshotWindow'", "}", "need_update", "=", "{", "}", "for", "(", "m", ",", "o", ")", "in", "modifiable", ".", "items", "(", ")", ":", "if", "(", "m", "in", "desired", ")", ":", "if", "(", "not", "o", ")", ":", "need_update", "[", "m", "]", "=", "desired", "[", "m", "]", "elif", "(", "m", "in", "current", ")", ":", "if", "(", "current", "[", "m", "]", "!=", "desired", "[", "m", "]", ")", ":", "need_update", "[", "m", "]", "=", "desired", "[", "m", "]", "return", "need_update" ]
compute the replication group attributes that differ between current and desired ; enhance this if you need to change what modify_replication_group() considers when deciding what is to be updated .
train
true
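An illustrative diff, assuming the function above is in scope; the values are made up:

current = {'AutomaticFailover': 'enabled', 'SnapshotWindow': '05:00-06:00'}
desired = {'AutomaticFailoverEnabled': True,        # equal after normalization: dropped
           'SnapshotWindow': '03:00-04:00',         # present in both and different: kept
           'CacheNodeType': 'cache.t3.micro'}       # unmapped attr: passed through as-is
print(_diff_replication_group(current, desired))
# {'CacheNodeType': 'cache.t3.micro', 'SnapshotWindow': '03:00-04:00'}  (key order may vary)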
7,645
def service_present(name, service_type, description=None, profile=None, **connection_args): ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'Service "{0}" already exists'.format(name)} role = __salt__['keystone.service_get'](name=name, profile=profile, **connection_args) if ('Error' not in role): return ret else: if __opts__.get('test'): ret['result'] = None ret['comment'] = 'Service "{0}" will be added'.format(name) return ret __salt__['keystone.service_create'](name, service_type, description, profile=profile, **connection_args) ret['comment'] = 'Service "{0}" has been added'.format(name) ret['changes']['Service'] = 'Created' return ret
[ "def", "service_present", "(", "name", ",", "service_type", ",", "description", "=", "None", ",", "profile", "=", "None", ",", "**", "connection_args", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "'Service \"{0}\" already exists'", ".", "format", "(", "name", ")", "}", "role", "=", "__salt__", "[", "'keystone.service_get'", "]", "(", "name", "=", "name", ",", "profile", "=", "profile", ",", "**", "connection_args", ")", "if", "(", "'Error'", "not", "in", "role", ")", ":", "return", "ret", "else", ":", "if", "__opts__", ".", "get", "(", "'test'", ")", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Service \"{0}\" will be added'", ".", "format", "(", "name", ")", "return", "ret", "__salt__", "[", "'keystone.service_create'", "]", "(", "name", ",", "service_type", ",", "description", ",", "profile", "=", "profile", ",", "**", "connection_args", ")", "ret", "[", "'comment'", "]", "=", "'Service \"{0}\" has been added'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "[", "'Service'", "]", "=", "'Created'", "return", "ret" ]
ensure service present in keystone catalog ; name is the name of the service , service_type is the type of openstack service , and description is a description of the service .
train
true
7,646
def strip_quotes(table_name): has_quotes = (table_name.startswith('"') and table_name.endswith('"')) return (table_name[1:(-1)] if has_quotes else table_name)
[ "def", "strip_quotes", "(", "table_name", ")", ":", "has_quotes", "=", "(", "table_name", ".", "startswith", "(", "'\"'", ")", "and", "table_name", ".", "endswith", "(", "'\"'", ")", ")", "return", "(", "table_name", "[", "1", ":", "(", "-", "1", ")", "]", "if", "has_quotes", "else", "table_name", ")" ]
strip surrounding quotes from a table name, if present .
train
false
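Behavior at a glance (a sketch assuming strip_quotes is in scope):

assert strip_quotes('"auth_user"') == 'auth_user'  # fully quoted: quotes removed
assert strip_quotes('auth_user') == 'auth_user'    # unquoted: unchanged
assert strip_quotes('"oddball') == '"oddball'      # only a matching pair is stripped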
7,647
def notify_errors(request_id, error): try: _REQUESTS[request_id].notify_errors(request_id, error) except KeyError: pass
[ "def", "notify_errors", "(", "request_id", ",", "error", ")", ":", "try", ":", "_REQUESTS", "[", "request_id", "]", ".", "notify_errors", "(", "request_id", ",", "error", ")", "except", "KeyError", ":", "pass" ]
add errors to a config request .
train
false
7,650
@skip_if_on_windows @pytest.mark.parametrize('cmd, fmt, exp', [('pwd', None, (lambda : (os.getcwd() + '\n'))), ('echo WORKING', None, 'WORKING\n'), ('ls -f', (lambda out: out.splitlines().sort()), os.listdir().sort())]) def test_single_command(cmd, fmt, exp): (out, err, rtn) = run_xonsh(cmd, stderr=sp.DEVNULL) if callable(fmt): out = fmt(out) if callable(exp): exp = exp() assert (out == exp) assert (rtn == 0)
[ "@", "skip_if_on_windows", "@", "pytest", ".", "mark", ".", "parametrize", "(", "'cmd, fmt, exp'", ",", "[", "(", "'pwd'", ",", "None", ",", "(", "lambda", ":", "(", "os", ".", "getcwd", "(", ")", "+", "'\\n'", ")", ")", ")", ",", "(", "'echo WORKING'", ",", "None", ",", "'WORKING\\n'", ")", ",", "(", "'ls -f'", ",", "(", "lambda", "out", ":", "out", ".", "splitlines", "(", ")", ".", "sort", "(", ")", ")", ",", "os", ".", "listdir", "(", ")", ".", "sort", "(", ")", ")", "]", ")", "def", "test_single_command", "(", "cmd", ",", "fmt", ",", "exp", ")", ":", "(", "out", ",", "err", ",", "rtn", ")", "=", "run_xonsh", "(", "cmd", ",", "stderr", "=", "sp", ".", "DEVNULL", ")", "if", "callable", "(", "fmt", ")", ":", "out", "=", "fmt", "(", "out", ")", "if", "callable", "(", "exp", ")", ":", "exp", "=", "exp", "(", ")", "assert", "(", "out", "==", "exp", ")", "assert", "(", "rtn", "==", "0", ")" ]
test running a single command ; the fmt parameter is a function that formats the output of cmd .
train
false
7,652
def _load_provider_feature(feature, providers): features = [] for provider in providers: mod = __import__(provider, fromlist=[feature]) features = getattr(mod, feature)(features) return features
[ "def", "_load_provider_feature", "(", "feature", ",", "providers", ")", ":", "features", "=", "[", "]", "for", "provider", "in", "providers", ":", "mod", "=", "__import__", "(", "provider", ",", "fromlist", "=", "[", "feature", "]", ")", "features", "=", "getattr", "(", "mod", ",", "feature", ")", "(", "features", ")", "return", "features" ]
load the named feature from an ordered list of dotted-notation modules, each of which implements the feature .
train
false
7,653
@utils.arg('backup', metavar='<backup>', help='ID of the backup.') @utils.service_type('monitor') def do_backup_show(cs, args): backup = _find_backup(cs, args.backup) info = dict() info.update(backup._info) if ('links' in info): info.pop('links') utils.print_dict(info)
[ "@", "utils", ".", "arg", "(", "'backup'", ",", "metavar", "=", "'<backup>'", ",", "help", "=", "'ID of the backup.'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_backup_show", "(", "cs", ",", "args", ")", ":", "backup", "=", "_find_backup", "(", "cs", ",", "args", ".", "backup", ")", "info", "=", "dict", "(", ")", "info", ".", "update", "(", "backup", ".", "_info", ")", "if", "(", "'links'", "in", "info", ")", ":", "info", ".", "pop", "(", "'links'", ")", "utils", ".", "print_dict", "(", "info", ")" ]
show details about a backup .
train
false
7,654
def _load_encryption(proxy_conf_file, **kwargs): _debug('Setting configuration for encryption') conf = ConfigParser() conf.read(proxy_conf_file) try: section = 'pipeline:main' pipeline = conf.get(section, 'pipeline') pipeline = pipeline.replace('proxy-logging proxy-server', 'keymaster encryption proxy-logging proxy-server') conf.set(section, 'pipeline', pipeline) root_secret = os.urandom(32).encode('base64') conf.set('filter:keymaster', 'encryption_root_secret', root_secret) except NoSectionError as err: msg = ('Error problem with proxy conf file %s: %s' % (proxy_conf_file, err)) raise InProcessException(msg) test_conf_file = os.path.join(_testdir, 'proxy-server.conf') with open(test_conf_file, 'w') as fp: conf.write(fp) return test_conf_file
[ "def", "_load_encryption", "(", "proxy_conf_file", ",", "**", "kwargs", ")", ":", "_debug", "(", "'Setting configuration for encryption'", ")", "conf", "=", "ConfigParser", "(", ")", "conf", ".", "read", "(", "proxy_conf_file", ")", "try", ":", "section", "=", "'pipeline:main'", "pipeline", "=", "conf", ".", "get", "(", "section", ",", "'pipeline'", ")", "pipeline", "=", "pipeline", ".", "replace", "(", "'proxy-logging proxy-server'", ",", "'keymaster encryption proxy-logging proxy-server'", ")", "conf", ".", "set", "(", "section", ",", "'pipeline'", ",", "pipeline", ")", "root_secret", "=", "os", ".", "urandom", "(", "32", ")", ".", "encode", "(", "'base64'", ")", "conf", ".", "set", "(", "'filter:keymaster'", ",", "'encryption_root_secret'", ",", "root_secret", ")", "except", "NoSectionError", "as", "err", ":", "msg", "=", "(", "'Error problem with proxy conf file %s: %s'", "%", "(", "proxy_conf_file", ",", "err", ")", ")", "raise", "InProcessException", "(", "msg", ")", "test_conf_file", "=", "os", ".", "path", ".", "join", "(", "_testdir", ",", "'proxy-server.conf'", ")", "with", "open", "(", "test_conf_file", ",", "'w'", ")", "as", "fp", ":", "conf", ".", "write", "(", "fp", ")", "return", "test_conf_file" ]
load encryption configuration and override the proxy-server.conf contents .
train
false
7,656
@task() @timeit def measure_queue_lag(queued_time): lag = (datetime.now() - queued_time) lag = (((lag.days * 3600) * 24) + lag.seconds) statsd.gauge('rabbitmq.lag', max(lag, 0))
[ "@", "task", "(", ")", "@", "timeit", "def", "measure_queue_lag", "(", "queued_time", ")", ":", "lag", "=", "(", "datetime", ".", "now", "(", ")", "-", "queued_time", ")", "lag", "=", "(", "(", "(", "lag", ".", "days", "*", "3600", ")", "*", "24", ")", "+", "lag", ".", "seconds", ")", "statsd", ".", "gauge", "(", "'rabbitmq.lag'", ",", "max", "(", "lag", ",", "0", ")", ")" ]
a task that measures the time it was sitting in the queue .
train
false
7,657
@task @needs('pavelib.i18n.i18n_validate_transifex_config', 'pavelib.i18n.i18n_generate') @timed def i18n_release_push(): resources = find_release_resources() sh(('i18n_tool transifex push ' + ' '.join(resources)))
[ "@", "task", "@", "needs", "(", "'pavelib.i18n.i18n_validate_transifex_config'", ",", "'pavelib.i18n.i18n_generate'", ")", "@", "timed", "def", "i18n_release_push", "(", ")", ":", "resources", "=", "find_release_resources", "(", ")", "sh", "(", "(", "'i18n_tool transifex push '", "+", "' '", ".", "join", "(", "resources", ")", ")", ")" ]
push release-specific resources to transifex .
train
false
7,658
def yearly_activity(request, project=None, subproject=None, lang=None, user=None): activity = get_json_stats(request, 364, 7, project, subproject, lang, user) serie = [] labels = [] month = (-1) for item in activity: serie.append(item[1]) if (month != item[0].month): labels.append(pgettext('Format string for yearly activity chart', '{month}/{year}').format(month=item[0].month, year=item[0].year)) month = item[0].month else: labels.append('') return JsonResponse(data={'series': [serie], 'labels': labels})
[ "def", "yearly_activity", "(", "request", ",", "project", "=", "None", ",", "subproject", "=", "None", ",", "lang", "=", "None", ",", "user", "=", "None", ")", ":", "activity", "=", "get_json_stats", "(", "request", ",", "364", ",", "7", ",", "project", ",", "subproject", ",", "lang", ",", "user", ")", "serie", "=", "[", "]", "labels", "=", "[", "]", "month", "=", "(", "-", "1", ")", "for", "item", "in", "activity", ":", "serie", ".", "append", "(", "item", "[", "1", "]", ")", "if", "(", "month", "!=", "item", "[", "0", "]", ".", "month", ")", ":", "labels", ".", "append", "(", "pgettext", "(", "'Format string for yearly activity chart'", ",", "'{month}/{year}'", ")", ".", "format", "(", "month", "=", "item", "[", "0", "]", ".", "month", ",", "year", "=", "item", "[", "0", "]", ".", "year", ")", ")", "month", "=", "item", "[", "0", "]", ".", "month", "else", ":", "labels", ".", "append", "(", "''", ")", "return", "JsonResponse", "(", "data", "=", "{", "'series'", ":", "[", "serie", "]", ",", "'labels'", ":", "labels", "}", ")" ]
returns yearly activity for matching changes as json .
train
false
7,659
def projective_transform_by_points(x, src, dst, map_args={}, output_shape=None, order=1, mode='constant', cval=0.0, clip=True, preserve_range=False): if (type(src) is list): src = np.array(src) if (type(dst) is list): dst = np.array(dst) if (np.max(x) > 1): x = (x / 255) m = transform.ProjectiveTransform() m.estimate(dst, src) warped = transform.warp(x, m, map_args=map_args, output_shape=output_shape, order=order, mode=mode, cval=cval, clip=clip, preserve_range=preserve_range) return warped
[ "def", "projective_transform_by_points", "(", "x", ",", "src", ",", "dst", ",", "map_args", "=", "{", "}", ",", "output_shape", "=", "None", ",", "order", "=", "1", ",", "mode", "=", "'constant'", ",", "cval", "=", "0.0", ",", "clip", "=", "True", ",", "preserve_range", "=", "False", ")", ":", "if", "(", "type", "(", "src", ")", "is", "list", ")", ":", "src", "=", "np", ".", "array", "(", "src", ")", "if", "(", "type", "(", "dst", ")", "is", "list", ")", ":", "dst", "=", "np", ".", "array", "(", "dst", ")", "if", "(", "np", ".", "max", "(", "x", ")", ">", "1", ")", ":", "x", "=", "(", "x", "/", "255", ")", "m", "=", "transform", ".", "ProjectiveTransform", "(", ")", "m", ".", "estimate", "(", "dst", ",", "src", ")", "warped", "=", "transform", ".", "warp", "(", "x", ",", "m", ",", "map_args", "=", "map_args", ",", "output_shape", "=", "output_shape", ",", "order", "=", "order", ",", "mode", "=", "mode", ",", "cval", "=", "cval", ",", "clip", "=", "clip", ",", "preserve_range", "=", "preserve_range", ")", "return", "warped" ]
projective transform by given coordinates .
train
true
7,661
def _allmsgs(obj): if isinstance(obj, MultiMessage): return obj.messages elif (obj == BUBBLE): return [] else: return [obj]
[ "def", "_allmsgs", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "MultiMessage", ")", ":", "return", "obj", ".", "messages", "elif", "(", "obj", "==", "BUBBLE", ")", ":", "return", "[", "]", "else", ":", "return", "[", "obj", "]" ]
returns a list of all the messages encapsulated in obj .
train
false
7,662
def safechardecode(value, binary=False): retVal = value if isinstance(value, basestring): retVal = retVal.replace('\\\\', SLASH_MARKER) while True: match = re.search(HEX_ENCODED_CHAR_REGEX, retVal) if match: retVal = retVal.replace(match.group('result'), (unichr if isinstance(value, unicode) else chr)(ord(binascii.unhexlify(match.group('result').lstrip('\\x'))))) else: break for char in SAFE_ENCODE_SLASH_REPLACEMENTS[::(-1)]: retVal = retVal.replace(repr(char).strip("'"), char) retVal = retVal.replace(SLASH_MARKER, '\\') if binary: if isinstance(retVal, unicode): retVal = retVal.encode('utf8') while True: match = re.search(INVALID_UNICODE_CHAR_REGEX, retVal) if match: retVal = retVal.replace(match.group('result'), chr(ord(binascii.unhexlify(match.group('result').lstrip('\\?'))))) else: break elif isinstance(value, (list, tuple)): for i in xrange(len(value)): retVal[i] = safechardecode(value[i]) return retVal
[ "def", "safechardecode", "(", "value", ",", "binary", "=", "False", ")", ":", "retVal", "=", "value", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "retVal", "=", "retVal", ".", "replace", "(", "'\\\\\\\\'", ",", "SLASH_MARKER", ")", "while", "True", ":", "match", "=", "re", ".", "search", "(", "HEX_ENCODED_CHAR_REGEX", ",", "retVal", ")", "if", "match", ":", "retVal", "=", "retVal", ".", "replace", "(", "match", ".", "group", "(", "'result'", ")", ",", "(", "unichr", "if", "isinstance", "(", "value", ",", "unicode", ")", "else", "chr", ")", "(", "ord", "(", "binascii", ".", "unhexlify", "(", "match", ".", "group", "(", "'result'", ")", ".", "lstrip", "(", "'\\\\x'", ")", ")", ")", ")", ")", "else", ":", "break", "for", "char", "in", "SAFE_ENCODE_SLASH_REPLACEMENTS", "[", ":", ":", "(", "-", "1", ")", "]", ":", "retVal", "=", "retVal", ".", "replace", "(", "repr", "(", "char", ")", ".", "strip", "(", "\"'\"", ")", ",", "char", ")", "retVal", "=", "retVal", ".", "replace", "(", "SLASH_MARKER", ",", "'\\\\'", ")", "if", "binary", ":", "if", "isinstance", "(", "retVal", ",", "unicode", ")", ":", "retVal", "=", "retVal", ".", "encode", "(", "'utf8'", ")", "while", "True", ":", "match", "=", "re", ".", "search", "(", "INVALID_UNICODE_CHAR_REGEX", ",", "retVal", ")", "if", "match", ":", "retVal", "=", "retVal", ".", "replace", "(", "match", ".", "group", "(", "'result'", ")", ",", "chr", "(", "ord", "(", "binascii", ".", "unhexlify", "(", "match", ".", "group", "(", "'result'", ")", ".", "lstrip", "(", "'\\\\?'", ")", ")", ")", ")", ")", "else", ":", "break", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "i", "in", "xrange", "(", "len", "(", "value", ")", ")", ":", "retVal", "[", "i", "]", "=", "safechardecode", "(", "value", "[", "i", "]", ")", "return", "retVal" ]
reverse function to safecharencode .
train
false
7,663
def xtext_decode(s, errors=None): r = [] i = 0 while (i < len(s)): if (s[i] == '+'): try: r.append(chr(int(s[(i + 1):(i + 3)], 16))) except ValueError: r.append(s[i:(i + 3)]) i += 3 else: r.append(s[i]) i += 1 return (''.join(r), len(s))
[ "def", "xtext_decode", "(", "s", ",", "errors", "=", "None", ")", ":", "r", "=", "[", "]", "i", "=", "0", "while", "(", "i", "<", "len", "(", "s", ")", ")", ":", "if", "(", "s", "[", "i", "]", "==", "'+'", ")", ":", "try", ":", "r", ".", "append", "(", "chr", "(", "int", "(", "s", "[", "(", "i", "+", "1", ")", ":", "(", "i", "+", "3", ")", "]", ",", "16", ")", ")", ")", "except", "ValueError", ":", "r", ".", "append", "(", "s", "[", "i", ":", "(", "i", "+", "3", ")", "]", ")", "i", "+=", "3", "else", ":", "r", ".", "append", "(", "s", "[", "i", "]", ")", "i", "+=", "1", "return", "(", "''", ".", "join", "(", "r", ")", ",", "len", "(", "s", ")", ")" ]
decode the xtext-encoded string s .
train
false
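In xtext, '+' introduces two hex digits for one byte and everything else is literal; a quick check assuming the function above is in scope:

decoded, consumed = xtext_decode('hello+20world+2B')
print(decoded)   # 'hello world+'  (+20 -> space, +2B -> '+')
print(consumed)  # 16, the length of the encoded input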
7,664
def view_config_changes(config, num=None): rev = reverter.Reverter(config) rev.recovery_routine() rev.view_config_changes(num)
[ "def", "view_config_changes", "(", "config", ",", "num", "=", "None", ")", ":", "rev", "=", "reverter", ".", "Reverter", "(", "config", ")", "rev", ".", "recovery_routine", "(", ")", "rev", ".", "view_config_changes", "(", "num", ")" ]
view checkpoints and associated configuration changes .
train
false
7,665
def translate_jobconf(variable, version): if (version is None): raise TypeError if (variable in _JOBCONF_MAP): return map_version(version, _JOBCONF_MAP[variable]) else: return variable
[ "def", "translate_jobconf", "(", "variable", ",", "version", ")", ":", "if", "(", "version", "is", "None", ")", ":", "raise", "TypeError", "if", "(", "variable", "in", "_JOBCONF_MAP", ")", ":", "return", "map_version", "(", "version", ",", "_JOBCONF_MAP", "[", "variable", "]", ")", "else", ":", "return", "variable" ]
translate *variable* to hadoop version *version* .
train
false
7,666
def get_qos(tenant_id, qos_id): LOG.debug(_('get_qos() called')) session = db.get_session() try: qos = session.query(l2network_models.QoS).filter_by(tenant_id=tenant_id).filter_by(qos_id=qos_id).one() return qos except exc.NoResultFound: raise c_exc.QosNotFound(qos_id=qos_id, tenant_id=tenant_id)
[ "def", "get_qos", "(", "tenant_id", ",", "qos_id", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'get_qos() called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "qos", "=", "session", ".", "query", "(", "l2network_models", ".", "QoS", ")", ".", "filter_by", "(", "tenant_id", "=", "tenant_id", ")", ".", "filter_by", "(", "qos_id", "=", "qos_id", ")", ".", "one", "(", ")", "return", "qos", "except", "exc", ".", "NoResultFound", ":", "raise", "c_exc", ".", "QosNotFound", "(", "qos_id", "=", "qos_id", ",", "tenant_id", "=", "tenant_id", ")" ]
lists the qos given a tenant_id and qos_id .
train
false
7,667
def l(path): if (path and path.startswith('/')): base_url = getattr(settings, 'SCRIPT_NAME', '') return (base_url + path) return path
[ "def", "l", "(", "path", ")", ":", "if", "(", "path", "and", "path", ".", "startswith", "(", "'/'", ")", ")", ":", "base_url", "=", "getattr", "(", "settings", ",", "'SCRIPT_NAME'", ",", "''", ")", "return", "(", "base_url", "+", "path", ")", "return", "path" ]
filter urls adding base_path prefix if required .
train
false
7,668
def strip_formatted_string(str): return re.sub('\\s\\s+', ' ', str).strip()
[ "def", "strip_formatted_string", "(", "str", ")", ":", "return", "re", ".", "sub", "(", "'\\\\s\\\\s+'", ",", "' '", ",", "str", ")", ".", "strip", "(", ")" ]
remove multiple whitespaces and whitespaces at the beginning and end of the given string .
train
false
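A self-contained version of the same one-liner:

import re

def strip_formatted_string(s):
    # collapse any run of 2+ whitespace characters to one space, then trim the ends
    return re.sub(r'\s\s+', ' ', s).strip()

print(strip_formatted_string('  a   b \t\n c  '))  # 'a b c'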
7,669
@conf.commands.register def fragment(pkt, fragsize=1480): fragsize = (((fragsize + 7) / 8) * 8) lst = [] for p in pkt: s = str(p[IP].payload) nb = (((len(s) + fragsize) - 1) / fragsize) for i in range(nb): q = p.copy() del q[IP].payload del q[IP].chksum del q[IP].len if (i == (nb - 1)): q[IP].flags &= (~ 1) else: q[IP].flags |= 1 q[IP].frag = ((i * fragsize) / 8) r = Raw(load=s[(i * fragsize):((i + 1) * fragsize)]) r.overload_fields = p[IP].payload.overload_fields.copy() q.add_payload(r) lst.append(q) return lst
[ "@", "conf", ".", "commands", ".", "register", "def", "fragment", "(", "pkt", ",", "fragsize", "=", "1480", ")", ":", "fragsize", "=", "(", "(", "(", "fragsize", "+", "7", ")", "/", "8", ")", "*", "8", ")", "lst", "=", "[", "]", "for", "p", "in", "pkt", ":", "s", "=", "str", "(", "p", "[", "IP", "]", ".", "payload", ")", "nb", "=", "(", "(", "(", "len", "(", "s", ")", "+", "fragsize", ")", "-", "1", ")", "/", "fragsize", ")", "for", "i", "in", "range", "(", "nb", ")", ":", "q", "=", "p", ".", "copy", "(", ")", "del", "q", "[", "IP", "]", ".", "payload", "del", "q", "[", "IP", "]", ".", "chksum", "del", "q", "[", "IP", "]", ".", "len", "if", "(", "i", "==", "(", "nb", "-", "1", ")", ")", ":", "q", "[", "IP", "]", ".", "flags", "&=", "(", "~", "1", ")", "else", ":", "q", "[", "IP", "]", ".", "flags", "|=", "1", "q", "[", "IP", "]", ".", "frag", "=", "(", "(", "i", "*", "fragsize", ")", "/", "8", ")", "r", "=", "Raw", "(", "load", "=", "s", "[", "(", "i", "*", "fragsize", ")", ":", "(", "(", "i", "+", "1", ")", "*", "fragsize", ")", "]", ")", "r", ".", "overload_fields", "=", "p", "[", "IP", "]", ".", "payload", ".", "overload_fields", ".", "copy", "(", ")", "q", ".", "add_payload", "(", "r", ")", "lst", ".", "append", "(", "q", ")", "return", "lst" ]
fragment a big ip datagram into pieces of at most fragsize bytes .
train
false
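Two details in the snippet deserve a worked example: fragsize is rounded up to a multiple of 8 (IP fragment offsets are expressed in 8-byte units), and the snippet's `/` is Python-2 integer division. The arithmetic in isolation, no scapy needed:

fragsize = (1402 + 7) // 8 * 8                       # round up to a multiple of 8 -> 1408
payload_len = 3000
nb = (payload_len + fragsize - 1) // fragsize        # ceil(3000 / 1408) -> 3 fragments
offsets = [i * fragsize // 8 for i in range(nb)]     # 8-byte units: [0, 176, 352]
print(fragsize, nb, offsets)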
7,670
def _common_bytes(blocks1, blocks2): if (len(blocks1) > len(blocks2)): (blocks1, blocks2) = (blocks2, blocks1) score = 0 for (block, count1) in blocks1.items(): count2 = blocks2.get(block) if count2: score += min(count1, count2) return score
[ "def", "_common_bytes", "(", "blocks1", ",", "blocks2", ")", ":", "if", "(", "len", "(", "blocks1", ")", ">", "len", "(", "blocks2", ")", ")", ":", "(", "blocks1", ",", "blocks2", ")", "=", "(", "blocks2", ",", "blocks1", ")", "score", "=", "0", "for", "(", "block", ",", "count1", ")", "in", "blocks1", ".", "items", "(", ")", ":", "count2", "=", "blocks2", ".", "get", "(", "block", ")", "if", "count2", ":", "score", "+=", "min", "(", "count1", ",", "count2", ")", "return", "score" ]
count the number of common bytes in two block count dicts .
train
false
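A tiny example assuming _common_bytes is in scope; the block dicts are hypothetical chunk counts for two inputs:

blocks1 = {b'aaaa': 3, b'bbbb': 1}
blocks2 = {b'aaaa': 2, b'cccc': 5}

# only b'aaaa' is shared; the score is min(3, 2)
print(_common_bytes(blocks1, blocks2))  # 2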
7,671
def _get_image_properties(image): (immin, immax) = (np.min(image), np.max(image)) imtype = image.dtype.type try: (lo, hi) = dtypes.dtype_range[imtype] except KeyError: (lo, hi) = (immin, immax) signed = (immin < 0) out_of_range_float = (np.issubdtype(image.dtype, np.float) and ((immin < lo) or (immax > hi))) low_data_range = ((immin != immax) and is_low_contrast(image)) unsupported_dtype = (image.dtype not in dtypes._supported_types) return ImageProperties(signed, out_of_range_float, low_data_range, unsupported_dtype)
[ "def", "_get_image_properties", "(", "image", ")", ":", "(", "immin", ",", "immax", ")", "=", "(", "np", ".", "min", "(", "image", ")", ",", "np", ".", "max", "(", "image", ")", ")", "imtype", "=", "image", ".", "dtype", ".", "type", "try", ":", "(", "lo", ",", "hi", ")", "=", "dtypes", ".", "dtype_range", "[", "imtype", "]", "except", "KeyError", ":", "(", "lo", ",", "hi", ")", "=", "(", "immin", ",", "immax", ")", "signed", "=", "(", "immin", "<", "0", ")", "out_of_range_float", "=", "(", "np", ".", "issubdtype", "(", "image", ".", "dtype", ",", "np", ".", "float", ")", "and", "(", "(", "immin", "<", "lo", ")", "or", "(", "immax", ">", "hi", ")", ")", ")", "low_data_range", "=", "(", "(", "immin", "!=", "immax", ")", "and", "is_low_contrast", "(", "image", ")", ")", "unsupported_dtype", "=", "(", "image", ".", "dtype", "not", "in", "dtypes", ".", "_supported_types", ")", "return", "ImageProperties", "(", "signed", ",", "out_of_range_float", ",", "low_data_range", ",", "unsupported_dtype", ")" ]
determine nonstandard properties of an input image .
train
false
7,672
def contracted_nodes(G, u, v, self_loops=True): H = G.copy() if H.is_directed(): in_edges = ((w, u, d) for (w, x, d) in G.in_edges(v, data=True) if (self_loops or (w != u))) out_edges = ((u, w, d) for (x, w, d) in G.out_edges(v, data=True) if (self_loops or (w != u))) new_edges = chain(in_edges, out_edges) else: new_edges = ((u, w, d) for (x, w, d) in G.edges(v, data=True) if (self_loops or (w != u))) v_data = H.node[v] H.remove_node(v) H.add_edges_from(new_edges) if ('contraction' in H.node[u]): H.node[u]['contraction'][v] = v_data else: H.node[u]['contraction'] = {v: v_data} return H
[ "def", "contracted_nodes", "(", "G", ",", "u", ",", "v", ",", "self_loops", "=", "True", ")", ":", "H", "=", "G", ".", "copy", "(", ")", "if", "H", ".", "is_directed", "(", ")", ":", "in_edges", "=", "(", "(", "w", ",", "u", ",", "d", ")", "for", "(", "w", ",", "x", ",", "d", ")", "in", "G", ".", "in_edges", "(", "v", ",", "data", "=", "True", ")", "if", "(", "self_loops", "or", "(", "w", "!=", "u", ")", ")", ")", "out_edges", "=", "(", "(", "u", ",", "w", ",", "d", ")", "for", "(", "x", ",", "w", ",", "d", ")", "in", "G", ".", "out_edges", "(", "v", ",", "data", "=", "True", ")", "if", "(", "self_loops", "or", "(", "w", "!=", "u", ")", ")", ")", "new_edges", "=", "chain", "(", "in_edges", ",", "out_edges", ")", "else", ":", "new_edges", "=", "(", "(", "u", ",", "w", ",", "d", ")", "for", "(", "x", ",", "w", ",", "d", ")", "in", "G", ".", "edges", "(", "v", ",", "data", "=", "True", ")", "if", "(", "self_loops", "or", "(", "w", "!=", "u", ")", ")", ")", "v_data", "=", "H", ".", "node", "[", "v", "]", "H", ".", "remove_node", "(", "v", ")", "H", ".", "add_edges_from", "(", "new_edges", ")", "if", "(", "'contraction'", "in", "H", ".", "node", "[", "u", "]", ")", ":", "H", ".", "node", "[", "u", "]", "[", "'contraction'", "]", "[", "v", "]", "=", "v_data", "else", ":", "H", ".", "node", "[", "u", "]", "[", "'contraction'", "]", "=", "{", "v", ":", "v_data", "}", "return", "H" ]
returns the graph that results from contracting u and v .
train
false
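Contracting two opposite corners of a 4-cycle, assuming the function above is in scope (note it uses the old networkx 1.x `H.node` accessor):

import networkx as nx

G = nx.cycle_graph(4)              # edges 0-1, 1-2, 2-3, 3-0
H = contracted_nodes(G, 0, 2)      # merge node 2 into node 0
print(sorted(H.edges()))           # [(0, 1), (0, 3)]
print(H.node[0]['contraction'])    # {2: {}} -- a record of the absorbed node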
7,676
def _dep_eeg_ref(add_eeg_ref): add_eeg_ref = bool(add_eeg_ref) if add_eeg_ref: warn('add_eeg_ref will be removed in 0.15, use set_eeg_reference() instead', DeprecationWarning) return add_eeg_ref
[ "def", "_dep_eeg_ref", "(", "add_eeg_ref", ")", ":", "add_eeg_ref", "=", "bool", "(", "add_eeg_ref", ")", "if", "add_eeg_ref", ":", "warn", "(", "'add_eeg_ref will be removed in 0.15, use set_eeg_reference() instead'", ",", "DeprecationWarning", ")", "return", "add_eeg_ref" ]
helper for the deprecation of add_eeg_ref (its default is moving to false) .
train
false
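A sketch of observing the deprecation, under the assumption that mne's `warn` helper ultimately routes through the stdlib warnings machinery:

    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        print(_dep_eeg_ref(True))           # True, and a DeprecationWarning fires
        print(_dep_eeg_ref(False))          # False, silently
    print([w.category.__name__ for w in caught])   # ['DeprecationWarning']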
7,677
@with_setup(prepare_stdout) def test_commented_scenario(): runner = Runner(feature_name('commented_feature'), verbosity=1) runner.run() assert_stdout_lines('.\n1 feature (1 passed)\n1 scenario (1 passed)\n1 step (1 passed)\n')
[ "@", "with_setup", "(", "prepare_stdout", ")", "def", "test_commented_scenario", "(", ")", ":", "runner", "=", "Runner", "(", "feature_name", "(", "'commented_feature'", ")", ",", "verbosity", "=", "1", ")", "runner", ".", "run", "(", ")", "assert_stdout_lines", "(", "'.\\n1 feature (1 passed)\\n1 scenario (1 passed)\\n1 step (1 passed)\\n'", ")" ]
test one commented scenario .
train
false
7,678
def parse_host_port(address, default_port=None): if (address[0] == '['): (_host, _port) = address[1:].split(']') host = _host if (':' in _port): port = _port.split(':')[1] else: port = default_port elif (address.count(':') == 1): (host, port) = address.split(':') else: host = address port = default_port return (host, (None if (port is None) else int(port)))
[ "def", "parse_host_port", "(", "address", ",", "default_port", "=", "None", ")", ":", "if", "(", "address", "[", "0", "]", "==", "'['", ")", ":", "(", "_host", ",", "_port", ")", "=", "address", "[", "1", ":", "]", ".", "split", "(", "']'", ")", "host", "=", "_host", "if", "(", "':'", "in", "_port", ")", ":", "port", "=", "_port", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "port", "=", "default_port", "elif", "(", "address", ".", "count", "(", "':'", ")", "==", "1", ")", ":", "(", "host", ",", "port", ")", "=", "address", ".", "split", "(", "':'", ")", "else", ":", "host", "=", "address", "port", "=", "default_port", "return", "(", "host", ",", "(", "None", "if", "(", "port", "is", "None", ")", "else", "int", "(", "port", ")", ")", ")" ]
interpret a string as a host:port pair .
train
false
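Expected behaviour on a few representative inputs (the hosts and ports are hypothetical):

    print(parse_host_port('example.com:8080'))   # ('example.com', 8080)
    print(parse_host_port('example.com', 80))    # ('example.com', 80)
    print(parse_host_port('[::1]:5432'))         # ('::1', 5432)
    print(parse_host_port('[::1]'))              # ('::1', None) -- falls back to default_port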
7,679
@register.inclusion_tag('utilities/render_custom_fields.html') def render_custom_fields(form): return {'form': form}
[ "@", "register", ".", "inclusion_tag", "(", "'utilities/render_custom_fields.html'", ")", "def", "render_custom_fields", "(", "form", ")", ":", "return", "{", "'form'", ":", "form", "}" ]
render all custom fields in a form .
train
false
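Usage lives on the template side; a hedged sketch in which the tag-library name is a placeholder:

    # in a Django template, after loading the tag library:
    #   {% load helpers %}                  <- hypothetical library name
    #   {% render_custom_fields form %}
    # the tag simply hands the form to utilities/render_custom_fields.html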
7,680
def axapi_enabled_disabled(flag): if (flag == 'enabled'): return 1 else: return 0
[ "def", "axapi_enabled_disabled", "(", "flag", ")", ":", "if", "(", "flag", "==", "'enabled'", ")", ":", "return", "1", "else", ":", "return", "0" ]
the axapi uses 0/1 integer values for flags .
train
false
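The mapping in one glance; note that any string other than 'enabled' maps to 0, with no validation:

    print(axapi_enabled_disabled('enabled'))    # 1
    print(axapi_enabled_disabled('disabled'))   # 0
    print(axapi_enabled_disabled('bogus'))      # 0 as well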
7,681
def processtime(): (user, system) = systimes() return (user + system)
[ "def", "processtime", "(", ")", ":", "(", "user", ",", "system", ")", "=", "systimes", "(", ")", "return", "(", "user", "+", "system", ")" ]
return the total time spent on the process .
train
false
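systimes() is defined elsewhere in the same module (pybench-style); a sketch that runs alongside the snippet, under the assumption that it wraps os.times():

    import os
    def systimes():                         # assumed shape of the real helper
        t = os.times()
        return t.user, t.system
    print(processtime())                    # total CPU seconds used so far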
7,682
def RetryWithBackoff(callable_func, retry_notify_func, initial_delay=1, backoff_factor=2, max_delay=60, max_tries=20): delay = initial_delay num_tries = 0 while True: (done, opaque_value) = callable_func() num_tries += 1 if done: return (True, opaque_value) if (num_tries >= max_tries): return (False, opaque_value) retry_notify_func(opaque_value, delay) time.sleep(delay) delay = min((delay * backoff_factor), max_delay)
[ "def", "RetryWithBackoff", "(", "callable_func", ",", "retry_notify_func", ",", "initial_delay", "=", "1", ",", "backoff_factor", "=", "2", ",", "max_delay", "=", "60", ",", "max_tries", "=", "20", ")", ":", "delay", "=", "initial_delay", "num_tries", "=", "0", "while", "True", ":", "(", "done", ",", "opaque_value", ")", "=", "callable_func", "(", ")", "num_tries", "+=", "1", "if", "done", ":", "return", "(", "True", ",", "opaque_value", ")", "if", "(", "num_tries", ">=", "max_tries", ")", ":", "return", "(", "False", ",", "opaque_value", ")", "retry_notify_func", "(", "opaque_value", ",", "delay", ")", "time", ".", "sleep", "(", "delay", ")", "delay", "=", "min", "(", "(", "delay", "*", "backoff_factor", ")", ",", "max_delay", ")" ]
calls a function repeatedly with exponential backoff until it reports success or max_tries is reached .
train
false
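A minimal usage sketch (the snippet assumes `import time` at module scope): the callable returns (done, opaque_value) and the notify hook is told which delay comes next:

    attempts = {'n': 0}
    def flaky():
        attempts['n'] += 1
        return attempts['n'] >= 3, attempts['n']    # succeed on the third try
    def notify(value, delay):
        print('try %d failed, sleeping %ss' % (value, delay))
    print(RetryWithBackoff(flaky, notify, initial_delay=0.01))   # (True, 3)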
7,684
def _get_source_sum(source_hash, file_path, saltenv): ret = dict() schemes = (u'salt', u'http', u'https', u'ftp', u'swift', u's3', u'file') invalid_hash_msg = u"Source hash '{0}' format is invalid. It must be in the format <hash type>=<hash>".format(source_hash) source_hash = str(source_hash) source_hash_scheme = _urlparse(source_hash).scheme if (source_hash_scheme in schemes): cached_hash_file = __salt__[u'cp.cache_file'](source_hash, saltenv) if (not cached_hash_file): raise CommandExecutionError(u'Source hash file {0} not found'.format(source_hash)) ret = __salt__[u'file.extract_hash'](cached_hash_file, u'', file_path) if (ret is None): raise SaltInvocationError(invalid_hash_msg) else: items = source_hash.split(u'=', 1) if (len(items) != 2): invalid_hash_msg = u'{0}, or it must be a supported protocol: {1}'.format(invalid_hash_msg, u', '.join(schemes)) raise SaltInvocationError(invalid_hash_msg) (ret[u'hash_type'], ret[u'hsum']) = [item.strip().lower() for item in items] return ret
[ "def", "_get_source_sum", "(", "source_hash", ",", "file_path", ",", "saltenv", ")", ":", "ret", "=", "dict", "(", ")", "schemes", "=", "(", "u'salt'", ",", "u'http'", ",", "u'https'", ",", "u'ftp'", ",", "u'swift'", ",", "u's3'", ",", "u'file'", ")", "invalid_hash_msg", "=", "u\"Source hash '{0}' format is invalid. It must be in the format <hash type>=<hash>\"", ".", "format", "(", "source_hash", ")", "source_hash", "=", "str", "(", "source_hash", ")", "source_hash_scheme", "=", "_urlparse", "(", "source_hash", ")", ".", "scheme", "if", "(", "source_hash_scheme", "in", "schemes", ")", ":", "cached_hash_file", "=", "__salt__", "[", "u'cp.cache_file'", "]", "(", "source_hash", ",", "saltenv", ")", "if", "(", "not", "cached_hash_file", ")", ":", "raise", "CommandExecutionError", "(", "u'Source hash file {0} not found'", ".", "format", "(", "source_hash", ")", ")", "ret", "=", "__salt__", "[", "u'file.extract_hash'", "]", "(", "cached_hash_file", ",", "u''", ",", "file_path", ")", "if", "(", "ret", "is", "None", ")", ":", "raise", "SaltInvocationError", "(", "invalid_hash_msg", ")", "else", ":", "items", "=", "source_hash", ".", "split", "(", "u'='", ",", "1", ")", "if", "(", "len", "(", "items", ")", "!=", "2", ")", ":", "invalid_hash_msg", "=", "u'{0}, or it must be a supported protocol: {1}'", ".", "format", "(", "invalid_hash_msg", ",", "u', '", ".", "join", "(", "schemes", ")", ")", "raise", "SaltInvocationError", "(", "invalid_hash_msg", ")", "(", "ret", "[", "u'hash_type'", "]", ",", "ret", "[", "u'hsum'", "]", ")", "=", "[", "item", ".", "strip", "(", ")", ".", "lower", "(", ")", "for", "item", "in", "items", "]", "return", "ret" ]
extract the hash sum, either from a remote hash file or from a <hash type>=<hash> string .
train
false
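The <hash type>=<hash> branch needs no salt runtime, only the `_urlparse` alias in scope; a sketch with a hypothetical checksum, assuming the snippet is pasted into a script with this alias defined:

    from urllib.parse import urlparse as _urlparse   # stand-in for salt's own alias
    print(_get_source_sum('SHA256=ABC123', r'C:\file.exe', 'base'))
    # {'hash_type': 'sha256', 'hsum': 'abc123'} -- type and sum are lowercased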
7,685
def rgb2rgbcie(rgb): return _convert(rgbcie_from_rgb, rgb)
[ "def", "rgb2rgbcie", "(", "rgb", ")", ":", "return", "_convert", "(", "rgbcie_from_rgb", ",", "rgb", ")" ]
rgb to rgb cie color space conversion .
train
false
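The wrapper is public in scikit-image, so a direct check is possible:

    import numpy as np
    from skimage.color import rgb2rgbcie
    red = np.zeros((1, 1, 3), dtype=float)
    red[0, 0] = (1.0, 0.0, 0.0)             # a single pure-red pixel
    print(rgb2rgbcie(red).shape)            # (1, 1, 3) -- same layout, CIE RGB values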
7,686
def _valid_method_call_check_resources(resource, method): for name in resource.keys(): _valid_method_call_check_resource(name, method)
[ "def", "_valid_method_call_check_resources", "(", "resource", ",", "method", ")", ":", "for", "name", "in", "resource", ".", "keys", "(", ")", ":", "_valid_method_call_check_resource", "(", "name", ",", "method", ")" ]
a method to check whether each named resource can use the given quota method .
train
false
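A sketch of the fan-out; the resource names and the 'reserve' method are hypothetical:

    resources = {'volumes': 10, 'snapshots': 5}               # hypothetical quota resources
    _valid_method_call_check_resources(resources, 'reserve')  # raises on a bad pairing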
7,689
def action_peek_xml(body): dom = utils.safe_minidom_parse_string(body) action_node = dom.childNodes[0] return action_node.tagName
[ "def", "action_peek_xml", "(", "body", ")", ":", "dom", "=", "utils", ".", "safe_minidom_parse_string", "(", "body", ")", "action_node", "=", "dom", ".", "childNodes", "[", "0", "]", "return", "action_node", ".", "tagName" ]
determine the action to invoke from the root tag of the xml request body .
train
false
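`utils.safe_minidom_parse_string` is an openstack-internal wrapper over minidom; the equivalent with the stdlib directly, on an illustrative action body:

    from xml.dom import minidom
    body = '<os-reset_status><status>available</status></os-reset_status>'
    dom = minidom.parseString(body)
    print(dom.childNodes[0].tagName)        # 'os-reset_status'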
7,690
def list_firewalls(profile=None): conn = _auth(profile) return conn.list_firewalls()
[ "def", "list_firewalls", "(", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "list_firewalls", "(", ")" ]
fetches a list of all firewalls for a tenant (cli example sketched below) .
train
false
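The docstring's cli example was truncated in the nl field above; a plausible reconstruction, assuming this is salt's neutron execution module and using a placeholder profile name:

    # from the salt CLI (sketch):
    #   salt '*' neutron.list_firewalls profile=openstack
    # or from python:
    firewalls = list_firewalls(profile='openstack')   # 'openstack' is a placeholder profile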