id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
31,632
def score(count_bigram, count1, count2, n_words): N = n_words c12 = count_bigram c1 = count1 c2 = count2 p = (c2 / N) p1 = (c12 / c1) p2 = ((c2 - c12) / (N - c1)) score = (((l(c12, c1, p) + l((c2 - c12), (N - c1), p)) - l(c12, c1, p1)) - l((c2 - c12), (N - c1), p2)) return ((-2) * score)
[ "def", "score", "(", "count_bigram", ",", "count1", ",", "count2", ",", "n_words", ")", ":", "N", "=", "n_words", "c12", "=", "count_bigram", "c1", "=", "count1", "c2", "=", "count2", "p", "=", "(", "c2", "/", "N", ")", "p1", "=", "(", "c12", "/"...
score sentences based on different features .
train
true
31,633
def test_tempdir(): tempdir2 = _TempDir() assert_true(op.isdir(tempdir2)) x = str(tempdir2) del tempdir2 assert_true((not op.isdir(x)))
[ "def", "test_tempdir", "(", ")", ":", "tempdir2", "=", "_TempDir", "(", ")", "assert_true", "(", "op", ".", "isdir", "(", "tempdir2", ")", ")", "x", "=", "str", "(", "tempdir2", ")", "del", "tempdir2", "assert_true", "(", "(", "not", "op", ".", "isdi...
test tempdir .
train
false
31,634
def build_flask_context(request): return HTTPContext(url=request.url, method=request.method, user_agent=request.user_agent.string, referrer=request.referrer, remote_ip=request.remote_addr)
[ "def", "build_flask_context", "(", "request", ")", ":", "return", "HTTPContext", "(", "url", "=", "request", ".", "url", ",", "method", "=", "request", ".", "method", ",", "user_agent", "=", "request", ".", "user_agent", ".", "string", ",", "referrer", "="...
builds an http context object from a flask request object .
train
true
31,635
def display_structure(structure, parentparts=[]): if parentparts: name = '.'.join(parentparts) else: print 'HEADER' name = 'TEXT' is_multipart = isinstance(structure[0], list) if is_multipart: parttype = ('multipart/%s' % structure[1].lower()) else: parttype = ('%s/%s' % structure[:2]).lower() print ('%-9s' % name), parttype, if is_multipart: print subparts = structure[0] for i in range(len(subparts)): display_structure(subparts[i], (parentparts + [str((i + 1))])) else: if structure[6]: print ('size=%s' % structure[6]), if structure[8]: (disposition, namevalues) = structure[8] print disposition, for i in range(0, len(namevalues), 2): print ('%s=%r' % namevalues[i:(i + 2)]) print
[ "def", "display_structure", "(", "structure", ",", "parentparts", "=", "[", "]", ")", ":", "if", "parentparts", ":", "name", "=", "'.'", ".", "join", "(", "parentparts", ")", "else", ":", "print", "'HEADER'", "name", "=", "'TEXT'", "is_multipart", "=", "...
attractively display a given message structure .
train
false
31,636
def create_transfer_manager(client, config, osutil=None): executor_cls = None if (not config.use_threads): executor_cls = NonThreadedExecutor return TransferManager(client, config, osutil, executor_cls)
[ "def", "create_transfer_manager", "(", "client", ",", "config", ",", "osutil", "=", "None", ")", ":", "executor_cls", "=", "None", "if", "(", "not", "config", ".", "use_threads", ")", ":", "executor_cls", "=", "NonThreadedExecutor", "return", "TransferManager", ...
creates a transfer manager based on configuration :type client: boto3 .
train
false
31,637
@click.command('serve_default_site') @click.argument('state', type=click.Choice(['on', 'off'])) def config_serve_default_site(state): state = (True if (state == 'on') else False) update_config({'serve_default_site': state})
[ "@", "click", ".", "command", "(", "'serve_default_site'", ")", "@", "click", ".", "argument", "(", "'state'", ",", "type", "=", "click", ".", "Choice", "(", "[", "'on'", ",", "'off'", "]", ")", ")", "def", "config_serve_default_site", "(", "state", ")",...
configure nginx to serve the default site on port 80 .
train
false
31,638
def newer_pairwise(sources, targets): if (len(sources) != len(targets)): raise ValueError, "'sources' and 'targets' must be same length" n_sources = [] n_targets = [] for i in range(len(sources)): if newer(sources[i], targets[i]): n_sources.append(sources[i]) n_targets.append(targets[i]) return (n_sources, n_targets)
[ "def", "newer_pairwise", "(", "sources", ",", "targets", ")", ":", "if", "(", "len", "(", "sources", ")", "!=", "len", "(", "targets", ")", ")", ":", "raise", "ValueError", ",", "\"'sources' and 'targets' must be same length\"", "n_sources", "=", "[", "]", "...
walk two filename lists in parallel .
train
false
31,639
def generate_dropout_mask(mlp, default_include_prob=0.5, input_include_probs=None, rng=(2013, 5, 17)): if (input_include_probs is None): input_include_probs = {} if (not hasattr(rng, 'uniform')): rng = np.random.RandomState(rng) total_units = 0 mask = 0 for layer in mlp.layers: if (layer.layer_name in input_include_probs): p = input_include_probs[layer.layer_name] else: p = default_include_prob for _ in xrange(layer.get_input_space().get_total_dimension()): mask |= (int((rng.uniform() < p)) << total_units) total_units += 1 return mask
[ "def", "generate_dropout_mask", "(", "mlp", ",", "default_include_prob", "=", "0.5", ",", "input_include_probs", "=", "None", ",", "rng", "=", "(", "2013", ",", "5", ",", "17", ")", ")", ":", "if", "(", "input_include_probs", "is", "None", ")", ":", "inp...
generate a dropout mask given inclusion probabilities .
train
false
31,640
def test_scharr_v_zeros(): result = filters.scharr_v(np.zeros((10, 10)), np.ones((10, 10), bool)) assert_allclose(result, 0)
[ "def", "test_scharr_v_zeros", "(", ")", ":", "result", "=", "filters", ".", "scharr_v", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose"...
vertical scharr on an array of all zeros .
train
false
31,642
def previous_current_next(items): extend = itertools.chain([None], items, [None]) (prev, cur, nex) = itertools.tee(extend, 3) next(cur) next(nex) next(nex) return zip(prev, cur, nex)
[ "def", "previous_current_next", "(", "items", ")", ":", "extend", "=", "itertools", ".", "chain", "(", "[", "None", "]", ",", "items", ",", "[", "None", "]", ")", "(", "prev", ",", "cur", ",", "nex", ")", "=", "itertools", ".", "tee", "(", "extend"...
from URL creates an iterator which returns triples .
train
false
31,643
def testNsTunnels(remote='ubuntu2', link=RemoteGRELink): net = Mininet(host=RemoteHost, link=link) h1 = net.addHost('h1') h2 = net.addHost('h2', server=remote) net.addLink(h1, h2) net.start() net.pingAll() net.stop()
[ "def", "testNsTunnels", "(", "remote", "=", "'ubuntu2'", ",", "link", "=", "RemoteGRELink", ")", ":", "net", "=", "Mininet", "(", "host", "=", "RemoteHost", ",", "link", "=", "link", ")", "h1", "=", "net", ".", "addHost", "(", "'h1'", ")", "h2", "=",...
test tunnels between nodes in namespaces .
train
false
31,644
def _handle_sigusr1(sig, stack): if sys.stderr.isatty(): output = sys.stderr _makepretty(output, stack) else: filename = 'salt-debug-{0}.log'.format(int(time.time())) destfile = os.path.join(tempfile.gettempdir(), filename) with salt.utils.fopen(destfile, 'w') as output: _makepretty(output, stack)
[ "def", "_handle_sigusr1", "(", "sig", ",", "stack", ")", ":", "if", "sys", ".", "stderr", ".", "isatty", "(", ")", ":", "output", "=", "sys", ".", "stderr", "_makepretty", "(", "output", ",", "stack", ")", "else", ":", "filename", "=", "'salt-debug-{0}...
signal handler for sigusr1 .
train
true
31,645
def flagFunction(method, name=None): if _PY3: reqArgs = len(inspect.signature(method).parameters) if (reqArgs > 1): raise UsageError(('Invalid Option function for %s' % (name or method.__name__))) if (reqArgs == 1): return False else: reqArgs = len(inspect.getargspec(method).args) if (reqArgs > 2): raise UsageError(('Invalid Option function for %s' % (name or method.__name__))) if (reqArgs == 2): return False return True
[ "def", "flagFunction", "(", "method", ",", "name", "=", "None", ")", ":", "if", "_PY3", ":", "reqArgs", "=", "len", "(", "inspect", ".", "signature", "(", "method", ")", ".", "parameters", ")", "if", "(", "reqArgs", ">", "1", ")", ":", "raise", "Us...
determine whether a function is an optional handler for a i{flag} or an i{option} .
train
false
31,646
def _run_inactive(games): return (len(games) == 0)
[ "def", "_run_inactive", "(", "games", ")", ":", "return", "(", "len", "(", "games", ")", "==", "0", ")" ]
the inactive mode simply checks if there are any active games .
train
false
31,649
def _parseTCPSSL(factory, domain, port): return ((domain, int(port), factory), {})
[ "def", "_parseTCPSSL", "(", "factory", ",", "domain", ",", "port", ")", ":", "return", "(", "(", "domain", ",", "int", "(", "port", ")", ",", "factory", ")", ",", "{", "}", ")" ]
for the moment .
train
false
31,650
def effective_n_jobs(n_jobs=(-1)): (backend, _) = get_active_backend() return backend.effective_n_jobs(n_jobs=n_jobs)
[ "def", "effective_n_jobs", "(", "n_jobs", "=", "(", "-", "1", ")", ")", ":", "(", "backend", ",", "_", ")", "=", "get_active_backend", "(", ")", "return", "backend", ".", "effective_n_jobs", "(", "n_jobs", "=", "n_jobs", ")" ]
determine the number of jobs that can actually run in parallel n_jobs is the is the number of workers requested by the callers .
train
false
31,651
def getLoopLayerAppend(loopLayers, z): settings.printProgress(len(loopLayers), 'slice') loopLayer = euclidean.LoopLayer(z) loopLayers.append(loopLayer) return loopLayer
[ "def", "getLoopLayerAppend", "(", "loopLayers", ",", "z", ")", ":", "settings", ".", "printProgress", "(", "len", "(", "loopLayers", ")", ",", "'slice'", ")", "loopLayer", "=", "euclidean", ".", "LoopLayer", "(", "z", ")", "loopLayers", ".", "append", "(",...
get next z and add extruder loops .
train
false
31,652
def course(): mode = session.s3.hrm.mode def prep(r): if (mode is not None): auth.permission.fail() if (r.component_name == 'training'): s3.crud_strings['hrm_training'].label_create = T('Add Trainee') return True s3.prep = prep if ((not auth.s3_has_role(ADMIN)) and (not s3.filter)): s3.filter = auth.filter_by_root_org(s3db.hrm_course) output = s3_rest_controller(rheader=s3db.hrm_rheader) return output
[ "def", "course", "(", ")", ":", "mode", "=", "session", ".", "s3", ".", "hrm", ".", "mode", "def", "prep", "(", "r", ")", ":", "if", "(", "mode", "is", "not", "None", ")", ":", "auth", ".", "permission", ".", "fail", "(", ")", "if", "(", "r",...
courses controller .
train
false
31,653
def PrintResponse(batch_job_helper, response_xml): response = batch_job_helper.ParseResponse(response_xml) if ('rval' in response['mutateResponse']): for data in response['mutateResponse']['rval']: if ('errorList' in data): print ('Operation %s - FAILURE:' % data['index']) print (' DCTB errorType=%s' % data['errorList']['errors']['ApiError.Type']) print (' DCTB trigger=%s' % data['errorList']['errors']['trigger']) print (' DCTB errorString=%s' % data['errorList']['errors']['errorString']) print (' DCTB fieldPath=%s' % data['errorList']['errors']['fieldPath']) print (' DCTB reason=%s' % data['errorList']['errors']['reason']) if ('result' in data): print ('Operation %s - SUCCESS.' % data['index'])
[ "def", "PrintResponse", "(", "batch_job_helper", ",", "response_xml", ")", ":", "response", "=", "batch_job_helper", ".", "ParseResponse", "(", "response_xml", ")", "if", "(", "'rval'", "in", "response", "[", "'mutateResponse'", "]", ")", ":", "for", "data", "...
prints the batchjobservice response .
train
true
31,654
@core_helper def debug_inspect(arg): return ((literal('<pre>') + pprint.pformat(arg)) + literal('</pre>'))
[ "@", "core_helper", "def", "debug_inspect", "(", "arg", ")", ":", "return", "(", "(", "literal", "(", "'<pre>'", ")", "+", "pprint", ".", "pformat", "(", "arg", ")", ")", "+", "literal", "(", "'</pre>'", ")", ")" ]
output pprint .
train
false
31,655
def _plot_update_evoked_topo_proj(params, bools): evokeds = [e.copy() for e in params['evokeds']] fig = params['fig'] projs = [proj for (proj, b) in zip(params['projs'], bools) if b] params['proj_bools'] = bools for e in evokeds: e.add_proj(projs, remove_existing=True) e.apply_proj() for ax in fig.axes[0]._mne_axs: for (line, evoked) in zip(ax.data_lines, evokeds): line.set_ydata((ax.y_t + (ax.y_s * evoked.data[ax._mne_ch_idx]))) fig.canvas.draw()
[ "def", "_plot_update_evoked_topo_proj", "(", "params", ",", "bools", ")", ":", "evokeds", "=", "[", "e", ".", "copy", "(", ")", "for", "e", "in", "params", "[", "'evokeds'", "]", "]", "fig", "=", "params", "[", "'fig'", "]", "projs", "=", "[", "proj"...
update topo sensor plots .
train
false
31,656
def pysiphash(uint64): assert (0 <= uint64 < (1 << 64)) if (uint64 > ((1 << 63) - 1)): int64 = (uint64 - (1 << 64)) else: int64 = uint64 uint32 = ((uint64 ^ (uint64 >> 32)) & 4294967295) if (uint32 > ((1 << 31) - 1)): int32 = (uint32 - (1 << 32)) else: int32 = uint32 return (int32, int64)
[ "def", "pysiphash", "(", "uint64", ")", ":", "assert", "(", "0", "<=", "uint64", "<", "(", "1", "<<", "64", ")", ")", "if", "(", "uint64", ">", "(", "(", "1", "<<", "63", ")", "-", "1", ")", ")", ":", "int64", "=", "(", "uint64", "-", "(", ...
convert siphash24 output to py_hash_t .
train
false
31,658
def _reinstall_default_lookups(): _install_lookups(dict(instance_state=_default_state_getter, instance_dict=_default_dict_getter, manager_of_class=_default_manager_getter)) _instrumentation_factory._extended = False
[ "def", "_reinstall_default_lookups", "(", ")", ":", "_install_lookups", "(", "dict", "(", "instance_state", "=", "_default_state_getter", ",", "instance_dict", "=", "_default_dict_getter", ",", "manager_of_class", "=", "_default_manager_getter", ")", ")", "_instrumentatio...
restore simplified lookups .
train
false
31,659
def _parse_dbus_locale(): bus = dbus.SystemBus() localed = bus.get_object('org.freedesktop.locale1', '/org/freedesktop/locale1') properties = dbus.Interface(localed, 'org.freedesktop.DBus.Properties') system_locale = properties.Get('org.freedesktop.locale1', 'Locale') ret = {} for env_var in system_locale: match = re.match('^([A-Z_]+)=(.*)$', env_var) if match: ret[match.group(1)] = match.group(2).replace('"', '') else: log.error('Odd locale parameter "{0}" detected in dbus locale output. This should not happen. You should probably investigate what caused this.'.format(env_var)) return ret
[ "def", "_parse_dbus_locale", "(", ")", ":", "bus", "=", "dbus", ".", "SystemBus", "(", ")", "localed", "=", "bus", ".", "get_object", "(", "'org.freedesktop.locale1'", ",", "'/org/freedesktop/locale1'", ")", "properties", "=", "dbus", ".", "Interface", "(", "l...
get the system locale parameters from dbus .
train
true
31,660
def MultifieldParser(fieldnames, schema, fieldboosts=None, **kwargs): from whoosh.qparser.plugins import MultifieldPlugin p = QueryParser(None, schema, **kwargs) mfp = MultifieldPlugin(fieldnames, fieldboosts=fieldboosts) p.add_plugin(mfp) return p
[ "def", "MultifieldParser", "(", "fieldnames", ",", "schema", ",", "fieldboosts", "=", "None", ",", "**", "kwargs", ")", ":", "from", "whoosh", ".", "qparser", ".", "plugins", "import", "MultifieldPlugin", "p", "=", "QueryParser", "(", "None", ",", "schema", ...
returns a queryparser configured to search in multiple fields .
train
false
31,661
def has_metadata(trait, metadata, value=None, recursive=True): count = 0 if (hasattr(trait, u'_metadata') and (metadata in list(trait._metadata.keys())) and ((trait._metadata[metadata] == value) or (value is None))): count += 1 if recursive: if hasattr(trait, u'inner_traits'): for inner_trait in trait.inner_traits(): count += has_metadata(inner_trait.trait_type, metadata, recursive) if (hasattr(trait, u'handlers') and (trait.handlers is not None)): for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) return (count > 0)
[ "def", "has_metadata", "(", "trait", ",", "metadata", ",", "value", "=", "None", ",", "recursive", "=", "True", ")", ":", "count", "=", "0", "if", "(", "hasattr", "(", "trait", ",", "u'_metadata'", ")", "and", "(", "metadata", "in", "list", "(", "tra...
checks if a given trait has a metadata .
train
false
31,662
def fetch_nb_dependencies(files, repo=REPO, raw=RAW, branch=BRANCH, deps=DEPS, overwrite=False, verbose=True): if (type(files) == list): files = {'': files} for directory in files.keys(): if (directory != ''): if verbose: print 'Parsing directory: %s' for fl in files[directory]: if (directory != ''): fl = ((directory + '/') + fl) if (not overwrite): if os.path.isfile(fl): if verbose: print ('A file named %s already exists in the specified directory ... skipping download.' % fl) continue elif verbose: print ('Overwriting file %s ...' % fl) if verbose: print ('Fetching file: %s' % fl) url = '/'.join([repo, raw, branch, deps, fl]) r = requests.get(url) with open(fl, 'wb') as fl: fl.write(r.content)
[ "def", "fetch_nb_dependencies", "(", "files", ",", "repo", "=", "REPO", ",", "raw", "=", "RAW", ",", "branch", "=", "BRANCH", ",", "deps", "=", "DEPS", ",", "overwrite", "=", "False", ",", "verbose", "=", "True", ")", ":", "if", "(", "type", "(", "...
retrieve raw files from quantecon .
train
true
31,663
def lsmr_operator(Jop, d, active_set): (m, n) = Jop.shape def matvec(x): x_free = x.ravel().copy() x_free[active_set] = 0 return Jop.matvec((x * d)) def rmatvec(x): r = (d * Jop.rmatvec(x)) r[active_set] = 0 return r return LinearOperator((m, n), matvec=matvec, rmatvec=rmatvec, dtype=float)
[ "def", "lsmr_operator", "(", "Jop", ",", "d", ",", "active_set", ")", ":", "(", "m", ",", "n", ")", "=", "Jop", ".", "shape", "def", "matvec", "(", "x", ")", ":", "x_free", "=", "x", ".", "ravel", "(", ")", ".", "copy", "(", ")", "x_free", "[...
compute linearoperator to use in lsmr by dogbox algorithm .
train
false
31,665
def _fake_run_horcmstart(*args): return (0 if (not run_horcmstart_returns_error) else 3)
[ "def", "_fake_run_horcmstart", "(", "*", "args", ")", ":", "return", "(", "0", "if", "(", "not", "run_horcmstart_returns_error", ")", "else", "3", ")" ]
return a value based on a flag value .
train
false
31,666
def pcoa(lines): dist_mtx = DistanceMatrix.read(lines) pcoa_obj = PCoA(dist_mtx) return pcoa_obj.scores()
[ "def", "pcoa", "(", "lines", ")", ":", "dist_mtx", "=", "DistanceMatrix", ".", "read", "(", "lines", ")", "pcoa_obj", "=", "PCoA", "(", "dist_mtx", ")", "return", "pcoa_obj", ".", "scores", "(", ")" ]
run pcoa on the distance matrix present on lines .
train
false
31,668
@cmdopts([('version=', 'v', 'Legacy GeoNode version of the existing database.')]) @task def upgradedb(options): version = options.get('version') if (version in ['1.1', '1.2']): sh('python manage.py migrate maps 0001 --fake') sh('python manage.py migrate avatar 0001 --fake') elif (version is None): print 'Please specify your GeoNode version' else: print ('Upgrades from version %s are not yet supported.' % version)
[ "@", "cmdopts", "(", "[", "(", "'version='", ",", "'v'", ",", "'Legacy GeoNode version of the existing database.'", ")", "]", ")", "@", "task", "def", "upgradedb", "(", "options", ")", ":", "version", "=", "options", ".", "get", "(", "'version'", ")", "if", ...
add fake data migrations for existing tables from legacy geonode versions .
train
false
31,669
@task def virtualenv_verify(requirements_revision=None): req_rev = (requirements_revision or latest_requirements_revision()) env_dir = ('env.%s' % req_rev) package_dir = ('python-package.%s' % req_rev) requirements_file = ('prod-requirements.txt.%s' % req_rev) with settings(warn_only=True): out = run(('~/%s/viewfinder/bin/pip install -f file://$HOME/%s --no-index -r ~/%s --no-install --no-download -q' % (env_dir, package_dir, requirements_file))) if (out.return_code == 0): fprint(('Valid virtual environment for prod-requirements (rev %s)' % req_rev)) return True else: fprint(('Bad virtual environment for prod-requirements (rev %s)' % req_rev)) return False
[ "@", "task", "def", "virtualenv_verify", "(", "requirements_revision", "=", "None", ")", ":", "req_rev", "=", "(", "requirements_revision", "or", "latest_requirements_revision", "(", ")", ")", "env_dir", "=", "(", "'env.%s'", "%", "req_rev", ")", "package_dir", ...
verify the virtual environment for a given revision .
train
false
31,670
def _PushConnection(new_connection): __InitConnection() _thread_local.connection_stack.append(new_connection)
[ "def", "_PushConnection", "(", "new_connection", ")", ":", "__InitConnection", "(", ")", "_thread_local", ".", "connection_stack", ".", "append", "(", "new_connection", ")" ]
internal method to save the current connection and sets a new one .
train
false
31,671
def _check_authorized(requesting_user, username, allow_staff=False): if (requesting_user.username != username): if ((not requesting_user.is_staff) or (not allow_staff)): raise UserNotAuthorized()
[ "def", "_check_authorized", "(", "requesting_user", ",", "username", ",", "allow_staff", "=", "False", ")", ":", "if", "(", "requesting_user", ".", "username", "!=", "username", ")", ":", "if", "(", "(", "not", "requesting_user", ".", "is_staff", ")", "or", ...
helper method that raises usernotauthorized if requesting user is not owner user or is not staff if access to staff is given .
train
false
31,673
def GlobalProcess(): return _dev_process
[ "def", "GlobalProcess", "(", ")", ":", "return", "_dev_process" ]
returns a global devprocess object representing the current process .
train
false
31,678
def get_parent_unit(xblock): while xblock: xblock = xblock.get_parent() if (xblock is None): return None parent = xblock.get_parent() if (parent is None): return None if (parent.category == 'sequential'): return xblock
[ "def", "get_parent_unit", "(", "xblock", ")", ":", "while", "xblock", ":", "xblock", "=", "xblock", ".", "get_parent", "(", ")", "if", "(", "xblock", "is", "None", ")", ":", "return", "None", "parent", "=", "xblock", ".", "get_parent", "(", ")", "if", ...
find vertical that is a unit .
train
false
31,679
def validate_textbook_json(textbook): if isinstance(textbook, basestring): try: textbook = json.loads(textbook) except ValueError: raise TextbookValidationError('invalid JSON') if (not isinstance(textbook, dict)): raise TextbookValidationError('must be JSON object') if (not textbook.get('tab_title')): raise TextbookValidationError('must have tab_title') tid = unicode(textbook.get('id', '')) if (tid and (not tid[0].isdigit())): raise TextbookValidationError('textbook ID must start with a digit') return textbook
[ "def", "validate_textbook_json", "(", "textbook", ")", ":", "if", "isinstance", "(", "textbook", ",", "basestring", ")", ":", "try", ":", "textbook", "=", "json", ".", "loads", "(", "textbook", ")", "except", "ValueError", ":", "raise", "TextbookValidationErro...
validate the given text as representing a list of pdf textbooks .
train
false
31,680
def loadImage(filename, path=GUI_DATA_PATH): im = gtk.Image() filename = os.path.join(path, filename) im.set_from_file(filename) im.show() return im
[ "def", "loadImage", "(", "filename", ",", "path", "=", "GUI_DATA_PATH", ")", ":", "im", "=", "gtk", ".", "Image", "(", ")", "filename", "=", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", "im", ".", "set_from_file", "(", "filename...
loads a pixbuf from disk .
train
false
31,682
def test_match(): assert match(Command(u'ps -ef |\xa0grep foo', stderr=u'-bash: \xa0grep: command not found')) assert (not match(Command('ps -ef | grep foo'))) assert (not match(Command()))
[ "def", "test_match", "(", ")", ":", "assert", "match", "(", "Command", "(", "u'ps -ef |\\xa0grep foo'", ",", "stderr", "=", "u'-bash: \\xa0grep: command not found'", ")", ")", "assert", "(", "not", "match", "(", "Command", "(", "'ps -ef | grep foo'", ")", ")", "...
the character before grep is alt+space .
train
false
31,683
def mangle_args(argtys): return ''.join([mangle_type(t) for t in argtys])
[ "def", "mangle_args", "(", "argtys", ")", ":", "return", "''", ".", "join", "(", "[", "mangle_type", "(", "t", ")", "for", "t", "in", "argtys", "]", ")" ]
mangle sequence of numba type objects .
train
false
31,685
def savepoint(using=None): if (using is None): using = DEFAULT_DB_ALIAS connection = connections[using] return connection.savepoint()
[ "def", "savepoint", "(", "using", "=", "None", ")", ":", "if", "(", "using", "is", "None", ")", ":", "using", "=", "DEFAULT_DB_ALIAS", "connection", "=", "connections", "[", "using", "]", "return", "connection", ".", "savepoint", "(", ")" ]
creates a savepoint inside the current transaction .
train
false
31,686
def _maybe_add_read_preference(spec, read_preference): mode = read_preference.mode tag_sets = read_preference.tag_sets max_staleness = read_preference.max_staleness if (mode and ((mode != ReadPreference.SECONDARY_PREFERRED.mode) or (tag_sets != [{}]) or (max_staleness != (-1)))): if ('$query' not in spec): spec = SON([('$query', spec)]) spec['$readPreference'] = read_preference.document return spec
[ "def", "_maybe_add_read_preference", "(", "spec", ",", "read_preference", ")", ":", "mode", "=", "read_preference", ".", "mode", "tag_sets", "=", "read_preference", ".", "tag_sets", "max_staleness", "=", "read_preference", ".", "max_staleness", "if", "(", "mode", ...
add $readpreference to spec when appropriate .
train
true
31,687
def token_sort_ratio(s1, s2, force_ascii=True, full_process=True): return _token_sort(s1, s2, partial=False, force_ascii=force_ascii, full_process=full_process)
[ "def", "token_sort_ratio", "(", "s1", ",", "s2", ",", "force_ascii", "=", "True", ",", "full_process", "=", "True", ")", ":", "return", "_token_sort", "(", "s1", ",", "s2", ",", "partial", "=", "False", ",", "force_ascii", "=", "force_ascii", ",", "full_...
return a measure of the sequences similarity between 0 and 100 but sorting the token before comparing .
train
true
31,689
def convert_kvp_list_to_dict(kvp_list): if (kvp_list == ['True']): return {} kvp_map = {} for kvp_str in kvp_list: (key, value) = convert_kvp_str_to_list(kvp_str) kvp_map.setdefault(key, set()) kvp_map[key].add(value) return dict(((x, list(y)) for (x, y) in kvp_map.iteritems()))
[ "def", "convert_kvp_list_to_dict", "(", "kvp_list", ")", ":", "if", "(", "kvp_list", "==", "[", "'True'", "]", ")", ":", "return", "{", "}", "kvp_map", "=", "{", "}", "for", "kvp_str", "in", "kvp_list", ":", "(", "key", ",", "value", ")", "=", "conve...
convert a list of key=value strings to a dict .
train
false
31,691
def _create_change_list_from_suggestion(suggestion): return [{'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY, 'state_name': suggestion.state_name, 'property_name': exp_domain.STATE_PROPERTY_CONTENT, 'new_value': [suggestion.state_content]}]
[ "def", "_create_change_list_from_suggestion", "(", "suggestion", ")", ":", "return", "[", "{", "'cmd'", ":", "exp_domain", ".", "CMD_EDIT_STATE_PROPERTY", ",", "'state_name'", ":", "suggestion", ".", "state_name", ",", "'property_name'", ":", "exp_domain", ".", "STA...
creates a change list from a suggestion object .
train
false
31,692
def repr_writers(h): return [u'({0}){1}->{2}'.format(fd, _rcb(cb), repr_flag(WRITE)) for (fd, cb) in items(h.writers)]
[ "def", "repr_writers", "(", "h", ")", ":", "return", "[", "u'({0}){1}->{2}'", ".", "format", "(", "fd", ",", "_rcb", "(", "cb", ")", ",", "repr_flag", "(", "WRITE", ")", ")", "for", "(", "fd", ",", "cb", ")", "in", "items", "(", "h", ".", "writer...
return description of pending writers .
train
false
31,693
def set_log_level_for_all_handlers(logger, level=logging.DEBUG): logger.setLevel(level) handlers = logger.handlers for handler in handlers: handler.setLevel(level) return logger
[ "def", "set_log_level_for_all_handlers", "(", "logger", ",", "level", "=", "logging", ".", "DEBUG", ")", ":", "logger", ".", "setLevel", "(", "level", ")", "handlers", "=", "logger", ".", "handlers", "for", "handler", "in", "handlers", ":", "handler", ".", ...
set a log level for all the handlers on the provided logger .
train
false
31,694
def getLoopConvexAddedIndex(around, lastAddedIndex, lastPoint, segment): polygonConvexAddedIndex = (len(around) - 1) greatestDotProduct = (-9.9) for addedIndex in xrange((lastAddedIndex + 1), len(around)): addedPoint = around[addedIndex] addedSegment = getNormalized((addedPoint - lastPoint)) if (abs(addedSegment) > 0.0): dotProduct = getDotProduct(addedSegment, segment) if (dotProduct >= greatestDotProduct): greatestDotProduct = dotProduct polygonConvexAddedIndex = addedIndex return polygonConvexAddedIndex
[ "def", "getLoopConvexAddedIndex", "(", "around", ",", "lastAddedIndex", ",", "lastPoint", ",", "segment", ")", ":", "polygonConvexAddedIndex", "=", "(", "len", "(", "around", ")", "-", "1", ")", "greatestDotProduct", "=", "(", "-", "9.9", ")", "for", "addedI...
get polygon convex added index .
train
false
31,698
def _batch_create_object(*objparams): dbobjs = [ObjectDB(**objparam[0]) for objparam in objparams] objs = [] for (iobj, obj) in enumerate(dbobjs): objparam = objparams[iobj] obj._createdict = {'permissions': objparam[1], 'locks': objparam[2], 'aliases': objparam[3], 'nattributes': objparam[4], 'attributes': objparam[5], 'tags': objparam[6]} obj.save() for code in objparam[7]: if code: exec code in {}, {'evennia': evennia, 'obj': obj} objs.append(obj) return objs
[ "def", "_batch_create_object", "(", "*", "objparams", ")", ":", "dbobjs", "=", "[", "ObjectDB", "(", "**", "objparam", "[", "0", "]", ")", "for", "objparam", "in", "objparams", "]", "objs", "=", "[", "]", "for", "(", "iobj", ",", "obj", ")", "in", ...
this is a cut-down version of the create_object() function .
train
false
31,700
def dump_options_header(header, options): segments = [] if (header is not None): segments.append(header) for (key, value) in iteritems(options): if (value is None): segments.append(key) else: segments.append(('%s=%s' % (key, quote_header_value(value)))) return '; '.join(segments)
[ "def", "dump_options_header", "(", "header", ",", "options", ")", ":", "segments", "=", "[", "]", "if", "(", "header", "is", "not", "None", ")", ":", "segments", ".", "append", "(", "header", ")", "for", "(", "key", ",", "value", ")", "in", "iteritem...
the reverse function to :func:parse_options_header .
train
true
31,701
def gettext(string, **variables): return get_i18n().gettext(string, **variables)
[ "def", "gettext", "(", "string", ",", "**", "variables", ")", ":", "return", "get_i18n", "(", ")", ".", "gettext", "(", "string", ",", "**", "variables", ")" ]
join the chunks .
train
false
31,702
def setNonBlocking(fd): flags = fcntl.fcntl(fd, fcntl.F_GETFL) flags = (flags | os.O_NONBLOCK) fcntl.fcntl(fd, fcntl.F_SETFL, flags)
[ "def", "setNonBlocking", "(", "fd", ")", ":", "flags", "=", "fcntl", ".", "fcntl", "(", "fd", ",", "fcntl", ".", "F_GETFL", ")", "flags", "=", "(", "flags", "|", "os", ".", "O_NONBLOCK", ")", "fcntl", ".", "fcntl", "(", "fd", ",", "fcntl", ".", "...
make a file descriptor non-blocking .
train
false
31,704
def color_code_severity_widget(widget, name): for (option, color) in zip(widget, ['green', 'yellow', 'orange', 'red']): option[0].__setitem__('_style', ('background-color:%s;' % color)) option[0][0].__setitem__('_name', name) return widget
[ "def", "color_code_severity_widget", "(", "widget", ",", "name", ")", ":", "for", "(", "option", ",", "color", ")", "in", "zip", "(", "widget", ",", "[", "'green'", ",", "'yellow'", ",", "'orange'", ",", "'red'", "]", ")", ":", "option", "[", "0", "]...
utility function to colour-code severity options .
train
false
31,705
def get_response_body(data_format, data_dict, error_list): if (data_format == 'application/json'): data_dict['Errors'] = error_list return json.dumps(data_dict) if (data_format and data_format.endswith('/xml')): output = '<delete>\n' for key in sorted(data_dict): xml_key = key.replace(' ', '_').lower() output += ('<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)) output += '<errors>\n' output += '\n'.join([('<object><name>%s</name><status>%s</status></object>' % (saxutils.escape(name), status)) for (name, status) in error_list]) output += '</errors>\n</delete>\n' return output output = '' for key in sorted(data_dict): output += ('%s: %s\n' % (key, data_dict[key])) output += 'Errors:\n' output += '\n'.join([('%s, %s' % (name, status)) for (name, status) in error_list]) return output
[ "def", "get_response_body", "(", "data_format", ",", "data_dict", ",", "error_list", ")", ":", "if", "(", "data_format", "==", "'application/json'", ")", ":", "data_dict", "[", "'Errors'", "]", "=", "error_list", "return", "json", ".", "dumps", "(", "data_dict...
returns a properly formatted response body according to format .
train
false
31,708
def test_vcs_url_final_slash_normalization(script, tmpdir): pkg_path = _create_test_package(script, name='testpackage', vcs='hg') args = ['install', '-e', ('hg+%s/#egg=testpackage' % path_to_url(pkg_path))] result = script.pip(*args, **{'expect_error': True}) result.assert_installed('testpackage', with_files=['.hg'])
[ "def", "test_vcs_url_final_slash_normalization", "(", "script", ",", "tmpdir", ")", ":", "pkg_path", "=", "_create_test_package", "(", "script", ",", "name", "=", "'testpackage'", ",", "vcs", "=", "'hg'", ")", "args", "=", "[", "'install'", ",", "'-e'", ",", ...
test that presence or absence of final slash in vcs url is normalized .
train
false
31,709
def cnv_formula(attribute, arg, element): return __save_prefix(attribute, arg, element)
[ "def", "cnv_formula", "(", "attribute", ",", "arg", ",", "element", ")", ":", "return", "__save_prefix", "(", "attribute", ",", "arg", ",", "element", ")" ]
a string containing a formula .
train
false
31,711
def _interpret_spark_task_logs(fs, matches, partial=True, log_callback=None): result = {} for match in matches: error = {} stderr_path = match['path'] if log_callback: log_callback(stderr_path) stderr_error = _parse_task_syslog(_cat_log(fs, stderr_path)) if stderr_error.get('hadoop_error'): stderr_error['hadoop_error']['path'] = stderr_path error.update(stderr_error) else: continue stdout_path = (match.get('stdout') or {}).get('path') check_stdout = error.pop('check_stdout', None) if (stdout_path and check_stdout): if log_callback: log_callback(stdout_path) task_error = _parse_task_stderr(_cat_log(fs, stdout_path)) if task_error: task_error['path'] = stdout_path error['task_error'] = task_error for id_key in ('attempt_id', 'container_id'): if (id_key in match): error[id_key] = match[id_key] _add_implied_task_id(error) result.setdefault('errors', []) result['errors'].append(error) if partial: result['partial'] = True break return result
[ "def", "_interpret_spark_task_logs", "(", "fs", ",", "matches", ",", "partial", "=", "True", ",", "log_callback", "=", "None", ")", ":", "result", "=", "{", "}", "for", "match", "in", "matches", ":", "error", "=", "{", "}", "stderr_path", "=", "match", ...
look for errors in spark task stderr .
train
false
31,713
def TabIterator(handle, alphabet=single_letter_alphabet): for line in handle: try: (title, seq) = line.split(' DCTB ') except: if (line.strip() == ''): continue raise ValueError(('Each line should have one tab separating the' + (' title and sequence, this line has %i tabs: %r' % (line.count(' DCTB '), line)))) title = title.strip() seq = seq.strip() (yield SeqRecord(Seq(seq, alphabet), id=title, name=title, description=''))
[ "def", "TabIterator", "(", "handle", ",", "alphabet", "=", "single_letter_alphabet", ")", ":", "for", "line", "in", "handle", ":", "try", ":", "(", "title", ",", "seq", ")", "=", "line", ".", "split", "(", "' DCTB '", ")", "except", ":", "if", "(", "...
iterates over tab separated lines .
train
false
31,715
def set_chost(value): return set_var('CHOST', value)
[ "def", "set_chost", "(", "value", ")", ":", "return", "set_var", "(", "'CHOST'", ",", "value", ")" ]
set the chost variable return a dict containing the new value for variable:: {<variable>: {old: <old-value> .
train
false
31,716
def associate_by_email(details, user=None, *args, **kwargs): if user: return None email = details.get('email') if email: try: return {'user': UserSocialAuth.get_user_by_email(email=email)} except MultipleObjectsReturned: raise AuthException(kwargs['backend'], 'Not unique email address.') except ObjectDoesNotExist: pass
[ "def", "associate_by_email", "(", "details", ",", "user", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "user", ":", "return", "None", "email", "=", "details", ".", "get", "(", "'email'", ")", "if", "email", ":", "try", ":", "...
associate current auth with a user with the same email address in the db .
train
false
31,717
def set_event_loop(loop): global _current_loop _current_loop = loop return loop
[ "def", "set_event_loop", "(", "loop", ")", ":", "global", "_current_loop", "_current_loop", "=", "loop", "return", "loop" ]
set the current event loop object .
train
false
31,718
def compileAssembly(file_name): cp = CompilerParameters() cp.GenerateExecutable = False cp.OutputAssembly = (file_name.split('.cs')[0] + '.dll') cp.GenerateInMemory = False cp.TreatWarningsAsErrors = False cp.IncludeDebugInformation = True cp.ReferencedAssemblies.Add('IronPython.dll') cr = PROVIDER.CompileAssemblyFromFile(cp, file_name)
[ "def", "compileAssembly", "(", "file_name", ")", ":", "cp", "=", "CompilerParameters", "(", ")", "cp", ".", "GenerateExecutable", "=", "False", "cp", ".", "OutputAssembly", "=", "(", "file_name", ".", "split", "(", "'.cs'", ")", "[", "0", "]", "+", "'.dl...
helper function compiles a * .
train
false
31,721
def render_view_to_response(context, request, name='', secure=True): registry = getattr(request, 'registry', None) if (registry is None): registry = get_current_registry() context_iface = providedBy(context) request_iface = providedBy(request) response = _call_view(registry, request, context, context_iface, name, secure=secure, request_iface=request_iface) return response
[ "def", "render_view_to_response", "(", "context", ",", "request", ",", "name", "=", "''", ",", "secure", "=", "True", ")", ":", "registry", "=", "getattr", "(", "request", ",", "'registry'", ",", "None", ")", "if", "(", "registry", "is", "None", ")", "...
call the :term:view callable configured with a :term:view configuration that matches the :term:view name name registered against the specified context and request and return a :term:response object .
train
false
31,722
def query_database(user_id): ancestor_key = ndb.Key(Note, user_id) query = Note.query(ancestor=ancestor_key).order((- Note.created)) notes = query.fetch() note_messages = [] for note in notes: note_messages.append({'friendly_id': note.friendly_id, 'message': note.message, 'created': note.created}) return note_messages
[ "def", "query_database", "(", "user_id", ")", ":", "ancestor_key", "=", "ndb", ".", "Key", "(", "Note", ",", "user_id", ")", "query", "=", "Note", ".", "query", "(", "ancestor", "=", "ancestor_key", ")", ".", "order", "(", "(", "-", "Note", ".", "cre...
fetches all notes associated with user_id .
train
false
31,723
def assert_equal_type(logical_line): if asse_equal_type_re.match(logical_line): (yield (0, 'N317: assertEqual(type(A), B) sentences not allowed'))
[ "def", "assert_equal_type", "(", "logical_line", ")", ":", "if", "asse_equal_type_re", ".", "match", "(", "logical_line", ")", ":", "(", "yield", "(", "0", ",", "'N317: assertEqual(type(A), B) sentences not allowed'", ")", ")" ]
check for assertequal(type(a) .
train
false
31,724
def pickleMethod(method): if _PY3: return (unpickleMethod, (method.__name__, method.__self__, method.__self__.__class__)) else: return (unpickleMethod, (method.im_func.__name__, method.im_self, method.im_class))
[ "def", "pickleMethod", "(", "method", ")", ":", "if", "_PY3", ":", "return", "(", "unpickleMethod", ",", "(", "method", ".", "__name__", ",", "method", ".", "__self__", ",", "method", ".", "__self__", ".", "__class__", ")", ")", "else", ":", "return", ...
support function for copy_reg to pickle method refs .
train
false
31,726
def imshow(data, title=None, vmin=0, vmax=None, cmap=None, bitspersample=None, photometric='rgb', interpolation='nearest', dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs): isrgb = (photometric in ('rgb', 'palette')) data = numpy.atleast_2d(data.squeeze()) dims = data.ndim if (dims < 2): raise ValueError('not an image') elif (dims == 2): dims = 0 isrgb = False else: if (isrgb and (data.shape[(-3)] in (3, 4))): data = numpy.swapaxes(data, (-3), (-2)) data = numpy.swapaxes(data, (-2), (-1)) elif ((not isrgb) and ((data.shape[(-1)] < (data.shape[(-2)] // 8)) and (data.shape[(-1)] < (data.shape[(-3)] // 8)) and (data.shape[(-1)] < 5))): data = numpy.swapaxes(data, (-3), (-1)) data = numpy.swapaxes(data, (-2), (-1)) isrgb = (isrgb and (data.shape[(-1)] in (3, 4))) dims -= (3 if isrgb else 2) if isrgb: data = data[..., :maxdim, :maxdim, :maxdim] else: data = data[..., :maxdim, :maxdim] if ((photometric == 'palette') and isrgb): datamax = data.max() if (datamax > 255): data >>= 8 data = data.astype('B') elif (data.dtype.kind in 'ui'): if ((not (isrgb and (data.dtype.itemsize <= 1))) or (bitspersample is None)): try: bitspersample = int(math.ceil(math.log(data.max(), 2))) except Exception: bitspersample = (data.dtype.itemsize * 8) elif (not isinstance(bitspersample, int)): bitspersample = (data.dtype.itemsize * 8) datamax = (2 ** bitspersample) if isrgb: if (bitspersample < 8): data <<= (8 - bitspersample) elif (bitspersample > 8): data >>= (bitspersample - 8) data = data.astype('B') elif (data.dtype.kind == 'f'): datamax = data.max() if (isrgb and (datamax > 1.0)): if (data.dtype.char == 'd'): data = data.astype('f') data /= datamax elif (data.dtype.kind == 'b'): datamax = 1 elif (data.dtype.kind == 'c'): raise NotImplementedError('complex type') if (not isrgb): if (vmax is None): vmax = datamax if (vmin is None): if (data.dtype.kind == 'i'): dtmin = numpy.iinfo(data.dtype).min vmin = numpy.min(data) if (vmin == dtmin): vmin = numpy.min((data > dtmin)) if (data.dtype.kind == 'f'): dtmin = numpy.finfo(data.dtype).min vmin = numpy.min(data) if (vmin == dtmin): vmin = numpy.min((data > dtmin)) else: vmin = 0 pyplot = sys.modules['matplotlib.pyplot'] if (figure is None): pyplot.rc('font', family='sans-serif', weight='normal', size=8) figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True, facecolor='1.0', edgecolor='w') try: figure.canvas.manager.window.title(title) except Exception: pass pyplot.subplots_adjust(bottom=(0.03 * (dims + 2)), top=0.9, left=0.1, right=0.95, hspace=0.05, wspace=0.0) subplot = pyplot.subplot(subplot) if title: try: title = unicode(title, 'Windows-1252') except TypeError: pass pyplot.title(title, size=11) if (cmap is None): if ((data.dtype.kind in 'ubf') or (vmin == 0)): cmap = 'cubehelix' else: cmap = 'coolwarm' if (photometric == 'miniswhite'): cmap += '_r' image = pyplot.imshow(data[((0,) * dims)].squeeze(), vmin=vmin, vmax=vmax, cmap=cmap, interpolation=interpolation, **kwargs) if (not isrgb): pyplot.colorbar() def format_coord(x, y): x = int((x + 0.5)) y = int((y + 0.5)) try: if dims: return ('%s @ %s [%4i, %4i]' % (cur_ax_dat[1][(y, x)], current, x, y)) else: return ('%s @ [%4i, %4i]' % (data[(y, x)], x, y)) except IndexError: return '' pyplot.gca().format_coord = format_coord if dims: current = list(((0,) * dims)) cur_ax_dat = [0, data[tuple(current)].squeeze()] sliders = [pyplot.Slider(pyplot.axes([0.125, (0.03 * (axis + 1)), 0.725, 0.025]), ('Dimension %i' % axis), 0, (data.shape[axis] - 1), 0, facecolor='0.5', valfmt=('%%.0f [%i]' % data.shape[axis])) for axis in range(dims)] for slider in sliders: slider.drawon = False def set_image(current, sliders=sliders, data=data): cur_ax_dat[1] = data[tuple(current)].squeeze() image.set_data(cur_ax_dat[1]) for (ctrl, index) in zip(sliders, current): ctrl.eventson = False ctrl.set_val(index) ctrl.eventson = True figure.canvas.draw() def on_changed(index, axis, data=data, current=current): index = int(round(index)) cur_ax_dat[0] = axis if (index == current[axis]): return if (index >= data.shape[axis]): index = 0 elif (index < 0): index = (data.shape[axis] - 1) current[axis] = index set_image(current) def on_keypressed(event, data=data, current=current): key = event.key axis = cur_ax_dat[0] if (str(key) in '0123456789'): on_changed(key, axis) elif (key == 'right'): on_changed((current[axis] + 1), axis) elif (key == 'left'): on_changed((current[axis] - 1), axis) elif (key == 'up'): cur_ax_dat[0] = (0 if (axis == (len(data.shape) - 1)) else (axis + 1)) elif (key == 'down'): cur_ax_dat[0] = ((len(data.shape) - 1) if (axis == 0) else (axis - 1)) elif (key == 'end'): on_changed((data.shape[axis] - 1), axis) elif (key == 'home'): on_changed(0, axis) figure.canvas.mpl_connect('key_press_event', on_keypressed) for (axis, ctrl) in enumerate(sliders): ctrl.on_changed((lambda k, a=axis: on_changed(k, a))) return (figure, subplot, image)
[ "def", "imshow", "(", "data", ",", "title", "=", "None", ",", "vmin", "=", "0", ",", "vmax", "=", "None", ",", "cmap", "=", "None", ",", "bitspersample", "=", "None", ",", "photometric", "=", "'rgb'", ",", "interpolation", "=", "'nearest'", ",", "dpi...
show the input image and return the current axes .
train
true
31,727
def _start_engines(node, user, n_engines=None, kill_existing=False): if (n_engines is None): n_engines = node.num_processors node.ssh.switch_user(user) if kill_existing: node.ssh.execute('pkill -f ipengineapp', ignore_exit_status=True) node.ssh.execute(('ipcluster engines --n=%i --daemonize' % n_engines)) node.ssh.switch_user('root')
[ "def", "_start_engines", "(", "node", ",", "user", ",", "n_engines", "=", "None", ",", "kill_existing", "=", "False", ")", ":", "if", "(", "n_engines", "is", "None", ")", ":", "n_engines", "=", "node", ".", "num_processors", "node", ".", "ssh", ".", "s...
launch ipython engines on the given node start one engine per cpu except on master where 1 cpu is reserved for house keeping tasks when possible .
train
false
31,728
def clip_string(s, limit=1000, sep=None): if (len(s) < limit): return s s = s[:(limit - 3)] if (sep is None): return s sep_pos = s.rfind(sep) if (sep_pos == (-1)): return s return (s[:(sep_pos + len(sep))] + '...')
[ "def", "clip_string", "(", "s", ",", "limit", "=", "1000", ",", "sep", "=", "None", ")", ":", "if", "(", "len", "(", "s", ")", "<", "limit", ")", ":", "return", "s", "s", "=", "s", "[", ":", "(", "limit", "-", "3", ")", "]", "if", "(", "s...
clip a string at a given character and add " .
train
false
31,731
def config_show(cibfile=None): return item_show(item='config', item_id=None, extra_args=None, cibfile=cibfile)
[ "def", "config_show", "(", "cibfile", "=", "None", ")", ":", "return", "item_show", "(", "item", "=", "'config'", ",", "item_id", "=", "None", ",", "extra_args", "=", "None", ",", "cibfile", "=", "cibfile", ")" ]
show config of cluster cibfile name/path of the file containing the cib cli example: .
train
false
31,733
def get_feature(features, id, tags=('locus_tag', 'gene', 'old_locus_tag')): for f in features: for key in tags: for x in f.qualifiers.get(key, []): if (x == id): return f raise KeyError(id)
[ "def", "get_feature", "(", "features", ",", "id", ",", "tags", "=", "(", "'locus_tag'", ",", "'gene'", ",", "'old_locus_tag'", ")", ")", ":", "for", "f", "in", "features", ":", "for", "key", "in", "tags", ":", "for", "x", "in", "f", ".", "qualifiers"...
search list of seqfeature objects for an identifier under the given tags .
train
false
31,734
def get_blockdeviceapi_with_cleanup(test_case): try: api = get_blockdeviceapi() except InvalidConfig as e: raise SkipTest(str(e)) test_case.addCleanup(detach_destroy_volumes, api) return api
[ "def", "get_blockdeviceapi_with_cleanup", "(", "test_case", ")", ":", "try", ":", "api", "=", "get_blockdeviceapi", "(", ")", "except", "InvalidConfig", "as", "e", ":", "raise", "SkipTest", "(", "str", "(", "e", ")", ")", "test_case", ".", "addCleanup", "(",...
instantiate an iblockdeviceapi implementation configured to work in the current environment .
train
false
31,735
def test_unit_normalization(): header = get_pkg_data_contents(u'data/unit.hdr', encoding=u'binary') w = wcs.WCS(header) assert (w.wcs.cunit[2] == u'm/s')
[ "def", "test_unit_normalization", "(", ")", ":", "header", "=", "get_pkg_data_contents", "(", "u'data/unit.hdr'", ",", "encoding", "=", "u'binary'", ")", "w", "=", "wcs", ".", "WCS", "(", "header", ")", "assert", "(", "w", ".", "wcs", ".", "cunit", "[", ...
from github issue #1918 .
train
false
31,737
@_FFI.callback(u'Value(ExternContext*, Value*, Field*, TypeId*)') def extern_project(context_handle, val, field, type_id): c = _FFI.from_handle(context_handle) obj = c.from_value(val) field_name = c.from_key(field) typ = c.from_id(type_id.id_) projected = getattr(obj, field_name) if (type(projected) is not typ): projected = typ(projected) return c.to_value(projected)
[ "@", "_FFI", ".", "callback", "(", "u'Value(ExternContext*, Value*, Field*, TypeId*)'", ")", "def", "extern_project", "(", "context_handle", ",", "val", ",", "field", ",", "type_id", ")", ":", "c", "=", "_FFI", ".", "from_handle", "(", "context_handle", ")", "ob...
given a value for obj .
train
false
31,738
def get_directory(filename): if filename.endswith('.tar.gz'): return basename(filename[:(-7)]) elif filename.endswith('.tgz'): return basename(filename[:(-4)]) elif filename.endswith('.tar.bz2'): return basename(filename[:(-8)]) elif filename.endswith('.tbz2'): return basename(filename[:(-5)]) elif filename.endswith('.zip'): return basename(filename[:(-4)]) info('Unknown file extension for {}'.format(filename)) exit(1)
[ "def", "get_directory", "(", "filename", ")", ":", "if", "filename", ".", "endswith", "(", "'.tar.gz'", ")", ":", "return", "basename", "(", "filename", "[", ":", "(", "-", "7", ")", "]", ")", "elif", "filename", ".", "endswith", "(", "'.tgz'", ")", ...
if the filename ends with a recognised file extension .
train
false
31,739
def verify_date_or_time(css, date_or_time): assert_true(world.css_has_value(css, date_or_time))
[ "def", "verify_date_or_time", "(", "css", ",", "date_or_time", ")", ":", "assert_true", "(", "world", ".", "css_has_value", "(", "css", ",", "date_or_time", ")", ")" ]
verifies date or time field .
train
false
31,741
def _apply_include_exclude_names(table, names, include_names, exclude_names): if (names is not None): xxxs = ('x' * max((len(name) for name in (list(names) + list(table.colnames))))) for (ii, colname) in enumerate(table.colnames): table.rename_column(colname, (xxxs + str(ii))) for (ii, name) in enumerate(names): table.rename_column((xxxs + str(ii)), name) names = set(table.colnames) if (include_names is not None): names.intersection_update(include_names) if (exclude_names is not None): names.difference_update(exclude_names) if (names != set(table.colnames)): remove_names = (set(table.colnames) - set(names)) table.remove_columns(remove_names)
[ "def", "_apply_include_exclude_names", "(", "table", ",", "names", ",", "include_names", ",", "exclude_names", ")", ":", "if", "(", "names", "is", "not", "None", ")", ":", "xxxs", "=", "(", "'x'", "*", "max", "(", "(", "len", "(", "name", ")", "for", ...
apply names .
train
false
31,742
def getIfExists(node, nodeId): return _get(node, nodeId)
[ "def", "getIfExists", "(", "node", ",", "nodeId", ")", ":", "return", "_get", "(", "node", ",", "nodeId", ")" ]
get a node with the specified c{nodeid} as any of the c{class} .
train
false
31,743
def dim_handling(inputs, dim=None, dims={}, broadcastables={}, keys=(), **kwargs): if dim: dims = dict(zip(inputs, ([dim] * len(inputs)))) if dims: maxdim = max(dims.values()) broadcastables = dict(((i, (((False,) * dims[i]) + ((True,) * (maxdim - dims[i])))) for i in inputs)) return broadcastables
[ "def", "dim_handling", "(", "inputs", ",", "dim", "=", "None", ",", "dims", "=", "{", "}", ",", "broadcastables", "=", "{", "}", ",", "keys", "=", "(", ")", ",", "**", "kwargs", ")", ":", "if", "dim", ":", "dims", "=", "dict", "(", "zip", "(", ...
handle various input types for dimensions in tensor_wrap see also: tensor_wrap theano_funciton .
train
false
31,744
def monkeypatch_extend(target, name=None): def patcher(func): newfunc = func patchname = (func.__name__ if (name is None) else name) if hasattr(target, patchname): oldfunc = getattr(target, patchname) if (not callable(oldfunc)): raise TypeError('Can not extend non callable attribute') @wraps(oldfunc) def extended(*a, **k): ret = oldfunc(*a, **k) func(*a, **k) return ret newfunc = extended else: raise (False or AssertionError('Must have something to extend')) setattr(target, patchname, newfunc) return func return patcher
[ "def", "monkeypatch_extend", "(", "target", ",", "name", "=", "None", ")", ":", "def", "patcher", "(", "func", ")", ":", "newfunc", "=", "func", "patchname", "=", "(", "func", ".", "__name__", "if", "(", "name", "is", "None", ")", "else", "name", ")"...
decorator that injects the decorated function as an extension of a method of the target class .
train
false
31,746
def harmonic_centrality(G, nbunch=None, distance=None): if G.is_directed(): G = G.reverse() spl = partial(nx.shortest_path_length, G, weight=distance) return {u: sum((((1 / d) if (d > 0) else 0) for (v, d) in spl(source=u))) for u in G.nbunch_iter(nbunch)}
[ "def", "harmonic_centrality", "(", "G", ",", "nbunch", "=", "None", ",", "distance", "=", "None", ")", ":", "if", "G", ".", "is_directed", "(", ")", ":", "G", "=", "G", ".", "reverse", "(", ")", "spl", "=", "partial", "(", "nx", ".", "shortest_path...
compute harmonic centrality for nodes .
train
false
31,747
def discretize_oversample_2D(model, x_range, y_range, factor=10): x = np.arange((x_range[0] - (0.5 * (1 - (1 / factor)))), (x_range[1] + (0.5 * (1 + (1 / factor)))), (1.0 / factor)) y = np.arange((y_range[0] - (0.5 * (1 - (1 / factor)))), (y_range[1] + (0.5 * (1 + (1 / factor)))), (1.0 / factor)) (x_grid, y_grid) = np.meshgrid(x, y) values = model(x_grid, y_grid) shape = ((y.size // factor), factor, (x.size // factor), factor) values = np.reshape(values, shape) return values.mean(axis=3).mean(axis=1)[:(-1), :(-1)]
[ "def", "discretize_oversample_2D", "(", "model", ",", "x_range", ",", "y_range", ",", "factor", "=", "10", ")", ":", "x", "=", "np", ".", "arange", "(", "(", "x_range", "[", "0", "]", "-", "(", "0.5", "*", "(", "1", "-", "(", "1", "/", "factor", ...
discretize model by taking the average on an oversampled grid .
train
false
31,748
def widont(value, count=1): def replace(matchobj): return force_text(('&nbsp;%s' % matchobj.group(1))) for i in range(count): value = re_widont.sub(replace, force_text(value)) return value
[ "def", "widont", "(", "value", ",", "count", "=", "1", ")", ":", "def", "replace", "(", "matchobj", ")", ":", "return", "force_text", "(", "(", "'&nbsp;%s'", "%", "matchobj", ".", "group", "(", "1", ")", ")", ")", "for", "i", "in", "range", "(", ...
adds an html non-breaking space between the final two words of the string to avoid "widowed" words .
train
true
31,750
def sample_top(a=[], top_k=10): idx = np.argpartition(a, (- top_k))[(- top_k):] probs = a[idx] probs = (probs / np.sum(probs)) choice = np.random.choice(idx, p=probs) return choice
[ "def", "sample_top", "(", "a", "=", "[", "]", ",", "top_k", "=", "10", ")", ":", "idx", "=", "np", ".", "argpartition", "(", "a", ",", "(", "-", "top_k", ")", ")", "[", "(", "-", "top_k", ")", ":", "]", "probs", "=", "a", "[", "idx", "]", ...
sample from top_k probabilities .
train
true
31,751
def splitstrip(string, sep=','): return [word.strip() for word in string.split(sep) if word.strip()]
[ "def", "splitstrip", "(", "string", ",", "sep", "=", "','", ")", ":", "return", "[", "word", ".", "strip", "(", ")", "for", "word", "in", "string", ".", "split", "(", "sep", ")", "if", "word", ".", "strip", "(", ")", "]" ]
return a list of stripped string by splitting the string given as argument on sep .
train
false
31,752
def directed_configuration_model(in_degree_sequence, out_degree_sequence, create_using=None, seed=None): if (sum(in_degree_sequence) != sum(out_degree_sequence)): msg = 'Invalid degree sequences: sequences must have equal sums' raise nx.NetworkXError(msg) if (create_using is None): create_using = nx.MultiDiGraph() G = _configuration_model(out_degree_sequence, create_using, directed=True, in_deg_sequence=in_degree_sequence, seed=seed) name = 'directed configuration_model {} nodes {} edges' G.name = name.format(len(G), G.size()) return G
[ "def", "directed_configuration_model", "(", "in_degree_sequence", ",", "out_degree_sequence", ",", "create_using", "=", "None", ",", "seed", "=", "None", ")", ":", "if", "(", "sum", "(", "in_degree_sequence", ")", "!=", "sum", "(", "out_degree_sequence", ")", ")...
return a directed_random graph with the given degree sequences .
train
false
31,753
def cls_attr_mock(request, cls, attr_name, name=None, **kwargs): name = (request.fixturename if (name is None) else name) _patch = patch.object(cls, attr_name, name=name, **kwargs) request.addfinalizer(_patch.stop) return _patch.start()
[ "def", "cls_attr_mock", "(", "request", ",", "cls", ",", "attr_name", ",", "name", "=", "None", ",", "**", "kwargs", ")", ":", "name", "=", "(", "request", ".", "fixturename", "if", "(", "name", "is", "None", ")", "else", "name", ")", "_patch", "=", ...
return a mock for attribute *attr_name* on *cls* where the patch is reversed after pytest uses it .
train
false
31,754
@utils.arg('project_id', metavar='<project_id>', help=_('The ID of the project.')) @deprecated_network def do_scrub(cs, args): networks_list = cs.networks.list() networks_list = [network for network in networks_list if (getattr(network, 'project_id', '') == args.project_id)] search_opts = {'all_tenants': 1} groups = cs.security_groups.list(search_opts) groups = [group for group in groups if (group.tenant_id == args.project_id)] for network in networks_list: cs.networks.disassociate(network) for group in groups: cs.security_groups.delete(group)
[ "@", "utils", ".", "arg", "(", "'project_id'", ",", "metavar", "=", "'<project_id>'", ",", "help", "=", "_", "(", "'The ID of the project.'", ")", ")", "@", "deprecated_network", "def", "do_scrub", "(", "cs", ",", "args", ")", ":", "networks_list", "=", "c...
delete networks and security groups associated with a project .
train
false
31,756
def plot_mpl(mpl_fig, resize=False, strip_style=False, verbose=False, show_link=True, link_text='Export to plot.ly', validate=True, output_type='file', include_plotlyjs=True, filename='temp-plot.html', auto_open=True, image=None, image_filename='plot_image', image_height=600, image_width=800): plotly_plot = tools.mpl_to_plotly(mpl_fig, resize, strip_style, verbose) return plot(plotly_plot, show_link, link_text, validate, output_type, include_plotlyjs, filename, auto_open, image=image, image_filename=image_filename, image_height=image_height, image_width=image_width)
[ "def", "plot_mpl", "(", "mpl_fig", ",", "resize", "=", "False", ",", "strip_style", "=", "False", ",", "verbose", "=", "False", ",", "show_link", "=", "True", ",", "link_text", "=", "'Export to plot.ly'", ",", "validate", "=", "True", ",", "output_type", "...
convert a matplotlib figure to a plotly graph stored locally as html .
train
false
31,757
def p_function_definition_2(t): pass
[ "def", "p_function_definition_2", "(", "t", ")", ":", "pass" ]
function_definition : declarator declaration_list compound_statement .
train
false
31,758
def _supports_universal_builds(): osx_version = _get_system_version() if osx_version: try: osx_version = tuple((int(i) for i in osx_version.split('.'))) except ValueError: osx_version = '' return (bool((osx_version >= (10, 4))) if osx_version else False)
[ "def", "_supports_universal_builds", "(", ")", ":", "osx_version", "=", "_get_system_version", "(", ")", "if", "osx_version", ":", "try", ":", "osx_version", "=", "tuple", "(", "(", "int", "(", "i", ")", "for", "i", "in", "osx_version", ".", "split", "(", ...
returns true if universal builds are supported on this system .
train
false
31,759
def p_declaration_specifiers_6(t): pass
[ "def", "p_declaration_specifiers_6", "(", "t", ")", ":", "pass" ]
declaration_specifiers : type_qualifier .
train
false
31,760
def cert_get_not_before(cert): return cert.not_valid_before
[ "def", "cert_get_not_before", "(", "cert", ")", ":", "return", "cert", ".", "not_valid_before" ]
gets the naive datetime of the certificates not_before field .
train
false
31,761
def is_x11_compositing_enabled(): try: from AnyQt.QtX11Extras import QX11Info except ImportError: return False if hasattr(QX11Info, 'isCompositingManagerRunning'): return QX11Info.isCompositingManagerRunning() else: return False
[ "def", "is_x11_compositing_enabled", "(", ")", ":", "try", ":", "from", "AnyQt", ".", "QtX11Extras", "import", "QX11Info", "except", "ImportError", ":", "return", "False", "if", "hasattr", "(", "QX11Info", ",", "'isCompositingManagerRunning'", ")", ":", "return", ...
is x11 compositing manager running .
train
false
31,762
def paths_from_items(items, item_type=TreeWidgetItem.TYPE, item_filter=None): if (item_filter is None): item_filter = _true_filter return [i.path for i in items if ((i.type() == item_type) and item_filter(i))]
[ "def", "paths_from_items", "(", "items", ",", "item_type", "=", "TreeWidgetItem", ".", "TYPE", ",", "item_filter", "=", "None", ")", ":", "if", "(", "item_filter", "is", "None", ")", ":", "item_filter", "=", "_true_filter", "return", "[", "i", ".", "path",...
return a list of paths from a list of items .
train
false
31,763
def get_rfx_object(packetid): import RFXtrx as rfxtrxmod try: binarypacket = bytearray.fromhex(packetid) except ValueError: return None pkt = rfxtrxmod.lowlevel.parse(binarypacket) if (pkt is None): return None if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket): obj = rfxtrxmod.SensorEvent(pkt) elif isinstance(pkt, rfxtrxmod.lowlevel.Status): obj = rfxtrxmod.StatusEvent(pkt) else: obj = rfxtrxmod.ControlEvent(pkt) return obj
[ "def", "get_rfx_object", "(", "packetid", ")", ":", "import", "RFXtrx", "as", "rfxtrxmod", "try", ":", "binarypacket", "=", "bytearray", ".", "fromhex", "(", "packetid", ")", "except", "ValueError", ":", "return", "None", "pkt", "=", "rfxtrxmod", ".", "lowle...
return the rfxobject with the packetid .
train
false
31,764
def _ParseCronYaml(): cronyaml_files = ('cron.yaml', 'cron.yml') for cronyaml in cronyaml_files: try: fh = open(cronyaml, 'r') except IOError: continue try: cron_info = croninfo.LoadSingleCron(fh) return cron_info finally: fh.close() return None
[ "def", "_ParseCronYaml", "(", ")", ":", "cronyaml_files", "=", "(", "'cron.yaml'", ",", "'cron.yml'", ")", "for", "cronyaml", "in", "cronyaml_files", ":", "try", ":", "fh", "=", "open", "(", "cronyaml", ",", "'r'", ")", "except", "IOError", ":", "continue"...
loads the cron .
train
false
31,765
def is_local_file(file_location): return (not re.match('[a-z]+://', file_location))
[ "def", "is_local_file", "(", "file_location", ")", ":", "return", "(", "not", "re", ".", "match", "(", "'[a-z]+://'", ",", "file_location", ")", ")" ]
check if file is in the local file system .
train
false
31,766
def idzr_rid(m, n, matveca, k): (idx, proj) = _id.idzr_rid(m, n, matveca, k) proj = proj[:(k * (n - k))].reshape((k, (n - k)), order='F') return (idx, proj)
[ "def", "idzr_rid", "(", "m", ",", "n", ",", "matveca", ",", "k", ")", ":", "(", "idx", ",", "proj", ")", "=", "_id", ".", "idzr_rid", "(", "m", ",", "n", ",", "matveca", ",", "k", ")", "proj", "=", "proj", "[", ":", "(", "k", "*", "(", "n...
compute id of a complex matrix to a specified rank using random matrix-vector multiplication .
train
false
31,767
def whitespace_around_operator(logical_line): for match in OPERATOR_REGEX.finditer(logical_line): (before, after) = match.groups() if (' DCTB ' in before): (yield (match.start(1), 'E223 tab before operator')) elif (len(before) > 1): (yield (match.start(1), 'E221 multiple spaces before operator')) if (' DCTB ' in after): (yield (match.start(2), 'E224 tab after operator')) elif (len(after) > 1): (yield (match.start(2), 'E222 multiple spaces after operator'))
[ "def", "whitespace_around_operator", "(", "logical_line", ")", ":", "for", "match", "in", "OPERATOR_REGEX", ".", "finditer", "(", "logical_line", ")", ":", "(", "before", ",", "after", ")", "=", "match", ".", "groups", "(", ")", "if", "(", "' DCTB '", "in"...
avoid extraneous whitespace around an operator .
train
true