Columns:
  id_within_dataset     int64    values 1-55.5k
  snippet               string   lengths 19-14.2k
  tokens                list     lengths 6-1.63k
  nl                    string   lengths 6-352
  split_within_dataset  string   1 distinct value
  is_duplicated         bool     2 classes
53,222
def numToDottedQuad(num):
    import socket, struct
    if ((num > long(4294967295)) or (num < 0)):
        raise ValueError(('Not a good numeric IP: %s' % num))
    try:
        return socket.inet_ntoa(struct.pack('!L', long(num)))
    except (socket.error, struct.error, OverflowError):
        raise ValueError(('Not a good numeric IP: %s' % num))
[ "def", "numToDottedQuad", "(", "num", ")", ":", "import", "socket", ",", "struct", "if", "(", "(", "num", ">", "long", "(", "4294967295", ")", ")", "or", "(", "num", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "(", "'Not a good numeric IP: %s'", "%", "num", ")", ")", "try", ":", "return", "socket", ".", "inet_ntoa", "(", "struct", ".", "pack", "(", "'!L'", ",", "long", "(", "num", ")", ")", ")", "except", "(", "socket", ".", "error", ",", "struct", ".", "error", ",", "OverflowError", ")", ":", "raise", "ValueError", "(", "(", "'Not a good numeric IP: %s'", "%", "num", ")", ")" ]
convert int or long int to dotted quad string .
train
true
53,223
def delete_record(zone_id, record_id, profile):
    conn = _get_driver(profile=profile)
    record = conn.get_record(zone_id=zone_id, record_id=record_id)
    return conn.delete_record(record)
[ "def", "delete_record", "(", "zone_id", ",", "record_id", ",", "profile", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "record", "=", "conn", ".", "get_record", "(", "zone_id", "=", "zone_id", ",", "record_id", "=", "record_id", ")", "return", "conn", ".", "delete_record", "(", "record", ")" ]
delete a record .
train
true
53,224
def queues(request):
    return render('queues.html', request, {'queuelist': request.jt.queues()})
[ "def", "queues", "(", "request", ")", ":", "return", "render", "(", "'queues.html'", ",", "request", ",", "{", "'queuelist'", ":", "request", ".", "jt", ".", "queues", "(", ")", "}", ")" ]
we get here from /queues .
train
false
53,226
def json_objs_equal(left, right):
    return (_ordered(left) == _ordered(right))
[ "def", "json_objs_equal", "(", "left", ",", "right", ")", ":", "return", "(", "_ordered", "(", "left", ")", "==", "_ordered", "(", "right", ")", ")" ]
compare two parsed json objects .
train
false
53,227
def restart_program():
    logging.info('Scheduled restart request')
    sabnzbd.TRIGGER_RESTART = True
[ "def", "restart_program", "(", ")", ":", "logging", ".", "info", "(", "'Scheduled restart request'", ")", "sabnzbd", ".", "TRIGGER_RESTART", "=", "True" ]
restart program .
train
false
53,229
def process_failed(dirName, nzbName, result):
    if sickbeard.USE_FAILED_DOWNLOADS:
        processor = None
        try:
            processor = failedProcessor.FailedProcessor(dirName, nzbName)
            result.result = processor.process()
            process_fail_message = u''
        except FailedPostProcessingFailedException as e:
            result.result = False
            process_fail_message = ex(e)
        if processor:
            result.output += processor.log
        if (sickbeard.DELETE_FAILED and result.result):
            if delete_folder(dirName, check_empty=False):
                result.output += logHelper(u'Deleted folder: {0}'.format(dirName), logger.DEBUG)
        if result.result:
            result.output += logHelper(u'Failed Download Processing succeeded: ({0}, {1})'.format(nzbName, dirName))
        else:
            result.output += logHelper(u'Failed Download Processing failed: ({0}, {1}): {2}'.format(nzbName, dirName, process_fail_message), logger.WARNING)
[ "def", "process_failed", "(", "dirName", ",", "nzbName", ",", "result", ")", ":", "if", "sickbeard", ".", "USE_FAILED_DOWNLOADS", ":", "processor", "=", "None", "try", ":", "processor", "=", "failedProcessor", ".", "FailedProcessor", "(", "dirName", ",", "nzbName", ")", "result", ".", "result", "=", "processor", ".", "process", "(", ")", "process_fail_message", "=", "u''", "except", "FailedPostProcessingFailedException", "as", "e", ":", "result", ".", "result", "=", "False", "process_fail_message", "=", "ex", "(", "e", ")", "if", "processor", ":", "result", ".", "output", "+=", "processor", ".", "log", "if", "(", "sickbeard", ".", "DELETE_FAILED", "and", "result", ".", "result", ")", ":", "if", "delete_folder", "(", "dirName", ",", "check_empty", "=", "False", ")", ":", "result", ".", "output", "+=", "logHelper", "(", "u'Deleted folder: {0}'", ".", "format", "(", "dirName", ")", ",", "logger", ".", "DEBUG", ")", "if", "result", ".", "result", ":", "result", ".", "output", "+=", "logHelper", "(", "u'Failed Download Processing succeeded: ({0}, {1})'", ".", "format", "(", "nzbName", ",", "dirName", ")", ")", "else", ":", "result", ".", "output", "+=", "logHelper", "(", "u'Failed Download Processing failed: ({0}, {1}): {2}'", ".", "format", "(", "nzbName", ",", "dirName", ",", "process_fail_message", ")", ",", "logger", ".", "WARNING", ")" ]
process a download that did not complete correctly .
train
false
53,230
@decorator.decorator
def apply_to_mask(f, clip, *a, **k):
    newclip = f(clip, *a, **k)
    if (hasattr(newclip, 'mask') and (newclip.mask is not None)):
        newclip.mask = f(newclip.mask, *a, **k)
    return newclip
[ "@", "decorator", ".", "decorator", "def", "apply_to_mask", "(", "f", ",", "clip", ",", "*", "a", ",", "**", "k", ")", ":", "newclip", "=", "f", "(", "clip", ",", "*", "a", ",", "**", "k", ")", "if", "(", "hasattr", "(", "newclip", ",", "'mask'", ")", "and", "(", "newclip", ".", "mask", "is", "not", "None", ")", ")", ":", "newclip", ".", "mask", "=", "f", "(", "newclip", ".", "mask", ",", "*", "a", ",", "**", "k", ")", "return", "newclip" ]
this decorator will apply the same function f to the mask of the clip created with f .
train
false
53,231
def whoisCallersCaller():
    import inspect
    frameObj = inspect.stack()[2][0]
    return inspect.getframeinfo(frameObj)
[ "def", "whoisCallersCaller", "(", ")", ":", "import", "inspect", "frameObj", "=", "inspect", ".", "stack", "(", ")", "[", "2", "]", "[", "0", "]", "return", "inspect", ".", "getframeinfo", "(", "frameObj", ")" ]
returns: traceback namedtuple for our callers caller .
train
false
53,234
def get_feedback(score, sequence):
    global default_feedback
    if (len(sequence) == 0):
        return default_feedback
    if (score > 2):
        return dict({u'warning': u'', u'suggestions': []})
    longest_match = max(sequence, key=(lambda x: len(x[u'token'])))
    feedback = get_match_feedback(longest_match, (len(sequence) == 1))
    if (not feedback):
        feedback = {u'warning': u'', u'suggestions': [_(u'Better add a few more letters or another word')]}
    return feedback
[ "def", "get_feedback", "(", "score", ",", "sequence", ")", ":", "global", "default_feedback", "if", "(", "len", "(", "sequence", ")", "==", "0", ")", ":", "return", "default_feedback", "if", "(", "score", ">", "2", ")", ":", "return", "dict", "(", "{", "u'warning'", ":", "u''", ",", "u'suggestions'", ":", "[", "]", "}", ")", "longest_match", "=", "max", "(", "sequence", ",", "key", "=", "(", "lambda", "x", ":", "len", "(", "x", "[", "u'token'", "]", ")", ")", ")", "feedback", "=", "get_match_feedback", "(", "longest_match", ",", "(", "len", "(", "sequence", ")", "==", "1", ")", ")", "if", "(", "not", "feedback", ")", ":", "feedback", "=", "{", "u'warning'", ":", "u''", ",", "u'suggestions'", ":", "[", "_", "(", "u'Better add a few more letters or another word'", ")", "]", "}", "return", "feedback" ]
returns the feedback dictionary consisting of for the given sequences .
train
false
53,235
@click.command(u'bulk-rename')
@click.argument(u'doctype')
@click.argument(u'path')
@pass_context
def _bulk_rename(context, doctype, path):
    from frappe.model.rename_doc import bulk_rename
    from frappe.utils.csvutils import read_csv_content
    site = get_site(context)
    with open(path, u'r') as csvfile:
        rows = read_csv_content(csvfile.read())
    frappe.init(site=site)
    frappe.connect()
    bulk_rename(doctype, rows, via_console=True)
    frappe.destroy()
[ "@", "click", ".", "command", "(", "u'bulk-rename'", ")", "@", "click", ".", "argument", "(", "u'doctype'", ")", "@", "click", ".", "argument", "(", "u'path'", ")", "@", "pass_context", "def", "_bulk_rename", "(", "context", ",", "doctype", ",", "path", ")", ":", "from", "frappe", ".", "model", ".", "rename_doc", "import", "bulk_rename", "from", "frappe", ".", "utils", ".", "csvutils", "import", "read_csv_content", "site", "=", "get_site", "(", "context", ")", "with", "open", "(", "path", ",", "u'r'", ")", "as", "csvfile", ":", "rows", "=", "read_csv_content", "(", "csvfile", ".", "read", "(", ")", ")", "frappe", ".", "init", "(", "site", "=", "site", ")", "frappe", ".", "connect", "(", ")", "bulk_rename", "(", "doctype", ",", "rows", ",", "via_console", "=", "True", ")", "frappe", ".", "destroy", "(", ")" ]
rename multiple records via csv file .
train
false
53,236
def mocked_get_release_by_id(id_, includes=[], release_status=[], release_type=[]):
    releases = {ImportMusicBrainzIdTest.ID_RELEASE_0: ('VALID_RELEASE_0', 'TAG ARTIST'),
                ImportMusicBrainzIdTest.ID_RELEASE_1: ('VALID_RELEASE_1', 'DISTANT_MATCH')}
    return {'release': {'title': releases[id_][0],
                        'id': id_,
                        'medium-list': [{'track-list': [{'recording': {'title': 'foo', 'id': 'bar', 'length': 59},
                                                         'position': 9,
                                                         'number': 'A2'}],
                                         'position': 5}],
                        'artist-credit': [{'artist': {'name': releases[id_][1], 'id': 'some-id'}}],
                        'release-group': {'id': 'another-id'}}}
[ "def", "mocked_get_release_by_id", "(", "id_", ",", "includes", "=", "[", "]", ",", "release_status", "=", "[", "]", ",", "release_type", "=", "[", "]", ")", ":", "releases", "=", "{", "ImportMusicBrainzIdTest", ".", "ID_RELEASE_0", ":", "(", "'VALID_RELEASE_0'", ",", "'TAG ARTIST'", ")", ",", "ImportMusicBrainzIdTest", ".", "ID_RELEASE_1", ":", "(", "'VALID_RELEASE_1'", ",", "'DISTANT_MATCH'", ")", "}", "return", "{", "'release'", ":", "{", "'title'", ":", "releases", "[", "id_", "]", "[", "0", "]", ",", "'id'", ":", "id_", ",", "'medium-list'", ":", "[", "{", "'track-list'", ":", "[", "{", "'recording'", ":", "{", "'title'", ":", "'foo'", ",", "'id'", ":", "'bar'", ",", "'length'", ":", "59", "}", ",", "'position'", ":", "9", ",", "'number'", ":", "'A2'", "}", "]", ",", "'position'", ":", "5", "}", "]", ",", "'artist-credit'", ":", "[", "{", "'artist'", ":", "{", "'name'", ":", "releases", "[", "id_", "]", "[", "1", "]", ",", "'id'", ":", "'some-id'", "}", "}", "]", ",", "'release-group'", ":", "{", "'id'", ":", "'another-id'", "}", "}", "}" ]
mimic musicbrainzngs .
train
false
53,237
def test_finder_only_installs_stable_releases(data):
    req = InstallRequirement.from_line('bar', None)
    finder = PackageFinder([], [data.index_url('pre')], session=PipSession())
    link = finder.find_requirement(req, False)
    assert link.url.endswith('bar-1.0.tar.gz'), link.url
    links = ['https://foo/bar-1.0.tar.gz', 'https://foo/bar-2.0b1.tar.gz']
    finder = PackageFinder(links, [], session=PipSession())
    with patch.object(finder, '_get_pages', (lambda x, y: [])):
        link = finder.find_requirement(req, False)
        assert (link.url == 'https://foo/bar-1.0.tar.gz')
    links.reverse()
    finder = PackageFinder(links, [], session=PipSession())
    with patch.object(finder, '_get_pages', (lambda x, y: [])):
        link = finder.find_requirement(req, False)
        assert (link.url == 'https://foo/bar-1.0.tar.gz')
[ "def", "test_finder_only_installs_stable_releases", "(", "data", ")", ":", "req", "=", "InstallRequirement", ".", "from_line", "(", "'bar'", ",", "None", ")", "finder", "=", "PackageFinder", "(", "[", "]", ",", "[", "data", ".", "index_url", "(", "'pre'", ")", "]", ",", "session", "=", "PipSession", "(", ")", ")", "link", "=", "finder", ".", "find_requirement", "(", "req", ",", "False", ")", "assert", "link", ".", "url", ".", "endswith", "(", "'bar-1.0.tar.gz'", ")", ",", "link", ".", "url", "links", "=", "[", "'https://foo/bar-1.0.tar.gz'", ",", "'https://foo/bar-2.0b1.tar.gz'", "]", "finder", "=", "PackageFinder", "(", "links", ",", "[", "]", ",", "session", "=", "PipSession", "(", ")", ")", "with", "patch", ".", "object", "(", "finder", ",", "'_get_pages'", ",", "(", "lambda", "x", ",", "y", ":", "[", "]", ")", ")", ":", "link", "=", "finder", ".", "find_requirement", "(", "req", ",", "False", ")", "assert", "(", "link", ".", "url", "==", "'https://foo/bar-1.0.tar.gz'", ")", "links", ".", "reverse", "(", ")", "finder", "=", "PackageFinder", "(", "links", ",", "[", "]", ",", "session", "=", "PipSession", "(", ")", ")", "with", "patch", ".", "object", "(", "finder", ",", "'_get_pages'", ",", "(", "lambda", "x", ",", "y", ":", "[", "]", ")", ")", ":", "link", "=", "finder", ".", "find_requirement", "(", "req", ",", "False", ")", "assert", "(", "link", ".", "url", "==", "'https://foo/bar-1.0.tar.gz'", ")" ]
test packagefinder only accepts stable versioned releases by default .
train
false
53,238
@must_have_permission('write')
@must_have_addon(SHORT_NAME, 'user')
@must_have_addon(SHORT_NAME, 'node')
def dataverse_get_datasets(node_addon, **kwargs):
    alias = request.json.get('alias')
    connection = client.connect_from_settings(node_addon)
    dataverse = client.get_dataverse(connection, alias)
    datasets = client.get_datasets(dataverse)
    ret = {'alias': alias,
           'datasets': [{'title': dataset.title, 'doi': dataset.doi} for dataset in datasets]}
    return (ret, http.OK)
[ "@", "must_have_permission", "(", "'write'", ")", "@", "must_have_addon", "(", "SHORT_NAME", ",", "'user'", ")", "@", "must_have_addon", "(", "SHORT_NAME", ",", "'node'", ")", "def", "dataverse_get_datasets", "(", "node_addon", ",", "**", "kwargs", ")", ":", "alias", "=", "request", ".", "json", ".", "get", "(", "'alias'", ")", "connection", "=", "client", ".", "connect_from_settings", "(", "node_addon", ")", "dataverse", "=", "client", ".", "get_dataverse", "(", "connection", ",", "alias", ")", "datasets", "=", "client", ".", "get_datasets", "(", "dataverse", ")", "ret", "=", "{", "'alias'", ":", "alias", ",", "'datasets'", ":", "[", "{", "'title'", ":", "dataset", ".", "title", ",", "'doi'", ":", "dataset", ".", "doi", "}", "for", "dataset", "in", "datasets", "]", "}", "return", "(", "ret", ",", "http", ".", "OK", ")" ]
get list of datasets from provided dataverse alias .
train
false
53,239
def _compute_row_norms(data):
    norms = np.sqrt(np.sum((data ** 2), axis=1))
    norms[(norms == 0)] = 1.0
    return norms
[ "def", "_compute_row_norms", "(", "data", ")", ":", "norms", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "data", "**", "2", ")", ",", "axis", "=", "1", ")", ")", "norms", "[", "(", "norms", "==", "0", ")", "]", "=", "1.0", "return", "norms" ]
compute scaling based on estimated norm .
train
false
53,240
def setval(key, val, destructive=False):
    return setvals({key: val}, destructive)
[ "def", "setval", "(", "key", ",", "val", ",", "destructive", "=", "False", ")", ":", "return", "setvals", "(", "{", "key", ":", "val", "}", ",", "destructive", ")" ]
set a grains value in the grains config file key the grain key to be set .
train
false
53,241
def sortNDHelperA(fitnesses, obj, front):
    if (len(fitnesses) < 2):
        return
    elif (len(fitnesses) == 2):
        (s1, s2) = (fitnesses[0], fitnesses[1])
        if isDominated(s2[:(obj + 1)], s1[:(obj + 1)]):
            front[s2] = max(front[s2], (front[s1] + 1))
    elif (obj == 1):
        sweepA(fitnesses, front)
    elif (len(frozenset(map(itemgetter(obj), fitnesses))) == 1):
        sortNDHelperA(fitnesses, (obj - 1), front)
    else:
        (best, worst) = splitA(fitnesses, obj)
        sortNDHelperA(best, obj, front)
        sortNDHelperB(best, worst, (obj - 1), front)
        sortNDHelperA(worst, obj, front)
[ "def", "sortNDHelperA", "(", "fitnesses", ",", "obj", ",", "front", ")", ":", "if", "(", "len", "(", "fitnesses", ")", "<", "2", ")", ":", "return", "elif", "(", "len", "(", "fitnesses", ")", "==", "2", ")", ":", "(", "s1", ",", "s2", ")", "=", "(", "fitnesses", "[", "0", "]", ",", "fitnesses", "[", "1", "]", ")", "if", "isDominated", "(", "s2", "[", ":", "(", "obj", "+", "1", ")", "]", ",", "s1", "[", ":", "(", "obj", "+", "1", ")", "]", ")", ":", "front", "[", "s2", "]", "=", "max", "(", "front", "[", "s2", "]", ",", "(", "front", "[", "s1", "]", "+", "1", ")", ")", "elif", "(", "obj", "==", "1", ")", ":", "sweepA", "(", "fitnesses", ",", "front", ")", "elif", "(", "len", "(", "frozenset", "(", "map", "(", "itemgetter", "(", "obj", ")", ",", "fitnesses", ")", ")", ")", "==", "1", ")", ":", "sortNDHelperA", "(", "fitnesses", ",", "(", "obj", "-", "1", ")", ",", "front", ")", "else", ":", "(", "best", ",", "worst", ")", "=", "splitA", "(", "fitnesses", ",", "obj", ")", "sortNDHelperA", "(", "best", ",", "obj", ",", "front", ")", "sortNDHelperB", "(", "best", ",", "worst", ",", "(", "obj", "-", "1", ")", ",", "front", ")", "sortNDHelperA", "(", "worst", ",", "obj", ",", "front", ")" ]
create a non-dominated sorting of s on the first m objectives .
train
false
53,242
def cxESBlend(ind1, ind2, alpha):
    for (i, (x1, s1, x2, s2)) in enumerate(zip(ind1, ind1.strategy, ind2, ind2.strategy)):
        gamma = (((1.0 + (2.0 * alpha)) * random.random()) - alpha)
        ind1[i] = (((1.0 - gamma) * x1) + (gamma * x2))
        ind2[i] = ((gamma * x1) + ((1.0 - gamma) * x2))
        gamma = (((1.0 + (2.0 * alpha)) * random.random()) - alpha)
        ind1.strategy[i] = (((1.0 - gamma) * s1) + (gamma * s2))
        ind2.strategy[i] = ((gamma * s1) + ((1.0 - gamma) * s2))
    return (ind1, ind2)
[ "def", "cxESBlend", "(", "ind1", ",", "ind2", ",", "alpha", ")", ":", "for", "(", "i", ",", "(", "x1", ",", "s1", ",", "x2", ",", "s2", ")", ")", "in", "enumerate", "(", "zip", "(", "ind1", ",", "ind1", ".", "strategy", ",", "ind2", ",", "ind2", ".", "strategy", ")", ")", ":", "gamma", "=", "(", "(", "(", "1.0", "+", "(", "2.0", "*", "alpha", ")", ")", "*", "random", ".", "random", "(", ")", ")", "-", "alpha", ")", "ind1", "[", "i", "]", "=", "(", "(", "(", "1.0", "-", "gamma", ")", "*", "x1", ")", "+", "(", "gamma", "*", "x2", ")", ")", "ind2", "[", "i", "]", "=", "(", "(", "gamma", "*", "x1", ")", "+", "(", "(", "1.0", "-", "gamma", ")", "*", "x2", ")", ")", "gamma", "=", "(", "(", "(", "1.0", "+", "(", "2.0", "*", "alpha", ")", ")", "*", "random", ".", "random", "(", ")", ")", "-", "alpha", ")", "ind1", ".", "strategy", "[", "i", "]", "=", "(", "(", "(", "1.0", "-", "gamma", ")", "*", "s1", ")", "+", "(", "gamma", "*", "s2", ")", ")", "ind2", ".", "strategy", "[", "i", "]", "=", "(", "(", "gamma", "*", "s1", ")", "+", "(", "(", "1.0", "-", "gamma", ")", "*", "s2", ")", ")", "return", "(", "ind1", ",", "ind2", ")" ]
executes a blend crossover on both .
train
false
53,243
def validate_encoding_and_error_handler(setting, value, option_parser, config_parser=None, config_section=None):
    if (':' in value):
        (encoding, handler) = value.split(':')
        validate_encoding_error_handler((setting + '_error_handler'), handler, option_parser, config_parser, config_section)
        if config_parser:
            config_parser.set(config_section, (setting + '_error_handler'), handler)
        else:
            setattr(option_parser.values, (setting + '_error_handler'), handler)
    else:
        encoding = value
    validate_encoding(setting, encoding, option_parser, config_parser, config_section)
    return encoding
[ "def", "validate_encoding_and_error_handler", "(", "setting", ",", "value", ",", "option_parser", ",", "config_parser", "=", "None", ",", "config_section", "=", "None", ")", ":", "if", "(", "':'", "in", "value", ")", ":", "(", "encoding", ",", "handler", ")", "=", "value", ".", "split", "(", "':'", ")", "validate_encoding_error_handler", "(", "(", "setting", "+", "'_error_handler'", ")", ",", "handler", ",", "option_parser", ",", "config_parser", ",", "config_section", ")", "if", "config_parser", ":", "config_parser", ".", "set", "(", "config_section", ",", "(", "setting", "+", "'_error_handler'", ")", ",", "handler", ")", "else", ":", "setattr", "(", "option_parser", ".", "values", ",", "(", "setting", "+", "'_error_handler'", ")", ",", "handler", ")", "else", ":", "encoding", "=", "value", "validate_encoding", "(", "setting", ",", "encoding", ",", "option_parser", ",", "config_parser", ",", "config_section", ")", "return", "encoding" ]
side-effect: if an error handler is included in the value .
train
false
53,244
def extract_public_key(args):
    sk = _load_key(args)
    vk = sk.get_verifying_key()
    args.public_keyfile.write(vk.to_string())
    print ('%s public key extracted to %s' % (args.keyfile.name, args.public_keyfile.name))
[ "def", "extract_public_key", "(", "args", ")", ":", "sk", "=", "_load_key", "(", "args", ")", "vk", "=", "sk", ".", "get_verifying_key", "(", ")", "args", ".", "public_keyfile", ".", "write", "(", "vk", ".", "to_string", "(", ")", ")", "print", "(", "'%s public key extracted to %s'", "%", "(", "args", ".", "keyfile", ".", "name", ",", "args", ".", "public_keyfile", ".", "name", ")", ")" ]
load an ecdsa private key and extract the embedded public key as raw binary data .
train
true
53,245
def requires_reload(action, plugins):
    return any((p.get_plugin_class_instance().requires_reload(action) for p in plugins))
[ "def", "requires_reload", "(", "action", ",", "plugins", ")", ":", "return", "any", "(", "(", "p", ".", "get_plugin_class_instance", "(", ")", ".", "requires_reload", "(", "action", ")", "for", "p", "in", "plugins", ")", ")" ]
returns true if any of the plugins require a page reload when action is taking place .
train
false
53,246
def is_jinja_expression(value):
    if ((not value) or (not isinstance(value, six.string_types))):
        return False
    for marker in JINJA_EXPRESSIONS_START_MARKERS:
        if (marker in value):
            return True
    return False
[ "def", "is_jinja_expression", "(", "value", ")", ":", "if", "(", "(", "not", "value", ")", "or", "(", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ")", ")", ":", "return", "False", "for", "marker", "in", "JINJA_EXPRESSIONS_START_MARKERS", ":", "if", "(", "marker", "in", "value", ")", ":", "return", "True", "return", "False" ]
function which very simplisticly detect if the provided value contains or is a jinja expression .
train
false
53,248
@preloaderStop
def failureMessage(message):
    printLine(message, '\n', sys.stderr)
[ "@", "preloaderStop", "def", "failureMessage", "(", "message", ")", ":", "printLine", "(", "message", ",", "'\\n'", ",", "sys", ".", "stderr", ")" ]
displaying a message .
train
false
53,249
@require_POST
@login_required
def watch_document(request, document_slug):
    document = get_object_or_404(Document, locale=request.LANGUAGE_CODE, slug=document_slug)
    EditDocumentEvent.notify(request.user, document)
    statsd.incr('wiki.watches.document')
    return HttpResponseRedirect(document.get_absolute_url())
[ "@", "require_POST", "@", "login_required", "def", "watch_document", "(", "request", ",", "document_slug", ")", ":", "document", "=", "get_object_or_404", "(", "Document", ",", "locale", "=", "request", ".", "LANGUAGE_CODE", ",", "slug", "=", "document_slug", ")", "EditDocumentEvent", ".", "notify", "(", "request", ".", "user", ",", "document", ")", "statsd", ".", "incr", "(", "'wiki.watches.document'", ")", "return", "HttpResponseRedirect", "(", "document", ".", "get_absolute_url", "(", ")", ")" ]
start watching a document for edits .
train
false
53,251
@auth.errorhandler(429)
def login_rate_limit_error(error):
    return render_template('errors/too_many_logins.html', timeout=error.description)
[ "@", "auth", ".", "errorhandler", "(", "429", ")", "def", "login_rate_limit_error", "(", "error", ")", ":", "return", "render_template", "(", "'errors/too_many_logins.html'", ",", "timeout", "=", "error", ".", "description", ")" ]
register a custom error handler for a too many requests error .
train
false
53,252
def migration_get_in_progress_by_host_and_node(context, host, node):
    return IMPL.migration_get_in_progress_by_host_and_node(context, host, node)
[ "def", "migration_get_in_progress_by_host_and_node", "(", "context", ",", "host", ",", "node", ")", ":", "return", "IMPL", ".", "migration_get_in_progress_by_host_and_node", "(", "context", ",", "host", ",", "node", ")" ]
finds all migrations for the given host + node that are not yet confirmed or reverted .
train
false
53,253
def _check_throttles_decorator(func):
    @wraps(func)
    def _decorated(*args, **kwargs):
        if RateLimitConfiguration.current().enabled:
            return func(*args, **kwargs)
        else:
            msg = 'Rate limiting is disabled because `RateLimitConfiguration` is not enabled.'
            LOGGER.info(msg)
            return
    return _decorated
[ "def", "_check_throttles_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_decorated", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "RateLimitConfiguration", ".", "current", "(", ")", ".", "enabled", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "msg", "=", "'Rate limiting is disabled because `RateLimitConfiguration` is not enabled.'", "LOGGER", ".", "info", "(", "msg", ")", "return", "return", "_decorated" ]
decorator for apiview .
train
false
53,255
def raise_config_error(key, dummy):
    raise ConfigurationError(('Unknown option %s' % (key,)))
[ "def", "raise_config_error", "(", "key", ",", "dummy", ")", ":", "raise", "ConfigurationError", "(", "(", "'Unknown option %s'", "%", "(", "key", ",", ")", ")", ")" ]
raise configurationerror with the given key name .
train
false
53,256
def read_has_prefix(path):
    ParseResult = namedtuple(u'ParseResult', (u'placeholder', u'filemode', u'filepath'))

    def parse_line(line):
        parts = tuple((x.strip(u'"\'') for x in shlex.split(line, posix=False)))
        if (len(parts) == 1):
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif (len(parts) == 3):
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError((u'Invalid has_prefix file at path: %s' % path))

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
[ "def", "read_has_prefix", "(", "path", ")", ":", "ParseResult", "=", "namedtuple", "(", "u'ParseResult'", ",", "(", "u'placeholder'", ",", "u'filemode'", ",", "u'filepath'", ")", ")", "def", "parse_line", "(", "line", ")", ":", "parts", "=", "tuple", "(", "(", "x", ".", "strip", "(", "u'\"\\''", ")", "for", "x", "in", "shlex", ".", "split", "(", "line", ",", "posix", "=", "False", ")", ")", ")", "if", "(", "len", "(", "parts", ")", "==", "1", ")", ":", "return", "ParseResult", "(", "PREFIX_PLACEHOLDER", ",", "FileMode", ".", "text", ",", "parts", "[", "0", "]", ")", "elif", "(", "len", "(", "parts", ")", "==", "3", ")", ":", "return", "ParseResult", "(", "parts", "[", "0", "]", ",", "FileMode", "(", "parts", "[", "1", "]", ")", ",", "parts", "[", "2", "]", ")", "else", ":", "raise", "RuntimeError", "(", "(", "u'Invalid has_prefix file at path: %s'", "%", "path", ")", ")", "parsed_lines", "=", "(", "parse_line", "(", "line", ")", "for", "line", "in", "yield_lines", "(", "path", ")", ")", "return", "{", "pr", ".", "filepath", ":", "(", "pr", ".", "placeholder", ",", "pr", ".", "filemode", ")", "for", "pr", "in", "parsed_lines", "}" ]
reads has_prefix file and return dict mapping filepaths to tuples a line in has_prefix contains one of * filepath * placeholder mode filepath mode values are one of * text * binary .
train
false
53,258
@_np.deprecate(message='scipy.constants.K2F is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.')
def K2F(K):
    return C2F(K2C(_np.asanyarray(K)))
[ "@", "_np", ".", "deprecate", "(", "message", "=", "'scipy.constants.K2F is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.'", ")", "def", "K2F", "(", "K", ")", ":", "return", "C2F", "(", "K2C", "(", "_np", ".", "asanyarray", "(", "K", ")", ")", ")" ]
convert kelvin to fahrenheit parameters k : array_like kelvin temperature(s) to be converted .
train
false
53,260
def decode_terminated(data, encoding, strict=True):
    codec_info = codecs.lookup(encoding)
    encoding = codec_info.name
    if (encoding in ('utf-8', 'iso8859-1')):
        index = data.find('\x00')
        if (index == (-1)):
            res = (data.decode(encoding), '')
            if strict:
                raise ValueError('not null terminated')
            else:
                return res
        return (data[:index].decode(encoding), data[(index + 1):])
    decoder = codec_info.incrementaldecoder()
    r = []
    for (i, b) in enumerate(iterbytes(data)):
        c = decoder.decode(b)
        if (c == u'\x00'):
            return (u''.join(r), data[(i + 1):])
        r.append(c)
    else:
        r.append(decoder.decode('', True))
        if strict:
            raise ValueError('not null terminated')
        return (u''.join(r), '')
[ "def", "decode_terminated", "(", "data", ",", "encoding", ",", "strict", "=", "True", ")", ":", "codec_info", "=", "codecs", ".", "lookup", "(", "encoding", ")", "encoding", "=", "codec_info", ".", "name", "if", "(", "encoding", "in", "(", "'utf-8'", ",", "'iso8859-1'", ")", ")", ":", "index", "=", "data", ".", "find", "(", "'\\x00'", ")", "if", "(", "index", "==", "(", "-", "1", ")", ")", ":", "res", "=", "(", "data", ".", "decode", "(", "encoding", ")", ",", "''", ")", "if", "strict", ":", "raise", "ValueError", "(", "'not null terminated'", ")", "else", ":", "return", "res", "return", "(", "data", "[", ":", "index", "]", ".", "decode", "(", "encoding", ")", ",", "data", "[", "(", "index", "+", "1", ")", ":", "]", ")", "decoder", "=", "codec_info", ".", "incrementaldecoder", "(", ")", "r", "=", "[", "]", "for", "(", "i", ",", "b", ")", "in", "enumerate", "(", "iterbytes", "(", "data", ")", ")", ":", "c", "=", "decoder", ".", "decode", "(", "b", ")", "if", "(", "c", "==", "u'\\x00'", ")", ":", "return", "(", "u''", ".", "join", "(", "r", ")", ",", "data", "[", "(", "i", "+", "1", ")", ":", "]", ")", "r", ".", "append", "(", "c", ")", "else", ":", "r", ".", "append", "(", "decoder", ".", "decode", "(", "''", ",", "True", ")", ")", "if", "strict", ":", "raise", "ValueError", "(", "'not null terminated'", ")", "return", "(", "u''", ".", "join", "(", "r", ")", ",", "''", ")" ]
returns the decoded data until the first null terminator and all data after it .
train
true
53,261
def rfc822_escape(header):
    lines = header.split('\n')
    sep = ('\n' + (8 * ' '))
    return sep.join(lines)
[ "def", "rfc822_escape", "(", "header", ")", ":", "lines", "=", "header", ".", "split", "(", "'\\n'", ")", "sep", "=", "(", "'\\n'", "+", "(", "8", "*", "' '", ")", ")", "return", "sep", ".", "join", "(", "lines", ")" ]
return a version of the string escaped for inclusion in an rfc-822 header .
train
false
53,263
def wrap_file(environ, file, buffer_size=8192):
    return environ.get('wsgi.file_wrapper', FileWrapper)(file, buffer_size)
[ "def", "wrap_file", "(", "environ", ",", "file", ",", "buffer_size", "=", "8192", ")", ":", "return", "environ", ".", "get", "(", "'wsgi.file_wrapper'", ",", "FileWrapper", ")", "(", "file", ",", "buffer_size", ")" ]
wraps a file .
train
false
53,264
def _encode_float(name, value, dummy0, dummy1):
    return (('\x01' + name) + _PACK_FLOAT(value))
[ "def", "_encode_float", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "return", "(", "(", "'\\x01'", "+", "name", ")", "+", "_PACK_FLOAT", "(", "value", ")", ")" ]
encode a float .
train
false
53,266
def _set_controls(coordinator, root, namespace):
    namespaces = {'n': namespace}
    controls_list = root.xpath('n:controls', namespaces=namespaces)
    if controls_list:
        controls = controls_list[0]
        concurrency = controls.xpath('n:concurrency', namespaces=namespaces)
        timeout = controls.xpath('n:timeout', namespaces=namespaces)
        execution = controls.xpath('n:execution', namespaces=namespaces)
        throttle = controls.xpath('n:throttle', namespaces=namespaces)
        if concurrency:
            coordinator.concurrency = concurrency[0].text
        if timeout:
            coordinator.timeout = timeout[0].text
        if execution:
            coordinator.execution = execution[0].text
        if throttle:
            coordinator.throttle = throttle[0].text
[ "def", "_set_controls", "(", "coordinator", ",", "root", ",", "namespace", ")", ":", "namespaces", "=", "{", "'n'", ":", "namespace", "}", "controls_list", "=", "root", ".", "xpath", "(", "'n:controls'", ",", "namespaces", "=", "namespaces", ")", "if", "controls_list", ":", "controls", "=", "controls_list", "[", "0", "]", "concurrency", "=", "controls", ".", "xpath", "(", "'n:concurrency'", ",", "namespaces", "=", "namespaces", ")", "timeout", "=", "controls", ".", "xpath", "(", "'n:timeout'", ",", "namespaces", "=", "namespaces", ")", "execution", "=", "controls", ".", "xpath", "(", "'n:execution'", ",", "namespaces", "=", "namespaces", ")", "throttle", "=", "controls", ".", "xpath", "(", "'n:throttle'", ",", "namespaces", "=", "namespaces", ")", "if", "concurrency", ":", "coordinator", ".", "concurrency", "=", "concurrency", "[", "0", "]", ".", "text", "if", "timeout", ":", "coordinator", ".", "timeout", "=", "timeout", "[", "0", "]", ".", "text", "if", "execution", ":", "coordinator", ".", "execution", "=", "execution", "[", "0", "]", ".", "text", "if", "throttle", ":", "coordinator", ".", "throttle", "=", "throttle", "[", "0", "]", ".", "text" ]
get controls from coordinator xml set properties on coordinator with controls from xml etree root .
train
false
53,267
def loaded_module_info(module_name):
    l_raw = utils.system_output('/sbin/lsmod')
    return parse_lsmod_for_module(l_raw, module_name)
[ "def", "loaded_module_info", "(", "module_name", ")", ":", "l_raw", "=", "utils", ".", "system_output", "(", "'/sbin/lsmod'", ")", "return", "parse_lsmod_for_module", "(", "l_raw", ",", "module_name", ")" ]
get loaded module details: size and submodules .
train
false
53,268
def test_keyword():
    defs = Script('print').goto_definitions()
    if is_py3:
        assert [d.doc for d in defs]
    else:
        assert (defs == [])
    assert (Script('import').goto_assignments() == [])
    completions = Script('import', 1, 1).completions()
    assert ((len(completions) > 10) and ('if' in [c.name for c in completions]))
    assert (Script('assert').goto_definitions() == [])
[ "def", "test_keyword", "(", ")", ":", "defs", "=", "Script", "(", "'print'", ")", ".", "goto_definitions", "(", ")", "if", "is_py3", ":", "assert", "[", "d", ".", "doc", "for", "d", "in", "defs", "]", "else", ":", "assert", "(", "defs", "==", "[", "]", ")", "assert", "(", "Script", "(", "'import'", ")", ".", "goto_assignments", "(", ")", "==", "[", "]", ")", "completions", "=", "Script", "(", "'import'", ",", "1", ",", "1", ")", ".", "completions", "(", ")", "assert", "(", "(", "len", "(", "completions", ")", ">", "10", ")", "and", "(", "'if'", "in", "[", "c", ".", "name", "for", "c", "in", "completions", "]", ")", ")", "assert", "(", "Script", "(", "'assert'", ")", ".", "goto_definitions", "(", ")", "==", "[", "]", ")" ]
github jedi-vim issue #44 .
train
false
53,269
def _get_sequence_list():
    from django.db import models
    apps = models.get_apps()
    sequence_list = []
    for app in apps:
        for model in models.get_models(app):
            for f in model._meta.fields:
                if isinstance(f, models.AutoField):
                    sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                    break
            for f in model._meta.many_to_many:
                sequence_list.append({'table': f.m2m_db_table(), 'column': None})
    return sequence_list
[ "def", "_get_sequence_list", "(", ")", ":", "from", "django", ".", "db", "import", "models", "apps", "=", "models", ".", "get_apps", "(", ")", "sequence_list", "=", "[", "]", "for", "app", "in", "apps", ":", "for", "model", "in", "models", ".", "get_models", "(", "app", ")", ":", "for", "f", "in", "model", ".", "_meta", ".", "fields", ":", "if", "isinstance", "(", "f", ",", "models", ".", "AutoField", ")", ":", "sequence_list", ".", "append", "(", "{", "'table'", ":", "model", ".", "_meta", ".", "db_table", ",", "'column'", ":", "f", ".", "column", "}", ")", "break", "for", "f", "in", "model", ".", "_meta", ".", "many_to_many", ":", "sequence_list", ".", "append", "(", "{", "'table'", ":", "f", ".", "m2m_db_table", "(", ")", ",", "'column'", ":", "None", "}", ")", "return", "sequence_list" ]
returns a list of information about all db sequences for all models in all apps .
train
false
53,270
def _format_slug_for_request(slug):
    index = slug.find(TEMPLATE_TITLE_PREFIX)
    if (index != (-1)):
        slug = ('%s%s' % (TEMPLATE_TITLE_PREFIX, slug[(index + len(TEMPLATE_TITLE_PREFIX)):].lower()))
    return slug
[ "def", "_format_slug_for_request", "(", "slug", ")", ":", "index", "=", "slug", ".", "find", "(", "TEMPLATE_TITLE_PREFIX", ")", "if", "(", "index", "!=", "(", "-", "1", ")", ")", ":", "slug", "=", "(", "'%s%s'", "%", "(", "TEMPLATE_TITLE_PREFIX", ",", "slug", "[", "(", "index", "+", "len", "(", "TEMPLATE_TITLE_PREFIX", ")", ")", ":", "]", ".", "lower", "(", ")", ")", ")", "return", "slug" ]
formats a document slug which will play nice with kumascript caching .
train
false
53,271
def sdm_deg(f):
    return max((sdm_monomial_deg(M[0]) for M in f))
[ "def", "sdm_deg", "(", "f", ")", ":", "return", "max", "(", "(", "sdm_monomial_deg", "(", "M", "[", "0", "]", ")", "for", "M", "in", "f", ")", ")" ]
degree of f .
train
false
53,274
def _ssh_read_next_mpint(data):
    (mpint_data, rest) = _ssh_read_next_string(data)
    return (utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest)
[ "def", "_ssh_read_next_mpint", "(", "data", ")", ":", "(", "mpint_data", ",", "rest", ")", "=", "_ssh_read_next_string", "(", "data", ")", "return", "(", "utils", ".", "int_from_bytes", "(", "mpint_data", ",", "byteorder", "=", "'big'", ",", "signed", "=", "False", ")", ",", "rest", ")" ]
reads the next mpint from the data .
train
false
53,276
def getdraw(im=None, hints=None):
    handler = None
    if ((not hints) or ('nicest' in hints)):
        try:
            from . import _imagingagg as handler
        except ImportError:
            pass
    if (handler is None):
        from . import ImageDraw2 as handler
    if im:
        im = handler.Draw(im)
    return (im, handler)
[ "def", "getdraw", "(", "im", "=", "None", ",", "hints", "=", "None", ")", ":", "handler", "=", "None", "if", "(", "(", "not", "hints", ")", "or", "(", "'nicest'", "in", "hints", ")", ")", ":", "try", ":", "from", ".", "import", "_imagingagg", "as", "handler", "except", "ImportError", ":", "pass", "if", "(", "handler", "is", "None", ")", ":", "from", ".", "import", "ImageDraw2", "as", "handler", "if", "im", ":", "im", "=", "handler", ".", "Draw", "(", "im", ")", "return", "(", "im", ",", "handler", ")" ]
a more advanced 2d drawing interface for pil images .
train
false
53,277
def fix_arg(arg):
    if isinstance(arg, str):
        arg = _intrinsics.InvokeCommand.ExpandString(arg)
    elif isinstance(arg, PSObjectWrapper):
        arg = arg.data
    elif isinstance(arg, ShellOutput):
        arg = arg.data
    return arg
[ "def", "fix_arg", "(", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "str", ")", ":", "arg", "=", "_intrinsics", ".", "InvokeCommand", ".", "ExpandString", "(", "arg", ")", "elif", "isinstance", "(", "arg", ",", "PSObjectWrapper", ")", ":", "arg", "=", "arg", ".", "data", "elif", "isinstance", "(", "arg", ",", "ShellOutput", ")", ":", "arg", "=", "arg", ".", "data", "return", "arg" ]
utility function converts arg to type string .
train
false
53,278
def payload_from_raw(raw, linktype=1):
    ip = iplayer_from_raw(raw, linktype)
    try:
        return ip.data.data
    except:
        return ''
[ "def", "payload_from_raw", "(", "raw", ",", "linktype", "=", "1", ")", ":", "ip", "=", "iplayer_from_raw", "(", "raw", ",", "linktype", ")", "try", ":", "return", "ip", ".", "data", ".", "data", "except", ":", "return", "''" ]
get the payload from a packet .
train
false
53,279
def _turtle_docrevise(docstr):
    import re
    if (docstr is None):
        return None
    turtlename = _CFG['exampleturtle']
    newdocstr = docstr.replace(('%s.' % turtlename), '')
    parexp = re.compile((' \\(.+ %s\\):' % turtlename))
    newdocstr = parexp.sub(':', newdocstr)
    return newdocstr
[ "def", "_turtle_docrevise", "(", "docstr", ")", ":", "import", "re", "if", "(", "docstr", "is", "None", ")", ":", "return", "None", "turtlename", "=", "_CFG", "[", "'exampleturtle'", "]", "newdocstr", "=", "docstr", ".", "replace", "(", "(", "'%s.'", "%", "turtlename", ")", ",", "''", ")", "parexp", "=", "re", ".", "compile", "(", "(", "' \\\\(.+ %s\\\\):'", "%", "turtlename", ")", ")", "newdocstr", "=", "parexp", ".", "sub", "(", "':'", ",", "newdocstr", ")", "return", "newdocstr" ]
to reduce docstrings from rawturtle class for functions .
train
false
53,280
def EventHandler(source_restriction=False, auth_required=True, allow_client_access=False):
    def Decorator(f):
        'Initialised Decorator.'

        @functools.wraps(f)
        def Decorated(self, msg):
            'A decorator that assists in enforcing EventListener restrictions.'
            if (auth_required and (msg.auth_state != msg.AuthorizationState.AUTHENTICATED)):
                raise RuntimeError(('Message from %s not authenticated.' % msg.source))
            if ((not allow_client_access) and msg.source and rdf_client.ClientURN.Validate(msg.source)):
                raise RuntimeError('Event does not support clients.')
            if source_restriction:
                source_check_method = getattr(self, 'CheckSource')
                if (not source_check_method):
                    raise RuntimeError('CheckSource method not found.')
                if (not source_check_method(msg.source)):
                    raise RuntimeError('Message source invalid.')
            stats.STATS.IncrementCounter('grr_worker_states_run')
            rdf_msg = rdf_flows.GrrMessage(msg)
            res = f(self, message=rdf_msg, event=rdf_msg.payload)
            return res
        return Decorated
    return Decorator
[ "def", "EventHandler", "(", "source_restriction", "=", "False", ",", "auth_required", "=", "True", ",", "allow_client_access", "=", "False", ")", ":", "def", "Decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "Decorated", "(", "self", ",", "msg", ")", ":", "if", "(", "auth_required", "and", "(", "msg", ".", "auth_state", "!=", "msg", ".", "AuthorizationState", ".", "AUTHENTICATED", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'Message from %s not authenticated.'", "%", "msg", ".", "source", ")", ")", "if", "(", "(", "not", "allow_client_access", ")", "and", "msg", ".", "source", "and", "rdf_client", ".", "ClientURN", ".", "Validate", "(", "msg", ".", "source", ")", ")", ":", "raise", "RuntimeError", "(", "'Event does not support clients.'", ")", "if", "source_restriction", ":", "source_check_method", "=", "getattr", "(", "self", ",", "'CheckSource'", ")", "if", "(", "not", "source_check_method", ")", ":", "raise", "RuntimeError", "(", "'CheckSource method not found.'", ")", "if", "(", "not", "source_check_method", "(", "msg", ".", "source", ")", ")", ":", "raise", "RuntimeError", "(", "'Message source invalid.'", ")", "stats", ".", "STATS", ".", "IncrementCounter", "(", "'grr_worker_states_run'", ")", "rdf_msg", "=", "rdf_flows", ".", "GrrMessage", "(", "msg", ")", "res", "=", "f", "(", "self", ",", "message", "=", "rdf_msg", ",", "event", "=", "rdf_msg", ".", "payload", ")", "return", "res", "return", "Decorated", "return", "Decorator" ]
a convenience decorator for event handlers .
train
false
53,281
def _index_to_ticklabels(index):
    if isinstance(index, pd.MultiIndex):
        return ['-'.join(map(str, i)) for i in index.values]
    else:
        return index.values
[ "def", "_index_to_ticklabels", "(", "index", ")", ":", "if", "isinstance", "(", "index", ",", "pd", ".", "MultiIndex", ")", ":", "return", "[", "'-'", ".", "join", "(", "map", "(", "str", ",", "i", ")", ")", "for", "i", "in", "index", ".", "values", "]", "else", ":", "return", "index", ".", "values" ]
convert a pandas index or multiindex into ticklabels .
train
false
53,282
def filter_pad(val, width, fillchar=u'0'):
    return str(val).rjust(width, fillchar)
[ "def", "filter_pad", "(", "val", ",", "width", ",", "fillchar", "=", "u'0'", ")", ":", "return", "str", "(", "val", ")", ".", "rjust", "(", "width", ",", "fillchar", ")" ]
pads a number or string with fillchar to the specified width .
train
false
53,283
def test_dispatch():
    gotoutput = pretty.pretty(MyDict())
    expectedoutput = 'MyDict(...)'
    nt.assert_equal(gotoutput, expectedoutput)
[ "def", "test_dispatch", "(", ")", ":", "gotoutput", "=", "pretty", ".", "pretty", "(", "MyDict", "(", ")", ")", "expectedoutput", "=", "'MyDict(...)'", "nt", ".", "assert_equal", "(", "gotoutput", ",", "expectedoutput", ")" ]
test correct dispatching: the _repr_pretty_ method for mydict must be found before the registered printer for dict .
train
false
53,284
def path_as_windows(fpath):
    winepath = ('C:\\' + fpath.split('drive_c')[1])
    return winepath
[ "def", "path_as_windows", "(", "fpath", ")", ":", "winepath", "=", "(", "'C:\\\\'", "+", "fpath", ".", "split", "(", "'drive_c'", ")", "[", "1", "]", ")", "return", "winepath" ]
return the file path as wine expects .
train
false
53,285
def _determine_default_project(project=None):
    if (project is None):
        (_, project) = google.auth.default()
    return project
[ "def", "_determine_default_project", "(", "project", "=", "None", ")", ":", "if", "(", "project", "is", "None", ")", ":", "(", "_", ",", "project", ")", "=", "google", ".", "auth", ".", "default", "(", ")", "return", "project" ]
determine default project id explicitly or implicitly as fall-back .
train
false
53,286
def squish(text):
    return re.sub('\\s+', ' ', text)
[ "def", "squish", "(", "text", ")", ":", "return", "re", ".", "sub", "(", "'\\\\s+'", ",", "' '", ",", "text", ")" ]
turn any run of whitespace into one space .
train
false
53,288
def kill(coro):
    return KillEvent(coro)
[ "def", "kill", "(", "coro", ")", ":", "return", "KillEvent", "(", "coro", ")" ]
halt the execution of a different spawned thread .
train
false
53,290
def _serialize_range(start, end):
    if (start < 0):
        range_str = ('%d' % start)
    elif (end is None):
        range_str = ('%d-' % start)
    else:
        range_str = ('%d-%d' % (start, end))
    return ('bytes=%s' % range_str)
[ "def", "_serialize_range", "(", "start", ",", "end", ")", ":", "if", "(", "start", "<", "0", ")", ":", "range_str", "=", "(", "'%d'", "%", "start", ")", "elif", "(", "end", "is", "None", ")", ":", "range_str", "=", "(", "'%d-'", "%", "start", ")", "else", ":", "range_str", "=", "(", "'%d-%d'", "%", "(", "start", ",", "end", ")", ")", "return", "(", "'bytes=%s'", "%", "range_str", ")" ]
return a string suitable for use as a value in a range header .
train
false
53,292
def ftp(registry, xml_parent, data):
    console_prefix = 'FTP: '
    plugin_tag = 'jenkins.plugins.publish__over__ftp.BapFtpPublisherPlugin'
    publisher_tag = 'jenkins.plugins.publish__over__ftp.BapFtpPublisher'
    transfer_tag = 'jenkins.plugins.publish__over__ftp.BapFtpTransfer'
    plugin_reference_tag = 'jenkins.plugins.publish_over_ftp.BapFtpPublisherPlugin'
    (_, transfer_node) = base_publish_over(xml_parent, data, console_prefix, plugin_tag, publisher_tag, transfer_tag, plugin_reference_tag)
    XML.SubElement(transfer_node, 'asciiMode').text = 'false'
[ "def", "ftp", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "console_prefix", "=", "'FTP: '", "plugin_tag", "=", "'jenkins.plugins.publish__over__ftp.BapFtpPublisherPlugin'", "publisher_tag", "=", "'jenkins.plugins.publish__over__ftp.BapFtpPublisher'", "transfer_tag", "=", "'jenkins.plugins.publish__over__ftp.BapFtpTransfer'", "plugin_reference_tag", "=", "'jenkins.plugins.publish_over_ftp.BapFtpPublisherPlugin'", "(", "_", ",", "transfer_node", ")", "=", "base_publish_over", "(", "xml_parent", ",", "data", ",", "console_prefix", ",", "plugin_tag", ",", "publisher_tag", ",", "transfer_tag", ",", "plugin_reference_tag", ")", "XML", ".", "SubElement", "(", "transfer_node", ",", "'asciiMode'", ")", ".", "text", "=", "'false'" ]
yaml: ftp upload files via ftp .
train
false
53,293
def _worker_ctl(worker, lbn, vwa, profile='default'):
    cmd = {'cmd': 'update', 'mime': 'prop', 'w': lbn, 'sw': worker, 'vwa': vwa}
    return (_do_http(cmd, profile)['worker.result.type'] == 'OK')
[ "def", "_worker_ctl", "(", "worker", ",", "lbn", ",", "vwa", ",", "profile", "=", "'default'", ")", ":", "cmd", "=", "{", "'cmd'", ":", "'update'", ",", "'mime'", ":", "'prop'", ",", "'w'", ":", "lbn", ",", "'sw'", ":", "worker", ",", "'vwa'", ":", "vwa", "}", "return", "(", "_do_http", "(", "cmd", ",", "profile", ")", "[", "'worker.result.type'", "]", "==", "'OK'", ")" ]
enable/disable/stop a worker .
train
true
53,295
def find_readline_lib():
    import readline
    f = open(readline.__file__, 'rb')
    try:
        data = f.read()
    finally:
        f.close()
    import re
    m = re.search('\x00([^\x00]*libreadline[^\x00]*)\x00', data)
    if m:
        return m.group(1)
    return None
[ "def", "find_readline_lib", "(", ")", ":", "import", "readline", "f", "=", "open", "(", "readline", ".", "__file__", ",", "'rb'", ")", "try", ":", "data", "=", "f", ".", "read", "(", ")", "finally", ":", "f", ".", "close", "(", ")", "import", "re", "m", "=", "re", ".", "search", "(", "'\\x00([^\\x00]*libreadline[^\\x00]*)\\x00'", ",", "data", ")", "if", "m", ":", "return", "m", ".", "group", "(", "1", ")", "return", "None" ]
return the name of the readline library linked to the given readline module .
train
false
53,297
def integrate_hypertangent_polynomial(p, DE):
    (q, r) = polynomial_reduce(p, DE)
    a = DE.d.exquo(Poly(((DE.t ** 2) + 1), DE.t))
    c = Poly((r.nth(1) / (2 * a.as_expr())), DE.t)
    return (q, c)
[ "def", "integrate_hypertangent_polynomial", "(", "p", ",", "DE", ")", ":", "(", "q", ",", "r", ")", "=", "polynomial_reduce", "(", "p", ",", "DE", ")", "a", "=", "DE", ".", "d", ".", "exquo", "(", "Poly", "(", "(", "(", "DE", ".", "t", "**", "2", ")", "+", "1", ")", ",", "DE", ".", "t", ")", ")", "c", "=", "Poly", "(", "(", "r", ".", "nth", "(", "1", ")", "/", "(", "2", "*", "a", ".", "as_expr", "(", ")", ")", ")", ",", "DE", ".", "t", ")", "return", "(", "q", ",", "c", ")" ]
integration of hypertangent polynomials .
train
false
53,298
def _wait_for_status(linode_id, status=None, timeout=300, quiet=True):
    if (status is None):
        status = _get_status_id_by_name('brand_new')
    status_desc_waiting = _get_status_descr_by_id(status)
    interval = 5
    iterations = int((timeout / interval))
    for i in range(0, iterations):
        result = get_linode(kwargs={'linode_id': linode_id})
        if (result['STATUS'] == status):
            return True
        status_desc_result = _get_status_descr_by_id(result['STATUS'])
        time.sleep(interval)
        if quiet:
            log.info("Status for Linode {0} is '{1}', waiting for '{2}'.".format(linode_id, status_desc_result, status_desc_waiting))
        else:
            log.debug("Status for Linode {0} is '{1}', waiting for '{2}'.".format(linode_id, status_desc_result, status_desc_waiting))
    return False
[ "def", "_wait_for_status", "(", "linode_id", ",", "status", "=", "None", ",", "timeout", "=", "300", ",", "quiet", "=", "True", ")", ":", "if", "(", "status", "is", "None", ")", ":", "status", "=", "_get_status_id_by_name", "(", "'brand_new'", ")", "status_desc_waiting", "=", "_get_status_descr_by_id", "(", "status", ")", "interval", "=", "5", "iterations", "=", "int", "(", "(", "timeout", "/", "interval", ")", ")", "for", "i", "in", "range", "(", "0", ",", "iterations", ")", ":", "result", "=", "get_linode", "(", "kwargs", "=", "{", "'linode_id'", ":", "linode_id", "}", ")", "if", "(", "result", "[", "'STATUS'", "]", "==", "status", ")", ":", "return", "True", "status_desc_result", "=", "_get_status_descr_by_id", "(", "result", "[", "'STATUS'", "]", ")", "time", ".", "sleep", "(", "interval", ")", "if", "quiet", ":", "log", ".", "info", "(", "\"Status for Linode {0} is '{1}', waiting for '{2}'.\"", ".", "format", "(", "linode_id", ",", "status_desc_result", ",", "status_desc_waiting", ")", ")", "else", ":", "log", ".", "debug", "(", "\"Status for Linode {0} is '{1}', waiting for '{2}'.\"", ".", "format", "(", "linode_id", ",", "status_desc_result", ",", "status_desc_waiting", ")", ")", "return", "False" ]
wait for a certain status from linode .
train
false
53,300
def uri_param(event_name, param, value, **kwargs):
    cli_argument = param
    qualified_param_name = '.'.join(event_name.split('.')[1:])
    if ((qualified_param_name in PARAMFILE_DISABLED) or getattr(cli_argument, 'no_paramfile', None)):
        return
    else:
        return _check_for_uri_param(cli_argument, value)
[ "def", "uri_param", "(", "event_name", ",", "param", ",", "value", ",", "**", "kwargs", ")", ":", "cli_argument", "=", "param", "qualified_param_name", "=", "'.'", ".", "join", "(", "event_name", ".", "split", "(", "'.'", ")", "[", "1", ":", "]", ")", "if", "(", "(", "qualified_param_name", "in", "PARAMFILE_DISABLED", ")", "or", "getattr", "(", "cli_argument", ",", "'no_paramfile'", ",", "None", ")", ")", ":", "return", "else", ":", "return", "_check_for_uri_param", "(", "cli_argument", ",", "value", ")" ]
handler that supports param values from uris .
train
false
53,301
@shared_task(bind=True, base=DebugBasketTask, default_retry_delay=BASKET_TASK_RETRY_DELAY, max_retries=BASKET_TASK_MAX_RETRIES)
def unsubscribe_user_task(self, result, newsletters=[], optout=False):
    if (not result):
        return None
    newsletters_to_unsubscribe = []
    if (result.get('status') == 'ok'):
        email = result.get('email')
        token = result.get('token')
        if newsletters:
            newsletters_to_unsubscribe = list(set(newsletters).intersection(result['newsletters']))
        else:
            newsletters_to_unsubscribe = list(set(MOZILLIANS_NEWSLETTERS).intersection(result['newsletters']))
    if newsletters_to_unsubscribe:
        try:
            unsubscribe_result = basket.unsubscribe(token=token, email=email, newsletters=newsletters_to_unsubscribe, optout=optout)
        except MaxRetriesExceededError as exc:
            raise exc
        except basket.BasketException as exc:
            raise self.retry(exc=exc)
        return unsubscribe_result
    return None
[ "@", "shared_task", "(", "bind", "=", "True", ",", "base", "=", "DebugBasketTask", ",", "default_retry_delay", "=", "BASKET_TASK_RETRY_DELAY", ",", "max_retries", "=", "BASKET_TASK_MAX_RETRIES", ")", "def", "unsubscribe_user_task", "(", "self", ",", "result", ",", "newsletters", "=", "[", "]", ",", "optout", "=", "False", ")", ":", "if", "(", "not", "result", ")", ":", "return", "None", "newsletters_to_unsubscribe", "=", "[", "]", "if", "(", "result", ".", "get", "(", "'status'", ")", "==", "'ok'", ")", ":", "email", "=", "result", ".", "get", "(", "'email'", ")", "token", "=", "result", ".", "get", "(", "'token'", ")", "if", "newsletters", ":", "newsletters_to_unsubscribe", "=", "list", "(", "set", "(", "newsletters", ")", ".", "intersection", "(", "result", "[", "'newsletters'", "]", ")", ")", "else", ":", "newsletters_to_unsubscribe", "=", "list", "(", "set", "(", "MOZILLIANS_NEWSLETTERS", ")", ".", "intersection", "(", "result", "[", "'newsletters'", "]", ")", ")", "if", "newsletters_to_unsubscribe", ":", "try", ":", "unsubscribe_result", "=", "basket", ".", "unsubscribe", "(", "token", "=", "token", ",", "email", "=", "email", ",", "newsletters", "=", "newsletters_to_unsubscribe", ",", "optout", "=", "optout", ")", "except", "MaxRetriesExceededError", "as", "exc", ":", "raise", "exc", "except", "basket", ".", "BasketException", "as", "exc", ":", "raise", "self", ".", "retry", "(", "exc", "=", "exc", ")", "return", "unsubscribe_result", "return", "None" ]
removes a user from the basket subscription .
train
false
53,302
def oauth_url(client_id, permissions=None, server=None, redirect_uri=None):
    url = 'https://discordapp.com/oauth2/authorize?client_id={}&scope=bot'.format(client_id)
    if (permissions is not None):
        url = ((url + '&permissions=') + str(permissions.value))
    if (server is not None):
        url = ((url + '&guild_id=') + server.id)
    if (redirect_uri is not None):
        from urllib.parse import urlencode
        url = ((url + '&response_type=code&') + urlencode({'redirect_uri': redirect_uri}))
    return url
[ "def", "oauth_url", "(", "client_id", ",", "permissions", "=", "None", ",", "server", "=", "None", ",", "redirect_uri", "=", "None", ")", ":", "url", "=", "'https://discordapp.com/oauth2/authorize?client_id={}&scope=bot'", ".", "format", "(", "client_id", ")", "if", "(", "permissions", "is", "not", "None", ")", ":", "url", "=", "(", "(", "url", "+", "'&permissions='", ")", "+", "str", "(", "permissions", ".", "value", ")", ")", "if", "(", "server", "is", "not", "None", ")", ":", "url", "=", "(", "(", "url", "+", "'&guild_id='", ")", "+", "server", ".", "id", ")", "if", "(", "redirect_uri", "is", "not", "None", ")", ":", "from", "urllib", ".", "parse", "import", "urlencode", "url", "=", "(", "(", "url", "+", "'&response_type=code&'", ")", "+", "urlencode", "(", "{", "'redirect_uri'", ":", "redirect_uri", "}", ")", ")", "return", "url" ]
a helper function that returns the oauth2 url for inviting the bot into servers .
train
true
53,303
def event(name, priority=128):
    def decorator(func):
        add_event_handler(name, func, priority)
        return func
    return decorator
[ "def", "event", "(", "name", ",", "priority", "=", "128", ")", ":", "def", "decorator", "(", "func", ")", ":", "add_event_handler", "(", "name", ",", "func", ",", "priority", ")", "return", "func", "return", "decorator" ]
watch salts event bus and block until the given tag is matched .
train
false
53,304
def is_string(obj):
    try:
        return isinstance(obj, basestring)
    except NameError:
        return isinstance(obj, str)
[ "def", "is_string", "(", "obj", ")", ":", "try", ":", "return", "isinstance", "(", "obj", ",", "basestring", ")", "except", "NameError", ":", "return", "isinstance", "(", "obj", ",", "str", ")" ]
returns true if s is string or string-like object .
train
false
53,307
def file_requires_unicode(x):
    try:
        x.write('')
    except TypeError:
        return True
    else:
        return False
[ "def", "file_requires_unicode", "(", "x", ")", ":", "try", ":", "x", ".", "write", "(", "''", ")", "except", "TypeError", ":", "return", "True", "else", ":", "return", "False" ]
returns true if the given writable file-like object requires unicode to be written to it .
train
false
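The snippet above probes with a bare '' literal, which is a byte string on Python 2 (its original target). On Python 3 the same probe needs a bytes literal, as in this hedged variant; the io classes make the distinction visible.

import io

# A py3-safe sketch of the same idea, assuming bytes should be the probe.
def file_requires_unicode_py3(x):
    try:
        x.write(b'')  # probe with bytes instead of the py2-era ''
    except TypeError:
        return True
    return False

print(file_requires_unicode_py3(io.StringIO()))  # True  (bytes rejected)
print(file_requires_unicode_py3(io.BytesIO()))   # False (bytes accepted)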
53,308
def _build_args(method, ip, comment): opt = _get_opt(method) args = '{0} {1}'.format(opt, ip) if comment: args += ' {0}'.format(comment) return args
[ "def", "_build_args", "(", "method", ",", "ip", ",", "comment", ")", ":", "opt", "=", "_get_opt", "(", "method", ")", "args", "=", "'{0} {1}'", ".", "format", "(", "opt", ",", "ip", ")", "if", "comment", ":", "args", "+=", "' {0}'", ".", "format", "(", "comment", ")", "return", "args" ]
returns the cmd args for csf basic allow/deny commands .
train
true
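A usage sketch for _build_args. The helper _get_opt is not shown in the snippet, so the mapping below from csf methods to command-line options is an assumption for illustration only.

# Assumed stand-in for the unshown _get_opt helper.
def _get_opt(method):
    return {'allow': '-a', 'deny': '-d'}[method]

print(_build_args('allow', '203.0.113.7', 'trusted host'))
# -a 203.0.113.7 trusted host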
53,309
def _inherit_from(context, uri, calling_uri): if (uri is None): return None template = _lookup_template(context, uri, calling_uri) self_ns = context['self'] ih = self_ns while (ih.inherits is not None): ih = ih.inherits lclcontext = context._locals({'next': ih}) ih.inherits = TemplateNamespace(('self:%s' % template.uri), lclcontext, template=template, populate_self=False) context._data['parent'] = lclcontext._data['local'] = ih.inherits callable_ = getattr(template.module, '_mako_inherit', None) if (callable_ is not None): ret = callable_(template, lclcontext) if ret: return ret gen_ns = getattr(template.module, '_mako_generate_namespaces', None) if (gen_ns is not None): gen_ns(context) return (template.callable_, lclcontext)
[ "def", "_inherit_from", "(", "context", ",", "uri", ",", "calling_uri", ")", ":", "if", "(", "uri", "is", "None", ")", ":", "return", "None", "template", "=", "_lookup_template", "(", "context", ",", "uri", ",", "calling_uri", ")", "self_ns", "=", "context", "[", "'self'", "]", "ih", "=", "self_ns", "while", "(", "ih", ".", "inherits", "is", "not", "None", ")", ":", "ih", "=", "ih", ".", "inherits", "lclcontext", "=", "context", ".", "_locals", "(", "{", "'next'", ":", "ih", "}", ")", "ih", ".", "inherits", "=", "TemplateNamespace", "(", "(", "'self:%s'", "%", "template", ".", "uri", ")", ",", "lclcontext", ",", "template", "=", "template", ",", "populate_self", "=", "False", ")", "context", ".", "_data", "[", "'parent'", "]", "=", "lclcontext", ".", "_data", "[", "'local'", "]", "=", "ih", ".", "inherits", "callable_", "=", "getattr", "(", "template", ".", "module", ",", "'_mako_inherit'", ",", "None", ")", "if", "(", "callable_", "is", "not", "None", ")", ":", "ret", "=", "callable_", "(", "template", ",", "lclcontext", ")", "if", "ret", ":", "return", "ret", "gen_ns", "=", "getattr", "(", "template", ".", "module", ",", "'_mako_generate_namespaces'", ",", "None", ")", "if", "(", "gen_ns", "is", "not", "None", ")", ":", "gen_ns", "(", "context", ")", "return", "(", "template", ".", "callable_", ",", "lclcontext", ")" ]
called by the _inherit method in template modules to set up the inheritance chain at the start of a templates execution .
train
true
53,310
def clean_opf(container): manifest_id_map = container.manifest_id_map for meta in container.opf_xpath(u'//opf:meta[@name="cover" and @content]'): name = manifest_id_map.get(meta.get(u'content', None), None) container.remove_from_xml(meta) if (name and (name in container.name_path_map)): (yield name) gtm = container.guide_type_map for ref in container.opf_xpath(u'//opf:guide/opf:reference[@type]'): typ = ref.get(u'type', u'') if (typ.lower() in COVER_TYPES): container.remove_from_xml(ref) name = gtm.get(typ, None) if (name and (name in container.name_path_map)): (yield name) removed_names = container.apply_unique_properties(None, u'cover-image', u'calibre:title-page')[0] for name in removed_names: (yield name) container.dirty(container.opf_name)
[ "def", "clean_opf", "(", "container", ")", ":", "manifest_id_map", "=", "container", ".", "manifest_id_map", "for", "meta", "in", "container", ".", "opf_xpath", "(", "u'//opf:meta[@name=\"cover\" and @content]'", ")", ":", "name", "=", "manifest_id_map", ".", "get", "(", "meta", ".", "get", "(", "u'content'", ",", "None", ")", ",", "None", ")", "container", ".", "remove_from_xml", "(", "meta", ")", "if", "(", "name", "and", "(", "name", "in", "container", ".", "name_path_map", ")", ")", ":", "(", "yield", "name", ")", "gtm", "=", "container", ".", "guide_type_map", "for", "ref", "in", "container", ".", "opf_xpath", "(", "u'//opf:guide/opf:reference[@type]'", ")", ":", "typ", "=", "ref", ".", "get", "(", "u'type'", ",", "u''", ")", "if", "(", "typ", ".", "lower", "(", ")", "in", "COVER_TYPES", ")", ":", "container", ".", "remove_from_xml", "(", "ref", ")", "name", "=", "gtm", ".", "get", "(", "typ", ",", "None", ")", "if", "(", "name", "and", "(", "name", "in", "container", ".", "name_path_map", ")", ")", ":", "(", "yield", "name", ")", "removed_names", "=", "container", ".", "apply_unique_properties", "(", "None", ",", "u'cover-image'", ",", "u'calibre:title-page'", ")", "[", "0", "]", "for", "name", "in", "removed_names", ":", "(", "yield", "name", ")", "container", ".", "dirty", "(", "container", ".", "opf_name", ")" ]
remove all references to covers from the opf .
train
false
53,311
def saltversion(): from salt.version import __version__ return {'saltversion': __version__}
[ "def", "saltversion", "(", ")", ":", "from", "salt", ".", "version", "import", "__version__", "return", "{", "'saltversion'", ":", "__version__", "}" ]
return the version of salt .
train
false
53,312
def _check_filter_and_make_params(entity, includes, release_status=[], release_type=[]): if isinstance(release_status, compat.basestring): release_status = [release_status] if isinstance(release_type, compat.basestring): release_type = [release_type] _check_filter(release_status, VALID_RELEASE_STATUSES) _check_filter(release_type, VALID_RELEASE_TYPES) if (release_status and ('releases' not in includes) and (entity != 'release')): raise InvalidFilterError("Can't have a status with no release include") if (release_type and ('release-groups' not in includes) and ('releases' not in includes) and (entity not in ['release-group', 'release'])): raise InvalidFilterError("Can't have a release type with no releases or release-groups involved") params = {} if len(release_status): params['status'] = '|'.join(release_status) if len(release_type): params['type'] = '|'.join(release_type) return params
[ "def", "_check_filter_and_make_params", "(", "entity", ",", "includes", ",", "release_status", "=", "[", "]", ",", "release_type", "=", "[", "]", ")", ":", "if", "isinstance", "(", "release_status", ",", "compat", ".", "basestring", ")", ":", "release_status", "=", "[", "release_status", "]", "if", "isinstance", "(", "release_type", ",", "compat", ".", "basestring", ")", ":", "release_type", "=", "[", "release_type", "]", "_check_filter", "(", "release_status", ",", "VALID_RELEASE_STATUSES", ")", "_check_filter", "(", "release_type", ",", "VALID_RELEASE_TYPES", ")", "if", "(", "release_status", "and", "(", "'releases'", "not", "in", "includes", ")", "and", "(", "entity", "!=", "'release'", ")", ")", ":", "raise", "InvalidFilterError", "(", "\"Can't have a status with no release include\"", ")", "if", "(", "release_type", "and", "(", "'release-groups'", "not", "in", "includes", ")", "and", "(", "'releases'", "not", "in", "includes", ")", "and", "(", "entity", "not", "in", "[", "'release-group'", ",", "'release'", "]", ")", ")", ":", "raise", "InvalidFilterError", "(", "\"Can't have a release type with no releases or release-groups involved\"", ")", "params", "=", "{", "}", "if", "len", "(", "release_status", ")", ":", "params", "[", "'status'", "]", "=", "'|'", ".", "join", "(", "release_status", ")", "if", "len", "(", "release_type", ")", ":", "params", "[", "'type'", "]", "=", "'|'", ".", "join", "(", "release_type", ")", "return", "params" ]
check that the status or type values are valid .
train
false
53,314
def raisehttp(): raise HTTP(400, 'internal error')
[ "def", "raisehttp", "(", ")", ":", "raise", "HTTP", "(", "400", ",", "'internal error'", ")" ]
raises http 400 so an error page is returned .
train
false
53,316
@_ConfigurableFilter(executable='HTML_TIDY_EXECUTABLE') def html_tidy_wrap(infile, executable='tidy5'): return _html_tidy_runner(infile, '-quiet --show-info no --show-warnings no -utf8 -indent --indent-attributes no --sort-attributes alpha --wrap 80 --wrap-sections no --drop-empty-elements no --tidy-mark no -modify %1', executable=executable)
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'HTML_TIDY_EXECUTABLE'", ")", "def", "html_tidy_wrap", "(", "infile", ",", "executable", "=", "'tidy5'", ")", ":", "return", "_html_tidy_runner", "(", "infile", ",", "'-quiet --show-info no --show-warnings no -utf8 -indent --indent-attributes no --sort-attributes alpha --wrap 80 --wrap-sections no --drop-empty-elements no --tidy-mark no -modify %1'", ",", "executable", "=", "executable", ")" ]
run html tidy with line wrapping .
train
false
53,318
def pearson_correlation(X, Y): if (X is Y): X = Y = np.asanyarray(X) else: X = np.asanyarray(X) Y = np.asanyarray(Y) if (X.shape[1] != Y.shape[1]): raise ValueError('Incompatible dimension for X and Y matrices') XY = ssd.cdist(X, Y, 'correlation', 2) return (1 - XY)
[ "def", "pearson_correlation", "(", "X", ",", "Y", ")", ":", "if", "(", "X", "is", "Y", ")", ":", "X", "=", "Y", "=", "np", ".", "asanyarray", "(", "X", ")", "else", ":", "X", "=", "np", ".", "asanyarray", "(", "X", ")", "Y", "=", "np", ".", "asanyarray", "(", "Y", ")", "if", "(", "X", ".", "shape", "[", "1", "]", "!=", "Y", ".", "shape", "[", "1", "]", ")", ":", "raise", "ValueError", "(", "'Incompatible dimension for X and Y matrices'", ")", "XY", "=", "ssd", ".", "cdist", "(", "X", ",", "Y", ",", "'correlation'", ",", "2", ")", "return", "(", "1", "-", "XY", ")" ]
considering the rows of x and y as vectors , compute the pearson correlation between each pair of rows .
train
false
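A small numeric check of pearson_correlation, assuming scipy.spatial.distance was imported as ssd, as the snippet expects. Identical rows score 1.0 and perfectly anti-correlated rows score -1.0, since the function returns one minus the correlation distance.

import numpy as np

X = np.array([[1.0, 2.0, 3.0],
              [3.0, 2.0, 1.0]])
print(np.round(pearson_correlation(X, X), 3))
# [[ 1. -1.]
#  [-1.  1.]]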
53,319
def _GetLargePdbShimCcPath(): this_dir = os.path.abspath(os.path.dirname(__file__)) src_dir = os.path.abspath(os.path.join(this_dir, '..', '..')) win_data_dir = os.path.join(src_dir, 'data', 'win') large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc') return large_pdb_shim_cc
[ "def", "_GetLargePdbShimCcPath", "(", ")", ":", "this_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "src_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "this_dir", ",", "'..'", ",", "'..'", ")", ")", "win_data_dir", "=", "os", ".", "path", ".", "join", "(", "src_dir", ",", "'data'", ",", "'win'", ")", "large_pdb_shim_cc", "=", "os", ".", "path", ".", "join", "(", "win_data_dir", ",", "'large-pdb-shim.cc'", ")", "return", "large_pdb_shim_cc" ]
returns the path of the large_pdb_shim.cc file .
train
false
53,320
def get_mask_ipv6(bits): if ((bits > 128) or (bits < 0)): raise ValueError(('A mask can only be 0-128 bits, got %i' % bits)) elif (bits == 128): return FULL_IPv6_MASK mask_bin = _get_binary(((2 ** bits) - 1), 128)[::(-1)] groupings = [mask_bin[(16 * i):(16 * (i + 1))] for i in range(8)] return ':'.join([('%04x' % int(group, 2)) for group in groupings]).upper()
[ "def", "get_mask_ipv6", "(", "bits", ")", ":", "if", "(", "(", "bits", ">", "128", ")", "or", "(", "bits", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "(", "'A mask can only be 0-128 bits, got %i'", "%", "bits", ")", ")", "elif", "(", "bits", "==", "128", ")", ":", "return", "FULL_IPv6_MASK", "mask_bin", "=", "_get_binary", "(", "(", "(", "2", "**", "bits", ")", "-", "1", ")", ",", "128", ")", "[", ":", ":", "(", "-", "1", ")", "]", "groupings", "=", "[", "mask_bin", "[", "(", "16", "*", "i", ")", ":", "(", "16", "*", "(", "i", "+", "1", ")", ")", "]", "for", "i", "in", "range", "(", "8", ")", "]", "return", "':'", ".", "join", "(", "[", "(", "'%04x'", "%", "int", "(", "group", ",", "2", ")", ")", "for", "group", "in", "groupings", "]", ")", ".", "upper", "(", ")" ]
provides the ipv6 mask for a given number of bits .
train
false
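A runnable sketch for get_mask_ipv6. The module-level helpers _get_binary and FULL_IPv6_MASK are not shown in the snippet, so equivalent stand-ins are assumed here; they match what the body of the function requires (a most-significant-bit-first binary string of a given width).

# Assumed stand-ins for the unshown module-level helpers.
FULL_IPv6_MASK = 'FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF'

def _get_binary(value, bits):
    return format(value, '0{}b'.format(bits))  # MSB-first, zero-padded

print(get_mask_ipv6(64))   # FFFF:FFFF:FFFF:FFFF:0000:0000:0000:0000
print(get_mask_ipv6(128))  # FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF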
53,321
def _norm_encoding(encoding): try: return codecs.lookup(encoding).name except LookupError: return encoding
[ "def", "_norm_encoding", "(", "encoding", ")", ":", "try", ":", "return", "codecs", ".", "lookup", "(", "encoding", ")", ".", "name", "except", "LookupError", ":", "return", "encoding" ]
normalize the encoding name -- where "normalized" is what python's codecs module calls it .
train
false
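A quick demonstration of _norm_encoding: codecs.lookup canonicalizes known aliases, while unknown names fall through unchanged.

print(_norm_encoding('UTF8'))         # utf-8
print(_norm_encoding('latin-1'))      # iso8859-1
print(_norm_encoding('no-such-enc'))  # no-such-enc (LookupError swallowed)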
53,322
def is_stopped(): return (status() == 'stopped')
[ "def", "is_stopped", "(", ")", ":", "return", "(", "status", "(", ")", "==", "'stopped'", ")" ]
check if the firewall is stopped .
train
false
53,323
def cluster_exists(version, name='main'): return ('{0}/{1}'.format(version, name) in cluster_list())
[ "def", "cluster_exists", "(", "version", ",", "name", "=", "'main'", ")", ":", "return", "(", "'{0}/{1}'", ".", "format", "(", "version", ",", "name", ")", "in", "cluster_list", "(", ")", ")" ]
checks if a given version and name of a cluster exists .
train
false
53,324
def write_drawing(worksheet): if (worksheet._charts or worksheet._images): rel = Relationship(type='drawing', target='') worksheet._rels.append(rel) drawing = Related() drawing.id = ('rId%s' % len(worksheet._rels)) return drawing.to_tree('drawing')
[ "def", "write_drawing", "(", "worksheet", ")", ":", "if", "(", "worksheet", ".", "_charts", "or", "worksheet", ".", "_images", ")", ":", "rel", "=", "Relationship", "(", "type", "=", "'drawing'", ",", "target", "=", "''", ")", "worksheet", ".", "_rels", ".", "append", "(", "rel", ")", "drawing", "=", "Related", "(", ")", "drawing", ".", "id", "=", "(", "'rId%s'", "%", "len", "(", "worksheet", ".", "_rels", ")", ")", "return", "drawing", ".", "to_tree", "(", "'drawing'", ")" ]
add link to drawing if required .
train
false
53,325
def mdft(n): mat = [[None for x in range(n)] for y in range(n)] base = exp(((((-2) * pi) * I) / n)) mat[0] = ([1] * n) for i in range(n): mat[i][0] = 1 for i in range(1, n): for j in range(i, n): mat[i][j] = mat[j][i] = (base ** (i * j)) return ((1 / sqrt(n)) * Matrix(mat))
[ "def", "mdft", "(", "n", ")", ":", "mat", "=", "[", "[", "None", "for", "x", "in", "range", "(", "n", ")", "]", "for", "y", "in", "range", "(", "n", ")", "]", "base", "=", "exp", "(", "(", "(", "(", "(", "-", "2", ")", "*", "pi", ")", "*", "I", ")", "/", "n", ")", ")", "mat", "[", "0", "]", "=", "(", "[", "1", "]", "*", "n", ")", "for", "i", "in", "range", "(", "n", ")", ":", "mat", "[", "i", "]", "[", "0", "]", "=", "1", "for", "i", "in", "range", "(", "1", ",", "n", ")", ":", "for", "j", "in", "range", "(", "i", ",", "n", ")", ":", "mat", "[", "i", "]", "[", "j", "]", "=", "mat", "[", "j", "]", "[", "i", "]", "=", "(", "base", "**", "(", "i", "*", "j", ")", ")", "return", "(", "(", "1", "/", "sqrt", "(", "n", ")", ")", "*", "Matrix", "(", "mat", ")", ")" ]
returns an expression of a discrete fourier transform as a matrix multiplication .
train
false
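A check of mdft for n = 2, assuming the sympy names the snippet relies on (exp, pi, I, sqrt, Matrix) are in scope. The matrix is real for this size, so unitarity F·Fᴴ = I holds exactly without simplification.

from sympy import eye

F = mdft(2)
print(F)                  # Matrix([[sqrt(2)/2, sqrt(2)/2], [sqrt(2)/2, -sqrt(2)/2]])
print(F * F.H == eye(2))  # True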
53,326
@pytest.mark.xfail('not HAS_BZ2') def test_guessing_file_object(): t = ascii.read(open('t/ipac.dat.bz2', 'rb')) assert (t.colnames == ['ra', 'dec', 'sai', 'v2', 'sptype'])
[ "@", "pytest", ".", "mark", ".", "xfail", "(", "'not HAS_BZ2'", ")", "def", "test_guessing_file_object", "(", ")", ":", "t", "=", "ascii", ".", "read", "(", "open", "(", "'t/ipac.dat.bz2'", ",", "'rb'", ")", ")", "assert", "(", "t", ".", "colnames", "==", "[", "'ra'", ",", "'dec'", ",", "'sai'", ",", "'v2'", ",", "'sptype'", "]", ")" ]
test guessing a file object .
train
false
53,327
def _pretty_fulltext_sentence(sent): outstr = u'' outstr += u'full-text sentence ({0.ID}) in {1}:\n\n'.format(sent, sent.doc.get(u'name', sent.doc.description)) outstr += u'\n[POS] {0} tags\n'.format(len(sent.POS)) outstr += u'\n[POS_tagset] {0}\n\n'.format(sent.POS_tagset) outstr += u'[text] + [annotationSet]\n\n' outstr += sent._ascii() outstr += u'\n' return outstr
[ "def", "_pretty_fulltext_sentence", "(", "sent", ")", ":", "outstr", "=", "u''", "outstr", "+=", "u'full-text sentence ({0.ID}) in {1}:\\n\\n'", ".", "format", "(", "sent", ",", "sent", ".", "doc", ".", "get", "(", "u'name'", ",", "sent", ".", "doc", ".", "description", ")", ")", "outstr", "+=", "u'\\n[POS] {0} tags\\n'", ".", "format", "(", "len", "(", "sent", ".", "POS", ")", ")", "outstr", "+=", "u'\\n[POS_tagset] {0}\\n\\n'", ".", "format", "(", "sent", ".", "POS_tagset", ")", "outstr", "+=", "u'[text] + [annotationSet]\\n\\n'", "outstr", "+=", "sent", ".", "_ascii", "(", ")", "outstr", "+=", "u'\\n'", "return", "outstr" ]
helper function for pretty-printing an annotated sentence from a full-text document .
train
false
53,328
def _fixup_cdef_enums(string, reg=re.compile('=\\s*(\\d+)\\s*<<\\s*(\\d+)')): def repl_shift(match): shift_by = int(match.group(2)) value = int(match.group(1)) int_value = ctypes.c_int((value << shift_by)).value return ('= %s' % str(int_value)) return reg.sub(repl_shift, string)
[ "def", "_fixup_cdef_enums", "(", "string", ",", "reg", "=", "re", ".", "compile", "(", "'=\\\\s*(\\\\d+)\\\\s*<<\\\\s*(\\\\d+)'", ")", ")", ":", "def", "repl_shift", "(", "match", ")", ":", "shift_by", "=", "int", "(", "match", ".", "group", "(", "2", ")", ")", "value", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "int_value", "=", "ctypes", ".", "c_int", "(", "(", "value", "<<", "shift_by", ")", ")", ".", "value", "return", "(", "'= %s'", "%", "str", "(", "int_value", ")", ")", "return", "reg", ".", "sub", "(", "repl_shift", ",", "string", ")" ]
converts some common enum expressions to constants .
train
true
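A demonstration of _fixup_cdef_enums, assuming re and ctypes are imported as the snippet requires. Shift expressions are folded to plain integers, wrapping through a signed 32-bit value just as ctypes.c_int does.

src = 'FLAG_A = 1 << 0, FLAG_B = 1 << 4, FLAG_SIGN = 1 << 31'
print(_fixup_cdef_enums(src))
# FLAG_A = 1, FLAG_B = 16, FLAG_SIGN = -2147483648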
53,330
def test_no_dot(): line = Line() line.add('no dot', []) q = line.render_pyquery() assert (q('.text-overlay text').text() == 'No data')
[ "def", "test_no_dot", "(", ")", ":", "line", "=", "Line", "(", ")", "line", ".", "add", "(", "'no dot'", ",", "[", "]", ")", "q", "=", "line", ".", "render_pyquery", "(", ")", "assert", "(", "q", "(", "'.text-overlay text'", ")", ".", "text", "(", ")", "==", "'No data'", ")" ]
line test with an empty serie .
train
false
53,331
@handle_dashboard_error @require_POST @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_level('staff') @require_post_params('url') def show_unit_extensions(request, course_id): course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id)) unit = find_unit(course, request.POST.get('url')) return JsonResponse(dump_module_extensions(course, unit))
[ "@", "handle_dashboard_error", "@", "require_POST", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_level", "(", "'staff'", ")", "@", "require_post_params", "(", "'url'", ")", "def", "show_unit_extensions", "(", "request", ",", "course_id", ")", ":", "course", "=", "get_course_by_id", "(", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", ")", "unit", "=", "find_unit", "(", "course", ",", "request", ".", "POST", ".", "get", "(", "'url'", ")", ")", "return", "JsonResponse", "(", "dump_module_extensions", "(", "course", ",", "unit", ")", ")" ]
shows all of the students which have due date extensions for the given unit .
train
false
53,332
def get_preferred_file_name_encoding(): return (sys.getfilesystemencoding() or locale.getpreferredencoding() or u'utf-8')
[ "def", "get_preferred_file_name_encoding", "(", ")", ":", "return", "(", "sys", ".", "getfilesystemencoding", "(", ")", "or", "locale", ".", "getpreferredencoding", "(", ")", "or", "u'utf-8'", ")" ]
get preferred file name encoding .
train
false
53,333
def cleanup_sys_modules(directories): cleaned = [] for (modname, module) in list(sys.modules.items()): modfile = getattr(module, '__file__', None) if modfile: for directory in directories: if modfile.startswith(directory): cleaned.append(modname) del sys.modules[modname] break return cleaned
[ "def", "cleanup_sys_modules", "(", "directories", ")", ":", "cleaned", "=", "[", "]", "for", "(", "modname", ",", "module", ")", "in", "list", "(", "sys", ".", "modules", ".", "items", "(", ")", ")", ":", "modfile", "=", "getattr", "(", "module", ",", "'__file__'", ",", "None", ")", "if", "modfile", ":", "for", "directory", "in", "directories", ":", "if", "modfile", ".", "startswith", "(", "directory", ")", ":", "cleaned", ".", "append", "(", "modname", ")", "del", "sys", ".", "modules", "[", "modname", "]", "break", "return", "cleaned" ]
remove submodules of directories from sys.modules .
train
false
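A usage sketch for cleanup_sys_modules, using the stdlib json package as a harmless demonstration target; pointing it at real project directories forces a fresh import on next use.

import os
import sys
import json  # loaded so there is something to evict

stdlib_dir = os.path.dirname(json.__file__)  # the json package directory
removed = cleanup_sys_modules([stdlib_dir])
print('json' in removed, 'json' in sys.modules)  # True False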
53,334
def check_cli(module, cli): vrouter_name = module.params['pn_vrouter_name'] interface_ip = module.params['pn_interface_ip'] global VROUTER_EXISTS, LB_INTERFACE_EXISTS check_vrouter = (cli + ' vrouter-show format name no-show-headers ') check_vrouter = shlex.split(check_vrouter) out = module.run_command(check_vrouter)[1] out = out.split() if (vrouter_name in out): VROUTER_EXISTS = True else: VROUTER_EXISTS = False show = (cli + (' vrouter-loopback-interface-show vrouter-name %s format ip no-show-headers' % vrouter_name)) show = shlex.split(show) out = module.run_command(show)[1] out = out.split() if (interface_ip in out): LB_INTERFACE_EXISTS = True else: LB_INTERFACE_EXISTS = False
[ "def", "check_cli", "(", "module", ",", "cli", ")", ":", "vrouter_name", "=", "module", ".", "params", "[", "'pn_vrouter_name'", "]", "interface_ip", "=", "module", ".", "params", "[", "'pn_interface_ip'", "]", "global", "VROUTER_EXISTS", ",", "LB_INTERFACE_EXISTS", "check_vrouter", "=", "(", "cli", "+", "' vrouter-show format name no-show-headers '", ")", "check_vrouter", "=", "shlex", ".", "split", "(", "check_vrouter", ")", "out", "=", "module", ".", "run_command", "(", "check_vrouter", ")", "[", "1", "]", "out", "=", "out", ".", "split", "(", ")", "if", "(", "vrouter_name", "in", "out", ")", ":", "VROUTER_EXISTS", "=", "True", "else", ":", "VROUTER_EXISTS", "=", "False", "show", "=", "(", "cli", "+", "(", "' vrouter-loopback-interface-show vrouter-name %s format ip no-show-headers'", "%", "vrouter_name", ")", ")", "show", "=", "shlex", ".", "split", "(", "show", ")", "out", "=", "module", ".", "run_command", "(", "show", ")", "[", "1", "]", "out", "=", "out", ".", "split", "(", ")", "if", "(", "interface_ip", "in", "out", ")", ":", "LB_INTERFACE_EXISTS", "=", "True", "else", ":", "LB_INTERFACE_EXISTS", "=", "False" ]
this method checks if vrouter exists on the target node .
train
false
53,335
def get_soap_accessor(): db_ip = appscale_info.get_db_master_ip() bindport = constants.UA_SERVER_PORT return SOAPpy.SOAPProxy('https://{0}:{1}'.format(db_ip, bindport))
[ "def", "get_soap_accessor", "(", ")", ":", "db_ip", "=", "appscale_info", ".", "get_db_master_ip", "(", ")", "bindport", "=", "constants", ".", "UA_SERVER_PORT", "return", "SOAPpy", ".", "SOAPProxy", "(", "'https://{0}:{1}'", ".", "format", "(", "db_ip", ",", "bindport", ")", ")" ]
returns the soap server accessor to deal with applications and users .
train
false
53,336
def relative_path(filename, start=os.path.curdir): try: (dirname, basename) = os.path.split(filename) relative_dir = os.path.relpath(dirname, start) return os.path.join(relative_dir, basename) except ValueError: return os.path.abspath(filename)
[ "def", "relative_path", "(", "filename", ",", "start", "=", "os", ".", "path", ".", "curdir", ")", ":", "try", ":", "(", "dirname", ",", "basename", ")", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "relative_dir", "=", "os", ".", "path", ".", "relpath", "(", "dirname", ",", "start", ")", "return", "os", ".", "path", ".", "join", "(", "relative_dir", ",", "basename", ")", "except", "ValueError", ":", "return", "os", ".", "path", ".", "abspath", "(", "filename", ")" ]
cross platform relative path of a filename .
train
false
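A quick demonstration of relative_path on POSIX-style paths; the ValueError fallback matters mainly on Windows, where paths on different drives have no relative form.

import os

print(relative_path(os.path.join('pkg', 'sub', 'mod.py')))        # pkg/sub/mod.py
print(relative_path(os.path.join('pkg', 'mod.py'), start='pkg'))  # ./mod.py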
53,337
@click.command(u'show-pending-jobs') @click.option(u'--site', help=u'site name') @pass_context def show_pending_jobs(context, site=None): from frappe.utils.doctor import pending_jobs as _pending_jobs if (not site): site = get_site(context) with frappe.init_site(site): pending_jobs = _pending_jobs(site=site) return pending_jobs
[ "@", "click", ".", "command", "(", "u'show-pending-jobs'", ")", "@", "click", ".", "option", "(", "u'--site'", ",", "help", "=", "u'site name'", ")", "@", "pass_context", "def", "show_pending_jobs", "(", "context", ",", "site", "=", "None", ")", ":", "from", "frappe", ".", "utils", ".", "doctor", "import", "pending_jobs", "as", "_pending_jobs", "if", "(", "not", "site", ")", ":", "site", "=", "get_site", "(", "context", ")", "with", "frappe", ".", "init_site", "(", "site", ")", ":", "pending_jobs", "=", "_pending_jobs", "(", "site", "=", "site", ")", "return", "pending_jobs" ]
get diagnostic info about background jobs .
train
false
53,338
def get_top_state_rule_answers(exploration_id, state_name, rule_str_list): return get_top_state_rule_answers_multi([(exploration_id, state_name)], rule_str_list)[0]
[ "def", "get_top_state_rule_answers", "(", "exploration_id", ",", "state_name", ",", "rule_str_list", ")", ":", "return", "get_top_state_rule_answers_multi", "(", "[", "(", "exploration_id", ",", "state_name", ")", "]", ",", "rule_str_list", ")", "[", "0", "]" ]
returns a list of top answers submitted to the given state in the given exploration which were mapped to any of the rules listed in rule_str_list .
train
false
53,340
def _recurse_config_to_dict(t_data): if (not isinstance(t_data, type(None))): if isinstance(t_data, list): t_list = [] for i in t_data: t_list.append(_recurse_config_to_dict(i)) return t_list elif isinstance(t_data, dict): t_dict = {} for (k, v) in t_data.iteritems(): t_dict[k] = _recurse_config_to_dict(v) return t_dict elif hasattr(t_data, '__dict__'): return _recurse_config_to_dict(t_data.__dict__) else: return _serializer(t_data)
[ "def", "_recurse_config_to_dict", "(", "t_data", ")", ":", "if", "(", "not", "isinstance", "(", "t_data", ",", "type", "(", "None", ")", ")", ")", ":", "if", "isinstance", "(", "t_data", ",", "list", ")", ":", "t_list", "=", "[", "]", "for", "i", "in", "t_data", ":", "t_list", ".", "append", "(", "_recurse_config_to_dict", "(", "i", ")", ")", "return", "t_list", "elif", "isinstance", "(", "t_data", ",", "dict", ")", ":", "t_dict", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "t_data", ".", "iteritems", "(", ")", ":", "t_dict", "[", "k", "]", "=", "_recurse_config_to_dict", "(", "v", ")", "return", "t_dict", "elif", "hasattr", "(", "t_data", ",", "'__dict__'", ")", ":", "return", "_recurse_config_to_dict", "(", "t_data", ".", "__dict__", ")", "else", ":", "return", "_serializer", "(", "t_data", ")" ]
helper function to recurse through a vim object and attempt to return all child objects .
train
true
53,342
def translatePoints(elementNode, points, prefix): translateVector3 = matrix.getCumulativeVector3Remove(Vector3(), elementNode, prefix) if (abs(translateVector3) > 0.0): euclidean.translateVector3Path(points, translateVector3)
[ "def", "translatePoints", "(", "elementNode", ",", "points", ",", "prefix", ")", ":", "translateVector3", "=", "matrix", ".", "getCumulativeVector3Remove", "(", "Vector3", "(", ")", ",", "elementNode", ",", "prefix", ")", "if", "(", "abs", "(", "translateVector3", ")", ">", "0.0", ")", ":", "euclidean", ".", "translateVector3Path", "(", "points", ",", "translateVector3", ")" ]
translate the points .
train
false
53,343
@login_required def email_login(request): if (request.method == u'POST'): form = EmailForm(request.POST) if form.is_valid(): return complete(request, u'email') else: form = EmailForm() return render(request, u'accounts/email.html', {u'title': _(u'Register email'), u'form': form})
[ "@", "login_required", "def", "email_login", "(", "request", ")", ":", "if", "(", "request", ".", "method", "==", "u'POST'", ")", ":", "form", "=", "EmailForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "return", "complete", "(", "request", ",", "u'email'", ")", "else", ":", "form", "=", "EmailForm", "(", ")", "return", "render", "(", "request", ",", "u'accounts/email.html'", ",", "{", "u'title'", ":", "_", "(", "u'Register email'", ")", ",", "u'form'", ":", "form", "}", ")" ]
connect email .
train
false
53,344
def validate_website(url): validate_url = URLValidator() if (url and ('://' not in url)): url = (u'http://%s' % url) try: validate_url(url) except ValidationError: raise ValidationError(_('Enter a valid URL.')) return url
[ "def", "validate_website", "(", "url", ")", ":", "validate_url", "=", "URLValidator", "(", ")", "if", "(", "url", "and", "(", "'://'", "not", "in", "url", ")", ")", ":", "url", "=", "(", "u'http://%s'", "%", "url", ")", "try", ":", "validate_url", "(", "url", ")", "except", "ValidationError", ":", "raise", "ValidationError", "(", "_", "(", "'Enter a valid URL.'", ")", ")", "return", "url" ]
validate and return a properly formatted website url .
train
false
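A usage sketch for validate_website. It is runnable only where Django is installed (URLValidator and ValidationError come from django.core, as the snippet assumes) and may require settings to be configured; a scheme is prepended before validation when none is present.

print(validate_website('example.com'))          # http://example.com
print(validate_website('https://example.com'))  # https://example.com
# validate_website('not a url') would raise ValidationError('Enter a valid URL.')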
53,345
def _average_flowgrams(mapping, flowgrams, sample_keys): flows = defaultdict(list) invert_map = invert_mapping(mapping) for f in flowgrams: key = invert_map[f.Name] samples = sample_keys[key] if (f.Name in samples): flows[key].append(f.flowgram) samples.remove(f.Name) if (len(samples) == 0): ave_flowgram = build_averaged_flowgram(flows[key]) ave_f = Flowgram(ave_flowgram, Name=key) del flows[key] (yield (ave_f, key))
[ "def", "_average_flowgrams", "(", "mapping", ",", "flowgrams", ",", "sample_keys", ")", ":", "flows", "=", "defaultdict", "(", "list", ")", "invert_map", "=", "invert_mapping", "(", "mapping", ")", "for", "f", "in", "flowgrams", ":", "key", "=", "invert_map", "[", "f", ".", "Name", "]", "samples", "=", "sample_keys", "[", "key", "]", "if", "(", "f", ".", "Name", "in", "samples", ")", ":", "flows", "[", "key", "]", ".", "append", "(", "f", ".", "flowgram", ")", "samples", ".", "remove", "(", "f", ".", "Name", ")", "if", "(", "len", "(", "samples", ")", "==", "0", ")", ":", "ave_flowgram", "=", "build_averaged_flowgram", "(", "flows", "[", "key", "]", ")", "ave_f", "=", "Flowgram", "(", "ave_flowgram", ",", "Name", "=", "key", ")", "del", "flows", "[", "key", "]", "(", "yield", "(", "ave_f", ",", "key", ")", ")" ]
average flowgrams according to cluster mapping .
train
false
53,346
def create_ikepolicy(name, profile=None, **kwargs): conn = _auth(profile) return conn.create_ikepolicy(name, **kwargs)
[ "def", "create_ikepolicy", "(", "name", ",", "profile", "=", "None", ",", "**", "kwargs", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "create_ikepolicy", "(", "name", ",", "**", "kwargs", ")" ]
creates a new ikepolicy .
train
true
53,348
def clean_orphaned_pyc(paths): orphaned_pyc = [] for path in paths: for pyc_path in stem.util.system.files_with_suffix(path, '.pyc'): py_path = pyc_path[:(-1)] pycache = ('%s__pycache__%s' % (os.path.sep, os.path.sep)) if (pycache in pyc_path): (directory, pycache_filename) = pyc_path.split(pycache, 1) if (not pycache_filename.endswith('.pyc')): continue py_path = os.path.join(directory, (pycache_filename.split('.')[0] + '.py')) if (not os.path.exists(py_path)): orphaned_pyc.append(pyc_path) os.remove(pyc_path) return orphaned_pyc
[ "def", "clean_orphaned_pyc", "(", "paths", ")", ":", "orphaned_pyc", "=", "[", "]", "for", "path", "in", "paths", ":", "for", "pyc_path", "in", "stem", ".", "util", ".", "system", ".", "files_with_suffix", "(", "path", ",", "'.pyc'", ")", ":", "py_path", "=", "pyc_path", "[", ":", "(", "-", "1", ")", "]", "pycache", "=", "(", "'%s__pycache__%s'", "%", "(", "os", ".", "path", ".", "sep", ",", "os", ".", "path", ".", "sep", ")", ")", "if", "(", "pycache", "in", "pyc_path", ")", ":", "(", "directory", ",", "pycache_filename", ")", "=", "pyc_path", ".", "split", "(", "pycache", ",", "1", ")", "if", "(", "not", "pycache_filename", ".", "endswith", "(", "'.pyc'", ")", ")", ":", "continue", "py_path", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "(", "pycache_filename", ".", "split", "(", "'.'", ")", "[", "0", "]", "+", "'.py'", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "py_path", ")", ")", ":", "orphaned_pyc", ".", "append", "(", "pyc_path", ")", "os", ".", "remove", "(", "pyc_path", ")", "return", "orphaned_pyc" ]
deletes any file with a *.pyc extension but no corresponding *.py file .
train
false
53,350
def getSender(email): sender = email['From'] m = re.match('(.*)\\s<.*>', sender) if m: return m.group(1) return sender
[ "def", "getSender", "(", "email", ")", ":", "sender", "=", "email", "[", "'From'", "]", "m", "=", "re", ".", "match", "(", "'(.*)\\\\s<.*>'", ",", "sender", ")", "if", "m", ":", "return", "m", ".", "group", "(", "1", ")", "return", "sender" ]
returns the best-guess sender of an email .
train
false
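A quick demonstration of getSender, assuming import re as the snippet requires. It works on anything dict-like, such as email.message.Message; plain dicts suffice here.

import re

print(getSender({'From': 'Ada Lovelace <ada@example.com>'}))  # Ada Lovelace
print(getSender({'From': 'ada@example.com'}))                 # ada@example.com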
53,351
def test_routing_class_with_cli_commands(): @hug.object(name='git', version='1.0.0') class GIT(object, ): 'An example of command like calls via an Object' @hug.object.cli def push(self, branch='master'): return 'Pushing {}'.format(branch) @hug.object.cli def pull(self, branch='master'): return 'Pulling {}'.format(branch) assert ('token' in hug.test.cli(GIT.push, branch='token')) assert ('another token' in hug.test.cli(GIT.pull, branch='another token'))
[ "def", "test_routing_class_with_cli_commands", "(", ")", ":", "@", "hug", ".", "object", "(", "name", "=", "'git'", ",", "version", "=", "'1.0.0'", ")", "class", "GIT", "(", "object", ",", ")", ":", "@", "hug", ".", "object", ".", "cli", "def", "push", "(", "self", ",", "branch", "=", "'master'", ")", ":", "return", "'Pushing {}'", ".", "format", "(", "branch", ")", "@", "hug", ".", "object", ".", "cli", "def", "pull", "(", "self", ",", "branch", "=", "'master'", ")", ":", "return", "'Pulling {}'", ".", "format", "(", "branch", ")", "assert", "(", "'token'", "in", "hug", ".", "test", ".", "cli", "(", "GIT", ".", "push", ",", "branch", "=", "'token'", ")", ")", "assert", "(", "'another token'", "in", "hug", ".", "test", ".", "cli", "(", "GIT", ".", "pull", ",", "branch", "=", "'another token'", ")", ")" ]
basic operation test .
train
false