Column                 Type    Range / classes
id_within_dataset      int64   1 to 55.5k
snippet                string  lengths 19 to 14.2k
tokens                 list    lengths 6 to 1.63k
nl                     string  lengths 6 to 352
split_within_dataset   string  1 value ("train")
is_duplicated          bool    2 classes (true / false)
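Every record below carries these six fields. As a minimal, hypothetical sketch (the dump ships no loader code; the class name and sample values are invented for illustration), a record can be modeled as a plain Python dataclass:

from dataclasses import dataclass
from typing import List

@dataclass
class Row:
    id_within_dataset: int     # int64, observed range 1 to 55.5k
    snippet: str               # raw source code, 19 to 14.2k chars
    tokens: List[str]          # lexer tokens of the snippet, 6 to 1.63k items
    nl: str                    # natural-language summary, 6 to 352 chars
    split_within_dataset: str  # one class in this dump: "train"
    is_duplicated: bool        # two classes: True / False

# Hypothetical row shaped like record 2,834 below (values invented):
row = Row(2834, "def f(): pass",
          ["def", "f", "(", ")", ":", "pass"],
          "a no-op function .", "train", False)
print(row.is_duplicated)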
2,834
@_default_app.route(u'/')
def start_page():
    if (not _home):
        abort(404)
    return redirect(_home)
[ "@", "_default_app", ".", "route", "(", "u'/'", ")", "def", "start_page", "(", ")", ":", "if", "(", "not", "_home", ")", ":", "abort", "(", "404", ")", "return", "redirect", "(", "_home", ")" ]
redirect user to registered ui home .
train
false
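A note on how the two code columns relate (illustrative only, not part of the dataset): tokens is the snippet's token stream, so space-joining it yields a rough reconstruction of the snippet text; exact whitespace and line breaks are not recoverable from the tokens. A minimal sketch, using the token list from record 2,834 above:

tokens = ["@", "_default_app", ".", "route", "(", "u'/'", ")",
          "def", "start_page", "(", ")", ":", "if", "(", "not", "_home",
          ")", ":", "abort", "(", "404", ")", "return", "redirect",
          "(", "_home", ")"]
approx_snippet = " ".join(tokens)
print(approx_snippet)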
2,836
@register.assignment_tag(takes_context=True)
def assignment_tag_without_context_parameter(arg):
    return 'Expected result'
[ "@", "register", ".", "assignment_tag", "(", "takes_context", "=", "True", ")", "def", "assignment_tag_without_context_parameter", "(", "arg", ")", ":", "return", "'Expected result'" ]
expected assignment_tag_without_context_parameter __doc__ .
train
false
2,837
def writePlist(rootObject, pathOrFile):
    didOpen = 0
    if isinstance(pathOrFile, (str, unicode)):
        pathOrFile = open(pathOrFile, 'w')
        didOpen = 1
    writer = PlistWriter(pathOrFile)
    writer.writeln('<plist version="1.0">')
    writer.writeValue(rootObject)
    writer.writeln('</plist>')
    if didOpen:
        pathOrFile.close()
[ "def", "writePlist", "(", "rootObject", ",", "pathOrFile", ")", ":", "didOpen", "=", "0", "if", "isinstance", "(", "pathOrFile", ",", "(", "str", ",", "unicode", ")", ")", ":", "pathOrFile", "=", "open", "(", "pathOrFile", ",", "'w'", ")", "didOpen", "=", "1", "writer", "=", "PlistWriter", "(", "pathOrFile", ")", "writer", ".", "writeln", "(", "'<plist version=\"1.0\">'", ")", "writer", ".", "writeValue", "(", "rootObject", ")", "writer", ".", "writeln", "(", "'</plist>'", ")", "if", "didOpen", ":", "pathOrFile", ".", "close", "(", ")" ]
write rootobject to a .
train
false
2,838
def evtable_parse_input(menuobject, raw_string, caller):
    cmd = raw_string.strip().lower()
    if (cmd in menuobject.options):
        (goto, callback) = menuobject.options[cmd]
        menuobject.callback_goto(callback, goto, raw_string)
    elif (menuobject.auto_look and (cmd in ('look', 'l'))):
        menuobject.display_nodetext()
    elif (menuobject.auto_help and (cmd in ('help', 'h'))):
        menuobject.display_helptext()
    elif (menuobject.auto_quit and (cmd in ('quit', 'q', 'exit'))):
        menuobject.close_menu()
    elif menuobject.default:
        (goto, callback) = menuobject.default
        menuobject.callback_goto(callback, goto, raw_string)
    else:
        caller.msg(_HELP_NO_OPTION_MATCH, session=menuobject._session)
    if (not (menuobject.options or menuobject.default)):
        menuobject.close_menu()
[ "def", "evtable_parse_input", "(", "menuobject", ",", "raw_string", ",", "caller", ")", ":", "cmd", "=", "raw_string", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "(", "cmd", "in", "menuobject", ".", "options", ")", ":", "(", "goto", ",", "callback", ")", "=", "menuobject", ".", "options", "[", "cmd", "]", "menuobject", ".", "callback_goto", "(", "callback", ",", "goto", ",", "raw_string", ")", "elif", "(", "menuobject", ".", "auto_look", "and", "(", "cmd", "in", "(", "'look'", ",", "'l'", ")", ")", ")", ":", "menuobject", ".", "display_nodetext", "(", ")", "elif", "(", "menuobject", ".", "auto_help", "and", "(", "cmd", "in", "(", "'help'", ",", "'h'", ")", ")", ")", ":", "menuobject", ".", "display_helptext", "(", ")", "elif", "(", "menuobject", ".", "auto_quit", "and", "(", "cmd", "in", "(", "'quit'", ",", "'q'", ",", "'exit'", ")", ")", ")", ":", "menuobject", ".", "close_menu", "(", ")", "elif", "menuobject", ".", "default", ":", "(", "goto", ",", "callback", ")", "=", "menuobject", ".", "default", "menuobject", ".", "callback_goto", "(", "callback", ",", "goto", ",", "raw_string", ")", "else", ":", "caller", ".", "msg", "(", "_HELP_NO_OPTION_MATCH", ",", "session", "=", "menuobject", ".", "_session", ")", "if", "(", "not", "(", "menuobject", ".", "options", "or", "menuobject", ".", "default", ")", ")", ":", "menuobject", ".", "close_menu", "(", ")" ]
processes the users node inputs .
train
false
2,840
def _check_datetime_field(name, measure):
    if (not isinstance(measure[name], (Date, DateTime))):
        raise TypeError("'{name}' field must be a '{dt}', not: '{dshape}'".format(name=name, dt=DateTime(), dshape=measure[name]))
[ "def", "_check_datetime_field", "(", "name", ",", "measure", ")", ":", "if", "(", "not", "isinstance", "(", "measure", "[", "name", "]", ",", "(", "Date", ",", "DateTime", ")", ")", ")", ":", "raise", "TypeError", "(", "\"'{name}' field must be a '{dt}', not: '{dshape}'\"", ".", "format", "(", "name", "=", "name", ",", "dt", "=", "DateTime", "(", ")", ",", "dshape", "=", "measure", "[", "name", "]", ")", ")" ]
check that a field is a datetime inside some measure .
train
true
2,842
def get_bounds(reads, start_pos_index, end_pos_index):
    max_low = sys.maxint
    max_high = (- sys.maxint)
    for read in reads:
        if (read[start_pos_index] < max_low):
            max_low = read[start_pos_index]
        if (read[end_pos_index] > max_high):
            max_high = read[end_pos_index]
    return (max_low, max_high)
[ "def", "get_bounds", "(", "reads", ",", "start_pos_index", ",", "end_pos_index", ")", ":", "max_low", "=", "sys", ".", "maxint", "max_high", "=", "(", "-", "sys", ".", "maxint", ")", "for", "read", "in", "reads", ":", "if", "(", "read", "[", "start_pos_index", "]", "<", "max_low", ")", ":", "max_low", "=", "read", "[", "start_pos_index", "]", "if", "(", "read", "[", "end_pos_index", "]", ">", "max_high", ")", ":", "max_high", "=", "read", "[", "end_pos_index", "]", "return", "(", "max_low", ",", "max_high", ")" ]
returns the minimum and maximum position for a set of reads .
train
false
2,843
def multireader(*streams):
    class reader(object, ):
        def __init__(self, stream):
            self.stream = stream
        def __call__(self):
            self.text = self.stream.readlines()
    threads = []
    readers = []
    for stream in streams:
        curReader = reader(stream)
        thread = Threading.Thread(Threading.ThreadStart(curReader))
        readers.append(curReader)
        threads.append(thread)
        thread.Start()
    for thread in threads:
        thread.Join()
    return [curReader.text for curReader in readers]
[ "def", "multireader", "(", "*", "streams", ")", ":", "class", "reader", "(", "object", ",", ")", ":", "def", "__init__", "(", "self", ",", "stream", ")", ":", "self", ".", "stream", "=", "stream", "def", "__call__", "(", "self", ")", ":", "self", ".", "text", "=", "self", ".", "stream", ".", "readlines", "(", ")", "threads", "=", "[", "]", "readers", "=", "[", "]", "for", "stream", "in", "streams", ":", "curReader", "=", "reader", "(", "stream", ")", "thread", "=", "Threading", ".", "Thread", "(", "Threading", ".", "ThreadStart", "(", "curReader", ")", ")", "readers", ".", "append", "(", "curReader", ")", "threads", ".", "append", "(", "thread", ")", "thread", ".", "Start", "(", ")", "for", "thread", "in", "threads", ":", "thread", ".", "Join", "(", ")", "return", "[", "curReader", ".", "text", "for", "curReader", "in", "readers", "]" ]
creates multiple threads to read std err/std out at the same time to avoid blocking .
train
false
2,844
def is_local(path):
    if (not running_under_virtualenv()):
        return True
    return normalize_path(path).startswith(normalize_path(sys.prefix))
[ "def", "is_local", "(", "path", ")", ":", "if", "(", "not", "running_under_virtualenv", "(", ")", ")", ":", "return", "True", "return", "normalize_path", "(", "path", ")", ".", "startswith", "(", "normalize_path", "(", "sys", ".", "prefix", ")", ")" ]
determine if the given ip address refers to this machine or not .
train
true
2,845
def _sync(form, saltenv=None):
    if (saltenv is None):
        saltenv = _get_top_file_envs()
    if isinstance(saltenv, six.string_types):
        saltenv = saltenv.split(',')
    (ret, touched) = salt.utils.extmods.sync(__opts__, form, saltenv=saltenv)
    if touched:
        mod_file = os.path.join(__opts__['cachedir'], 'module_refresh')
        with salt.utils.fopen(mod_file, 'a+') as ofile:
            ofile.write('')
    if ((form == 'grains') and __opts__.get('grains_cache') and os.path.isfile(os.path.join(__opts__['cachedir'], 'grains.cache.p'))):
        try:
            os.remove(os.path.join(__opts__['cachedir'], 'grains.cache.p'))
        except OSError:
            log.error('Could not remove grains cache!')
    return ret
[ "def", "_sync", "(", "form", ",", "saltenv", "=", "None", ")", ":", "if", "(", "saltenv", "is", "None", ")", ":", "saltenv", "=", "_get_top_file_envs", "(", ")", "if", "isinstance", "(", "saltenv", ",", "six", ".", "string_types", ")", ":", "saltenv", "=", "saltenv", ".", "split", "(", "','", ")", "(", "ret", ",", "touched", ")", "=", "salt", ".", "utils", ".", "extmods", ".", "sync", "(", "__opts__", ",", "form", ",", "saltenv", "=", "saltenv", ")", "if", "touched", ":", "mod_file", "=", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'cachedir'", "]", ",", "'module_refresh'", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "mod_file", ",", "'a+'", ")", "as", "ofile", ":", "ofile", ".", "write", "(", "''", ")", "if", "(", "(", "form", "==", "'grains'", ")", "and", "__opts__", ".", "get", "(", "'grains_cache'", ")", "and", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'cachedir'", "]", ",", "'grains.cache.p'", ")", ")", ")", ":", "try", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'cachedir'", "]", ",", "'grains.cache.p'", ")", ")", "except", "OSError", ":", "log", ".", "error", "(", "'Could not remove grains cache!'", ")", "return", "ret" ]
sync the given directory in the given environment .
train
true
2,846
@conf.commands.register
def pkt2uptime(pkt, HZ=100):
    if (not isinstance(pkt, Packet)):
        raise TypeError('Not a TCP packet')
    if isinstance(pkt, NoPayload):
        raise TypeError('Not a TCP packet')
    if (not isinstance(pkt, TCP)):
        return pkt2uptime(pkt.payload)
    for opt in pkt.options:
        if (opt[0] == 'Timestamp'):
            t = (opt[1][0] / HZ)
            return t
    raise TypeError('No timestamp option')
[ "@", "conf", ".", "commands", ".", "register", "def", "pkt2uptime", "(", "pkt", ",", "HZ", "=", "100", ")", ":", "if", "(", "not", "isinstance", "(", "pkt", ",", "Packet", ")", ")", ":", "raise", "TypeError", "(", "'Not a TCP packet'", ")", "if", "isinstance", "(", "pkt", ",", "NoPayload", ")", ":", "raise", "TypeError", "(", "'Not a TCP packet'", ")", "if", "(", "not", "isinstance", "(", "pkt", ",", "TCP", ")", ")", ":", "return", "pkt2uptime", "(", "pkt", ".", "payload", ")", "for", "opt", "in", "pkt", ".", "options", ":", "if", "(", "opt", "[", "0", "]", "==", "'Timestamp'", ")", ":", "t", "=", "(", "opt", "[", "1", "]", "[", "0", "]", "/", "HZ", ")", "return", "t", "raise", "TypeError", "(", "'No timestamp option'", ")" ]
calculate the date the machine which emitted the packet booted using tcp timestamp pkt2uptime .
train
true
2,847
@gen.coroutine
def _SetWelcomeIds(user, upload_request):
    unique_id = 1
    act_dict = upload_request['activity']
    act_dict['activity_id'] = Activity.ConstructActivityId(act_dict['timestamp'], user.webapp_dev_id, unique_id)
    unique_id += 1
    ep_dict = upload_request['episode']
    ep_dict['episode_id'] = Episode.ConstructEpisodeId(ep_dict['timestamp'], user.webapp_dev_id, unique_id)
    unique_id += 1
    for ph_dict in upload_request['photos']:
        ph_dict['photo_id'] = Photo.ConstructPhotoId(ph_dict['timestamp'], user.webapp_dev_id, unique_id)
        unique_id += 1
[ "@", "gen", ".", "coroutine", "def", "_SetWelcomeIds", "(", "user", ",", "upload_request", ")", ":", "unique_id", "=", "1", "act_dict", "=", "upload_request", "[", "'activity'", "]", "act_dict", "[", "'activity_id'", "]", "=", "Activity", ".", "ConstructActivityId", "(", "act_dict", "[", "'timestamp'", "]", ",", "user", ".", "webapp_dev_id", ",", "unique_id", ")", "unique_id", "+=", "1", "ep_dict", "=", "upload_request", "[", "'episode'", "]", "ep_dict", "[", "'episode_id'", "]", "=", "Episode", ".", "ConstructEpisodeId", "(", "ep_dict", "[", "'timestamp'", "]", ",", "user", ".", "webapp_dev_id", ",", "unique_id", ")", "unique_id", "+=", "1", "for", "ph_dict", "in", "upload_request", "[", "'photos'", "]", ":", "ph_dict", "[", "'photo_id'", "]", "=", "Photo", ".", "ConstructPhotoId", "(", "ph_dict", "[", "'timestamp'", "]", ",", "user", ".", "webapp_dev_id", ",", "unique_id", ")", "unique_id", "+=", "1" ]
assigns activity .
train
false
2,848
def TestApp(name=None, config=None, enable_logging=False, set_as_current=False, log=UnitLogging, backend=None, broker=None, **kwargs):
    from . import tasks
    config = dict(deepcopy(DEFAULT_TEST_CONFIG), **(config or {}))
    if (broker is not None):
        config.pop(u'broker_url', None)
    if (backend is not None):
        config.pop(u'result_backend', None)
    log = (None if enable_logging else log)
    test_app = Celery((name or u'celery.tests'), set_as_current=set_as_current, log=log, broker=broker, backend=backend, **kwargs)
    test_app.add_defaults(config)
    return test_app
[ "def", "TestApp", "(", "name", "=", "None", ",", "config", "=", "None", ",", "enable_logging", "=", "False", ",", "set_as_current", "=", "False", ",", "log", "=", "UnitLogging", ",", "backend", "=", "None", ",", "broker", "=", "None", ",", "**", "kwargs", ")", ":", "from", ".", "import", "tasks", "config", "=", "dict", "(", "deepcopy", "(", "DEFAULT_TEST_CONFIG", ")", ",", "**", "(", "config", "or", "{", "}", ")", ")", "if", "(", "broker", "is", "not", "None", ")", ":", "config", ".", "pop", "(", "u'broker_url'", ",", "None", ")", "if", "(", "backend", "is", "not", "None", ")", ":", "config", ".", "pop", "(", "u'result_backend'", ",", "None", ")", "log", "=", "(", "None", "if", "enable_logging", "else", "log", ")", "test_app", "=", "Celery", "(", "(", "name", "or", "u'celery.tests'", ")", ",", "set_as_current", "=", "set_as_current", ",", "log", "=", "log", ",", "broker", "=", "broker", ",", "backend", "=", "backend", ",", "**", "kwargs", ")", "test_app", ".", "add_defaults", "(", "config", ")", "return", "test_app" ]
app used for testing .
train
false
2,851
def LocatePath(fileName, searchPaths):
    import os
    return os.path.abspath(os.path.split(LocateFileName(fileName, searchPaths))[0])
[ "def", "LocatePath", "(", "fileName", ",", "searchPaths", ")", ":", "import", "os", "return", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "split", "(", "LocateFileName", "(", "fileName", ",", "searchPaths", ")", ")", "[", "0", "]", ")" ]
like locatefilename .
train
false
2,852
def crypt(word, salt=None, iterations=None):
    if (salt is None):
        salt = _makesalt()
    if isinstance(salt, unicode):
        salt = salt.encode('us-ascii')
    if (not isinstance(salt, str)):
        raise TypeError('salt must be a string')
    if isinstance(word, unicode):
        word = word.encode('UTF-8')
    if (not isinstance(word, str)):
        raise TypeError('word must be a string or unicode')
    if salt.startswith('$p5k2$'):
        (iterations, salt, dummy) = salt.split('$')[2:5]
        if (iterations == ''):
            iterations = 400
        else:
            converted = int(iterations, 16)
            if (iterations != ('%x' % converted)):
                raise ValueError('Invalid salt')
            iterations = converted
            if (not (iterations >= 1)):
                raise ValueError('Invalid salt')
    allowed = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./'
    for ch in salt:
        if (ch not in allowed):
            raise ValueError(('Illegal character %r in salt' % (ch,)))
    if ((iterations is None) or (iterations == 400)):
        iterations = 400
        salt = ('$p5k2$$' + salt)
    else:
        salt = ('$p5k2$%x$%s' % (iterations, salt))
    rawhash = PBKDF2(word, salt, iterations).read(24)
    return ((salt + '$') + b64encode(rawhash, './'))
[ "def", "crypt", "(", "word", ",", "salt", "=", "None", ",", "iterations", "=", "None", ")", ":", "if", "(", "salt", "is", "None", ")", ":", "salt", "=", "_makesalt", "(", ")", "if", "isinstance", "(", "salt", ",", "unicode", ")", ":", "salt", "=", "salt", ".", "encode", "(", "'us-ascii'", ")", "if", "(", "not", "isinstance", "(", "salt", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "'salt must be a string'", ")", "if", "isinstance", "(", "word", ",", "unicode", ")", ":", "word", "=", "word", ".", "encode", "(", "'UTF-8'", ")", "if", "(", "not", "isinstance", "(", "word", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "'word must be a string or unicode'", ")", "if", "salt", ".", "startswith", "(", "'$p5k2$'", ")", ":", "(", "iterations", ",", "salt", ",", "dummy", ")", "=", "salt", ".", "split", "(", "'$'", ")", "[", "2", ":", "5", "]", "if", "(", "iterations", "==", "''", ")", ":", "iterations", "=", "400", "else", ":", "converted", "=", "int", "(", "iterations", ",", "16", ")", "if", "(", "iterations", "!=", "(", "'%x'", "%", "converted", ")", ")", ":", "raise", "ValueError", "(", "'Invalid salt'", ")", "iterations", "=", "converted", "if", "(", "not", "(", "iterations", ">=", "1", ")", ")", ":", "raise", "ValueError", "(", "'Invalid salt'", ")", "allowed", "=", "'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./'", "for", "ch", "in", "salt", ":", "if", "(", "ch", "not", "in", "allowed", ")", ":", "raise", "ValueError", "(", "(", "'Illegal character %r in salt'", "%", "(", "ch", ",", ")", ")", ")", "if", "(", "(", "iterations", "is", "None", ")", "or", "(", "iterations", "==", "400", ")", ")", ":", "iterations", "=", "400", "salt", "=", "(", "'$p5k2$$'", "+", "salt", ")", "else", ":", "salt", "=", "(", "'$p5k2$%x$%s'", "%", "(", "iterations", ",", "salt", ")", ")", "rawhash", "=", "PBKDF2", "(", "word", ",", "salt", ",", "iterations", ")", ".", "read", "(", "24", ")", "return", "(", "(", "salt", "+", "'$'", ")", "+", "b64encode", "(", "rawhash", ",", "'./'", ")", ")" ]
method to encrypt streams using a pdf security handler .
train
false
2,853
def _update_rs_with_primary_from_member(sds, replica_set_name, server_description):
    assert (replica_set_name is not None)
    if (replica_set_name != server_description.replica_set_name):
        sds.pop(server_description.address)
    elif (server_description.me and (server_description.address != server_description.me)):
        sds.pop(server_description.address)
    return _check_has_primary(sds)
[ "def", "_update_rs_with_primary_from_member", "(", "sds", ",", "replica_set_name", ",", "server_description", ")", ":", "assert", "(", "replica_set_name", "is", "not", "None", ")", "if", "(", "replica_set_name", "!=", "server_description", ".", "replica_set_name", ")", ":", "sds", ".", "pop", "(", "server_description", ".", "address", ")", "elif", "(", "server_description", ".", "me", "and", "(", "server_description", ".", "address", "!=", "server_description", ".", "me", ")", ")", ":", "sds", ".", "pop", "(", "server_description", ".", "address", ")", "return", "_check_has_primary", "(", "sds", ")" ]
rs with known primary .
train
true
2,854
@mock_ec2
def test_request_spot_instances_fulfilled():
    conn = boto.ec2.connect_to_region(u'us-east-1')
    request = conn.request_spot_instances(price=0.5, image_id=u'ami-abcd1234')
    requests = conn.get_all_spot_instance_requests()
    requests.should.have.length_of(1)
    request = requests[0]
    request.state.should.equal(u'open')
    get_model(u'SpotInstanceRequest')[0].state = u'active'
    requests = conn.get_all_spot_instance_requests()
    requests.should.have.length_of(1)
    request = requests[0]
    request.state.should.equal(u'active')
[ "@", "mock_ec2", "def", "test_request_spot_instances_fulfilled", "(", ")", ":", "conn", "=", "boto", ".", "ec2", ".", "connect_to_region", "(", "u'us-east-1'", ")", "request", "=", "conn", ".", "request_spot_instances", "(", "price", "=", "0.5", ",", "image_id", "=", "u'ami-abcd1234'", ")", "requests", "=", "conn", ".", "get_all_spot_instance_requests", "(", ")", "requests", ".", "should", ".", "have", ".", "length_of", "(", "1", ")", "request", "=", "requests", "[", "0", "]", "request", ".", "state", ".", "should", ".", "equal", "(", "u'open'", ")", "get_model", "(", "u'SpotInstanceRequest'", ")", "[", "0", "]", ".", "state", "=", "u'active'", "requests", "=", "conn", ".", "get_all_spot_instance_requests", "(", ")", "requests", ".", "should", ".", "have", ".", "length_of", "(", "1", ")", "request", "=", "requests", "[", "0", "]", "request", ".", "state", ".", "should", ".", "equal", "(", "u'active'", ")" ]
test that moto correctly fullfills a spot instance request .
train
false
2,855
def compute_chunk(source, target, chunk, chunk_expr, parts):
    (source_part, target_part) = parts
    part = source[source_part]
    result = compute(chunk_expr, {chunk: part}, return_type='native')
    target[target_part] = result
[ "def", "compute_chunk", "(", "source", ",", "target", ",", "chunk", ",", "chunk_expr", ",", "parts", ")", ":", "(", "source_part", ",", "target_part", ")", "=", "parts", "part", "=", "source", "[", "source_part", "]", "result", "=", "compute", "(", "chunk_expr", ",", "{", "chunk", ":", "part", "}", ",", "return_type", "=", "'native'", ")", "target", "[", "target_part", "]", "=", "result" ]
pull out a part .
train
false
2,856
def test_language_is_english_by_default():
    lang = Language()
    assert_equals(lang.code, 'en')
    assert_equals(lang.name, 'English')
    assert_equals(lang.native, 'English')
    assert_equals(lang.feature, 'Feature')
    assert_equals(lang.scenario, 'Scenario')
    assert_equals(lang.examples, 'Examples|Scenarios')
    assert_equals(lang.scenario_outline, 'Scenario Outline')
[ "def", "test_language_is_english_by_default", "(", ")", ":", "lang", "=", "Language", "(", ")", "assert_equals", "(", "lang", ".", "code", ",", "'en'", ")", "assert_equals", "(", "lang", ".", "name", ",", "'English'", ")", "assert_equals", "(", "lang", ".", "native", ",", "'English'", ")", "assert_equals", "(", "lang", ".", "feature", ",", "'Feature'", ")", "assert_equals", "(", "lang", ".", "scenario", ",", "'Scenario'", ")", "assert_equals", "(", "lang", ".", "examples", ",", "'Examples|Scenarios'", ")", "assert_equals", "(", "lang", ".", "scenario_outline", ",", "'Scenario Outline'", ")" ]
language class is english by default .
train
false
2,857
def handle_clear(request, basket, **kwargs):
    basket.clear_all()
    return {u'ok': True}
[ "def", "handle_clear", "(", "request", ",", "basket", ",", "**", "kwargs", ")", ":", "basket", ".", "clear_all", "(", ")", "return", "{", "u'ok'", ":", "True", "}" ]
handle fully clearing the basket .
train
false
2,858
@gen.engine
def Cat(args, callback):
    assert (len(args) == 1)
    filename = args[0]
    resolved = store_utils.ParseFullPath(filename)
    assert (resolved is not None), ('Cannot determine bucket from %s' % filename)
    (bucket, path) = resolved
    store = ObjectStore.GetInstance(bucket)
    contents = (yield gen.Task(store_utils.GetFileContents, store, path))
    print contents
    callback()
[ "@", "gen", ".", "engine", "def", "Cat", "(", "args", ",", "callback", ")", ":", "assert", "(", "len", "(", "args", ")", "==", "1", ")", "filename", "=", "args", "[", "0", "]", "resolved", "=", "store_utils", ".", "ParseFullPath", "(", "filename", ")", "assert", "(", "resolved", "is", "not", "None", ")", ",", "(", "'Cannot determine bucket from %s'", "%", "filename", ")", "(", "bucket", ",", "path", ")", "=", "resolved", "store", "=", "ObjectStore", ".", "GetInstance", "(", "bucket", ")", "contents", "=", "(", "yield", "gen", ".", "Task", "(", "store_utils", ".", "GetFileContents", ",", "store", ",", "path", ")", ")", "print", "contents", "callback", "(", ")" ]
cat a single file .
train
false
2,859
def _unsugar_count_from(**kw):
    count_from = kw.pop('count_from', None)
    if ((kw.get('ordering_func', None) is None) and (count_from is not None)):
        if (count_from == 0):
            kw['ordering_func'] = count_from_0
        elif (count_from == 1):
            kw['ordering_func'] = count_from_1
        else:
            kw['ordering_func'] = count_from_n_factory(count_from)
    return kw
[ "def", "_unsugar_count_from", "(", "**", "kw", ")", ":", "count_from", "=", "kw", ".", "pop", "(", "'count_from'", ",", "None", ")", "if", "(", "(", "kw", ".", "get", "(", "'ordering_func'", ",", "None", ")", "is", "None", ")", "and", "(", "count_from", "is", "not", "None", ")", ")", ":", "if", "(", "count_from", "==", "0", ")", ":", "kw", "[", "'ordering_func'", "]", "=", "count_from_0", "elif", "(", "count_from", "==", "1", ")", ":", "kw", "[", "'ordering_func'", "]", "=", "count_from_1", "else", ":", "kw", "[", "'ordering_func'", "]", "=", "count_from_n_factory", "(", "count_from", ")", "return", "kw" ]
builds counting functions from keyword arguments .
train
false
2,860
def assign_items(items, tracks):
    costs = []
    for item in items:
        row = []
        for (i, track) in enumerate(tracks):
            row.append(track_distance(item, track))
        costs.append(row)
    matching = Munkres().compute(costs)
    mapping = dict(((items[i], tracks[j]) for (i, j) in matching))
    extra_items = list((set(items) - set(mapping.keys())))
    extra_items.sort(key=(lambda i: (i.disc, i.track, i.title)))
    extra_tracks = list((set(tracks) - set(mapping.values())))
    extra_tracks.sort(key=(lambda t: (t.index, t.title)))
    return (mapping, extra_items, extra_tracks)
[ "def", "assign_items", "(", "items", ",", "tracks", ")", ":", "costs", "=", "[", "]", "for", "item", "in", "items", ":", "row", "=", "[", "]", "for", "(", "i", ",", "track", ")", "in", "enumerate", "(", "tracks", ")", ":", "row", ".", "append", "(", "track_distance", "(", "item", ",", "track", ")", ")", "costs", ".", "append", "(", "row", ")", "matching", "=", "Munkres", "(", ")", ".", "compute", "(", "costs", ")", "mapping", "=", "dict", "(", "(", "(", "items", "[", "i", "]", ",", "tracks", "[", "j", "]", ")", "for", "(", "i", ",", "j", ")", "in", "matching", ")", ")", "extra_items", "=", "list", "(", "(", "set", "(", "items", ")", "-", "set", "(", "mapping", ".", "keys", "(", ")", ")", ")", ")", "extra_items", ".", "sort", "(", "key", "=", "(", "lambda", "i", ":", "(", "i", ".", "disc", ",", "i", ".", "track", ",", "i", ".", "title", ")", ")", ")", "extra_tracks", "=", "list", "(", "(", "set", "(", "tracks", ")", "-", "set", "(", "mapping", ".", "values", "(", ")", ")", ")", ")", "extra_tracks", ".", "sort", "(", "key", "=", "(", "lambda", "t", ":", "(", "t", ".", "index", ",", "t", ".", "title", ")", ")", ")", "return", "(", "mapping", ",", "extra_items", ",", "extra_tracks", ")" ]
given a list of items and a list of trackinfo objects .
train
false
2,861
def layer_from_config(config, custom_objects=None):
    if custom_objects:
        get_custom_objects().update(custom_objects)
    class_name = config['class_name']
    if (class_name == 'Sequential'):
        layer_class = Sequential
    elif (class_name in ['Model', 'Container']):
        layer_class = Model
    else:
        layer_class = get_from_module(class_name, globals(), 'layer', instantiate=False)
    arg_spec = inspect.getargspec(layer_class.from_config)
    if ('custom_objects' in arg_spec.args):
        return layer_class.from_config(config['config'], custom_objects=custom_objects)
    else:
        return layer_class.from_config(config['config'])
[ "def", "layer_from_config", "(", "config", ",", "custom_objects", "=", "None", ")", ":", "if", "custom_objects", ":", "get_custom_objects", "(", ")", ".", "update", "(", "custom_objects", ")", "class_name", "=", "config", "[", "'class_name'", "]", "if", "(", "class_name", "==", "'Sequential'", ")", ":", "layer_class", "=", "Sequential", "elif", "(", "class_name", "in", "[", "'Model'", ",", "'Container'", "]", ")", ":", "layer_class", "=", "Model", "else", ":", "layer_class", "=", "get_from_module", "(", "class_name", ",", "globals", "(", ")", ",", "'layer'", ",", "instantiate", "=", "False", ")", "arg_spec", "=", "inspect", ".", "getargspec", "(", "layer_class", ".", "from_config", ")", "if", "(", "'custom_objects'", "in", "arg_spec", ".", "args", ")", ":", "return", "layer_class", ".", "from_config", "(", "config", "[", "'config'", "]", ",", "custom_objects", "=", "custom_objects", ")", "else", ":", "return", "layer_class", ".", "from_config", "(", "config", "[", "'config'", "]", ")" ]
instantiate a layer from a config dictionary .
train
false
2,862
def CDL3STARSINSOUTH(barDs, count):
    return call_talib_with_ohlc(barDs, count, talib.CDL3STARSINSOUTH)
[ "def", "CDL3STARSINSOUTH", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDL3STARSINSOUTH", ")" ]
three stars in the south .
train
false
2,864
def setup_pipeline(conf, transformer_manager=None):
    default = extension.ExtensionManager('ceilometer.transformer')
    cfg_file = conf.pipeline_cfg_file
    return PipelineManager(conf, cfg_file, (transformer_manager or default), SAMPLE_TYPE)
[ "def", "setup_pipeline", "(", "conf", ",", "transformer_manager", "=", "None", ")", ":", "default", "=", "extension", ".", "ExtensionManager", "(", "'ceilometer.transformer'", ")", "cfg_file", "=", "conf", ".", "pipeline_cfg_file", "return", "PipelineManager", "(", "conf", ",", "cfg_file", ",", "(", "transformer_manager", "or", "default", ")", ",", "SAMPLE_TYPE", ")" ]
setup pipeline manager according to yaml config file .
train
false
2,865
def openExplorerPath(filename):
    if ((sys.platform == 'win32') or (sys.platform == 'cygwin')):
        subprocess.Popen(('explorer "%s"' % filename))
    if (sys.platform == 'darwin'):
        subprocess.Popen(['open', filename])
    if sys.platform.startswith('linux'):
        if os.path.isfile('/usr/bin/xdg-open'):
            subprocess.Popen(['/usr/bin/xdg-open', filename])
[ "def", "openExplorerPath", "(", "filename", ")", ":", "if", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "or", "(", "sys", ".", "platform", "==", "'cygwin'", ")", ")", ":", "subprocess", ".", "Popen", "(", "(", "'explorer \"%s\"'", "%", "filename", ")", ")", "if", "(", "sys", ".", "platform", "==", "'darwin'", ")", ":", "subprocess", ".", "Popen", "(", "[", "'open'", ",", "filename", "]", ")", "if", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "'/usr/bin/xdg-open'", ")", ":", "subprocess", ".", "Popen", "(", "[", "'/usr/bin/xdg-open'", ",", "filename", "]", ")" ]
open a file dialog inside a directory .
train
false
2,868
def get_store(path, **kwargs):
    return HDFStore(path, **kwargs)
[ "def", "get_store", "(", "path", ",", "**", "kwargs", ")", ":", "return", "HDFStore", "(", "path", ",", "**", "kwargs", ")" ]
load the piston oauth store .
train
false
2,869
def attach_translations(collections):
    attach_trans_dict(Collection, collections)
[ "def", "attach_translations", "(", "collections", ")", ":", "attach_trans_dict", "(", "Collection", ",", "collections", ")" ]
put all translations into a translations dict .
train
false
2,870
def format_iso8601(obj):
    return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
[ "def", "format_iso8601", "(", "obj", ")", ":", "return", "obj", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")" ]
format a datetime object for iso8601 .
train
false
2,871
def _local_tasks():
    if (getattr(_local, 'tasks', None) is None):
        _local.tasks = set()
    return _local.tasks
[ "def", "_local_tasks", "(", ")", ":", "if", "(", "getattr", "(", "_local", ",", "'tasks'", ",", "None", ")", "is", "None", ")", ":", "_local", ".", "tasks", "=", "set", "(", ")", "return", "_local", ".", "tasks" ]
return the threadlocal set of indexing tasks .
train
false
2,873
def clear_html_element(text, preserve=None):
    if (not preserve):
        p = re.compile('<[^>]*>')
        return p.sub('', text)
    if isinstance(preserve, basestring):
        preserve = [preserve]
    p = MyHTMLParser.parse(text, preserve)
    return p
[ "def", "clear_html_element", "(", "text", ",", "preserve", "=", "None", ")", ":", "if", "(", "not", "preserve", ")", ":", "p", "=", "re", ".", "compile", "(", "'<[^>]*>'", ")", "return", "p", ".", "sub", "(", "''", ",", "text", ")", "if", "isinstance", "(", "preserve", ",", "basestring", ")", ":", "preserve", "=", "[", "preserve", "]", "p", "=", "MyHTMLParser", ".", "parse", "(", "text", ",", "preserve", ")", "return", "p" ]
clear the html element in text .
train
false
2,874
def hpauth(u, p):
    global hpuser, hppassword, mb_auth
    hpuser = u
    hppassword = p
    mb_auth = True
[ "def", "hpauth", "(", "u", ",", "p", ")", ":", "global", "hpuser", ",", "hppassword", ",", "mb_auth", "hpuser", "=", "u", "hppassword", "=", "p", "mb_auth", "=", "True" ]
set the username and password to be used in subsequent queries to the musicbrainz xml api that require authentication .
train
false
2,876
@nox.parametrize('sample', GAE_STANDARD_SAMPLES)
def session_gae(session, sample):
    session.interpreter = 'python2.7'
    session.install(GCP_REPO_TOOLS_REQ)
    _setup_appengine_sdk(session)
    if (not os.path.isdir(os.path.join(sample, 'lib'))):
        os.mkdir(os.path.join(sample, 'lib'))
    _session_tests(session, sample)
[ "@", "nox", ".", "parametrize", "(", "'sample'", ",", "GAE_STANDARD_SAMPLES", ")", "def", "session_gae", "(", "session", ",", "sample", ")", ":", "session", ".", "interpreter", "=", "'python2.7'", "session", ".", "install", "(", "GCP_REPO_TOOLS_REQ", ")", "_setup_appengine_sdk", "(", "session", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "sample", ",", "'lib'", ")", ")", ")", ":", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "sample", ",", "'lib'", ")", ")", "_session_tests", "(", "session", ",", "sample", ")" ]
runs py .
train
false
2,877
def handle_es_errors(template, status_code=503):
    def handler(fun):
        @wraps(fun)
        def _handler(request, *args, **kwargs):
            try:
                return fun(request, *args, **kwargs)
            except ES_EXCEPTIONS as exc:
                is_json = (request.GET.get('format') == 'json')
                callback = request.GET.get('callback', '').strip()
                content_type = ('application/x-javascript' if callback else 'application/json')
                if is_json:
                    return HttpResponse(json.dumps({'error': _('Search Unavailable')}), content_type=content_type, status=status_code)
                if callable(template):
                    actual_template = template(request, *args, **kwargs)
                else:
                    actual_template = template
                log.exception(exc)
                return render(request, actual_template, status=503)
        return _handler
    return handler
[ "def", "handle_es_errors", "(", "template", ",", "status_code", "=", "503", ")", ":", "def", "handler", "(", "fun", ")", ":", "@", "wraps", "(", "fun", ")", "def", "_handler", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "fun", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "except", "ES_EXCEPTIONS", "as", "exc", ":", "is_json", "=", "(", "request", ".", "GET", ".", "get", "(", "'format'", ")", "==", "'json'", ")", "callback", "=", "request", ".", "GET", ".", "get", "(", "'callback'", ",", "''", ")", ".", "strip", "(", ")", "content_type", "=", "(", "'application/x-javascript'", "if", "callback", "else", "'application/json'", ")", "if", "is_json", ":", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'error'", ":", "_", "(", "'Search Unavailable'", ")", "}", ")", ",", "content_type", "=", "content_type", ",", "status", "=", "status_code", ")", "if", "callable", "(", "template", ")", ":", "actual_template", "=", "template", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "else", ":", "actual_template", "=", "template", "log", ".", "exception", "(", "exc", ")", "return", "render", "(", "request", ",", "actual_template", ",", "status", "=", "503", ")", "return", "_handler", "return", "handler" ]
handles elasticsearch exceptions for views wrap the entire view in this and dont worry about elasticsearch exceptions again! :arg template: template path string or function to generate the template path string for html requests :arg status_code: status code to return :returns: content-type-appropriate httpresponse .
train
false
2,878
def addHook(hook, func):
    if (not _hooks.get(hook, None)):
        _hooks[hook] = []
    if (func not in _hooks[hook]):
        _hooks[hook].append(func)
[ "def", "addHook", "(", "hook", ",", "func", ")", ":", "if", "(", "not", "_hooks", ".", "get", "(", "hook", ",", "None", ")", ")", ":", "_hooks", "[", "hook", "]", "=", "[", "]", "if", "(", "func", "not", "in", "_hooks", "[", "hook", "]", ")", ":", "_hooks", "[", "hook", "]", ".", "append", "(", "func", ")" ]
add a function to hook .
train
false
2,879
def match_column_identifier(column_names, c, column_offset=1):
    if (isinstance(c, six.string_types) and (not c.isdigit()) and (c in column_names)):
        return column_names.index(c)
    else:
        try:
            c = (int(c) - column_offset)
        except:
            raise ColumnIdentifierError(("Column '%s' is invalid. It is neither an integer nor a column name. Column names are: %s" % (c, repr(column_names)[1:(-1)])))
        if (c < 0):
            raise ColumnIdentifierError('Column 0 is invalid. Columns are 1-based.')
        if (c >= len(column_names)):
            raise ColumnIdentifierError(("Column %i is invalid. The last column is '%s' at index %i." % (c, column_names[(-1)], (len(column_names) - 1))))
    return c
[ "def", "match_column_identifier", "(", "column_names", ",", "c", ",", "column_offset", "=", "1", ")", ":", "if", "(", "isinstance", "(", "c", ",", "six", ".", "string_types", ")", "and", "(", "not", "c", ".", "isdigit", "(", ")", ")", "and", "(", "c", "in", "column_names", ")", ")", ":", "return", "column_names", ".", "index", "(", "c", ")", "else", ":", "try", ":", "c", "=", "(", "int", "(", "c", ")", "-", "column_offset", ")", "except", ":", "raise", "ColumnIdentifierError", "(", "(", "\"Column '%s' is invalid. It is neither an integer nor a column name. Column names are: %s\"", "%", "(", "c", ",", "repr", "(", "column_names", ")", "[", "1", ":", "(", "-", "1", ")", "]", ")", ")", ")", "if", "(", "c", "<", "0", ")", ":", "raise", "ColumnIdentifierError", "(", "'Column 0 is invalid. Columns are 1-based.'", ")", "if", "(", "c", ">=", "len", "(", "column_names", ")", ")", ":", "raise", "ColumnIdentifierError", "(", "(", "\"Column %i is invalid. The last column is '%s' at index %i.\"", "%", "(", "c", ",", "column_names", "[", "(", "-", "1", ")", "]", ",", "(", "len", "(", "column_names", ")", "-", "1", ")", ")", ")", ")", "return", "c" ]
determine what column a single column id matches in a series of column names .
train
false
2,880
def test_terrain_import_exception():
    string = 'Lettuce has tried to load the conventional environment module "terrain"\nbut it has errors, check its contents and try to run lettuce again.\n\nOriginal traceback below:\n\n'
    mox = Mox()
    mox.StubOutWithMock(lettuce.fs, 'FileSystem')
    mox.StubOutWithMock(lettuce.exceptions, 'traceback')
    mox.StubOutWithMock(lettuce.sys, 'stderr')
    exc = Exception('foo bar')
    lettuce.fs.FileSystem._import('terrain').AndRaise(exc)
    lettuce.exceptions.traceback.format_exc(exc).AndReturn('I AM THE TRACEBACK FOR IMPORT ERROR')
    lettuce.sys.stderr.write(string)
    lettuce.sys.stderr.write('I AM THE TRACEBACK FOR IMPORT ERROR')
    mox.ReplayAll()
    try:
        reload(lettuce)
    except LettuceRunnerError:
        mox.VerifyAll()
    finally:
        mox.UnsetStubs()
[ "def", "test_terrain_import_exception", "(", ")", ":", "string", "=", "'Lettuce has tried to load the conventional environment module \"terrain\"\\nbut it has errors, check its contents and try to run lettuce again.\\n\\nOriginal traceback below:\\n\\n'", "mox", "=", "Mox", "(", ")", "mox", ".", "StubOutWithMock", "(", "lettuce", ".", "fs", ",", "'FileSystem'", ")", "mox", ".", "StubOutWithMock", "(", "lettuce", ".", "exceptions", ",", "'traceback'", ")", "mox", ".", "StubOutWithMock", "(", "lettuce", ".", "sys", ",", "'stderr'", ")", "exc", "=", "Exception", "(", "'foo bar'", ")", "lettuce", ".", "fs", ".", "FileSystem", ".", "_import", "(", "'terrain'", ")", ".", "AndRaise", "(", "exc", ")", "lettuce", ".", "exceptions", ".", "traceback", ".", "format_exc", "(", "exc", ")", ".", "AndReturn", "(", "'I AM THE TRACEBACK FOR IMPORT ERROR'", ")", "lettuce", ".", "sys", ".", "stderr", ".", "write", "(", "string", ")", "lettuce", ".", "sys", ".", "stderr", ".", "write", "(", "'I AM THE TRACEBACK FOR IMPORT ERROR'", ")", "mox", ".", "ReplayAll", "(", ")", "try", ":", "reload", "(", "lettuce", ")", "except", "LettuceRunnerError", ":", "mox", ".", "VerifyAll", "(", ")", "finally", ":", "mox", ".", "UnsetStubs", "(", ")" ]
lettuce error tries to import .
train
false
2,881
def assemble_distance_matrix(dm_components):
    data = {}
    for c in dm_components:
        col_ids = []
        for line in c:
            fields = line.strip().split()
            if fields:
                if (not col_ids):
                    col_ids = fields
                else:
                    sid = fields[0]
                    data[sid] = dict(zip(col_ids, fields[1:]))
    labels = data.keys()
    dm = []
    for l1 in labels:
        dm.append([float(data[l1][l2]) for l2 in labels])
    dm = format_distance_matrix(labels, dm)
    return dm
[ "def", "assemble_distance_matrix", "(", "dm_components", ")", ":", "data", "=", "{", "}", "for", "c", "in", "dm_components", ":", "col_ids", "=", "[", "]", "for", "line", "in", "c", ":", "fields", "=", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "if", "fields", ":", "if", "(", "not", "col_ids", ")", ":", "col_ids", "=", "fields", "else", ":", "sid", "=", "fields", "[", "0", "]", "data", "[", "sid", "]", "=", "dict", "(", "zip", "(", "col_ids", ",", "fields", "[", "1", ":", "]", ")", ")", "labels", "=", "data", ".", "keys", "(", ")", "dm", "=", "[", "]", "for", "l1", "in", "labels", ":", "dm", ".", "append", "(", "[", "float", "(", "data", "[", "l1", "]", "[", "l2", "]", ")", "for", "l2", "in", "labels", "]", ")", "dm", "=", "format_distance_matrix", "(", "labels", ",", "dm", ")", "return", "dm" ]
assemble distance matrix components into a complete dm string .
train
false
2,882
def xmlFile(path, mode='r'):
    fh = file(path, mode)
    while (fh.read(1) != '<'):
        pass
    fh.seek((-1), 1)
    return fh
[ "def", "xmlFile", "(", "path", ",", "mode", "=", "'r'", ")", ":", "fh", "=", "file", "(", "path", ",", "mode", ")", "while", "(", "fh", ".", "read", "(", "1", ")", "!=", "'<'", ")", ":", "pass", "fh", ".", "seek", "(", "(", "-", "1", ")", ",", "1", ")", "return", "fh" ]
lxml cannot parse xml files starting with a bom in case such xml file is used .
train
true
2,883
def CreateIndex(index):
    resp = api_base_pb.Integer64Proto()
    resp = _Call('CreateIndex', index, resp)
    return resp.value()
[ "def", "CreateIndex", "(", "index", ")", ":", "resp", "=", "api_base_pb", ".", "Integer64Proto", "(", ")", "resp", "=", "_Call", "(", "'CreateIndex'", ",", "index", ",", "resp", ")", "return", "resp", ".", "value", "(", ")" ]
creates a new composite index in the datastore for this app .
train
false
2,886
def apply_path_wildcard(stats, path_wildcard):
    paths = Paths(tuple((Path(normpath(join(path_wildcard.symbolic_path, basename(s.path))), s) for s in stats.dependencies if fnmatch(basename(s.path), path_wildcard.wildcard))))
    return PathsExpansion(paths, tuple())
[ "def", "apply_path_wildcard", "(", "stats", ",", "path_wildcard", ")", ":", "paths", "=", "Paths", "(", "tuple", "(", "(", "Path", "(", "normpath", "(", "join", "(", "path_wildcard", ".", "symbolic_path", ",", "basename", "(", "s", ".", "path", ")", ")", ")", ",", "s", ")", "for", "s", "in", "stats", ".", "dependencies", "if", "fnmatch", "(", "basename", "(", "s", ".", "path", ")", ",", "path_wildcard", ".", "wildcard", ")", ")", ")", ")", "return", "PathsExpansion", "(", "paths", ",", "tuple", "(", ")", ")" ]
filter the given directorylisting object using the given pathwildcard .
train
false
2,887
def _lookup_identifier_method():
    if (not hasattr(settings, u'HAYSTACK_IDENTIFIER_METHOD')):
        return default_get_identifier
    (module_path, method_name) = settings.HAYSTACK_IDENTIFIER_METHOD.rsplit(u'.', 1)
    try:
        module = importlib.import_module(module_path)
    except ImportError:
        raise ImportError((u"Unable to import module '%s' provided for HAYSTACK_IDENTIFIER_METHOD." % module_path))
    identifier_method = getattr(module, method_name, None)
    if (not identifier_method):
        raise AttributeError((u"Provided method '%s' for HAYSTACK_IDENTIFIER_METHOD does not exist in '%s'." % (method_name, module_path)))
    return identifier_method
[ "def", "_lookup_identifier_method", "(", ")", ":", "if", "(", "not", "hasattr", "(", "settings", ",", "u'HAYSTACK_IDENTIFIER_METHOD'", ")", ")", ":", "return", "default_get_identifier", "(", "module_path", ",", "method_name", ")", "=", "settings", ".", "HAYSTACK_IDENTIFIER_METHOD", ".", "rsplit", "(", "u'.'", ",", "1", ")", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "module_path", ")", "except", "ImportError", ":", "raise", "ImportError", "(", "(", "u\"Unable to import module '%s' provided for HAYSTACK_IDENTIFIER_METHOD.\"", "%", "module_path", ")", ")", "identifier_method", "=", "getattr", "(", "module", ",", "method_name", ",", "None", ")", "if", "(", "not", "identifier_method", ")", ":", "raise", "AttributeError", "(", "(", "u\"Provided method '%s' for HAYSTACK_IDENTIFIER_METHOD does not exist in '%s'.\"", "%", "(", "method_name", ",", "module_path", ")", ")", ")", "return", "identifier_method" ]
if the user has set haystack_identifier_method .
train
false
2,888
def install_scipy():
    chdir(SRC_DIR)
    apt_command('build-dep python-scipy')
    if glob.glob('*scipy*.deb'):
        run_command('dpkg -i *scipy*.deb')
        return
    apt_command('source python-scipy')
    chdir('python-scipy*')
    sitecfg = open('site.cfg', 'w')
    sitecfg.write(NUMPY_SCIPY_SITE_CFG)
    sitecfg.close()
    _fix_atlas_rules()
    def _deb_failure_callback(retval):
        if (not glob.glob('../*numpy*.deb')):
            return False
        return True
    run_command('dpkg-buildpackage -rfakeroot -b', failure_callback=_deb_failure_callback)
    run_command('dpkg -i ../*scipy*.deb')
[ "def", "install_scipy", "(", ")", ":", "chdir", "(", "SRC_DIR", ")", "apt_command", "(", "'build-dep python-scipy'", ")", "if", "glob", ".", "glob", "(", "'*scipy*.deb'", ")", ":", "run_command", "(", "'dpkg -i *scipy*.deb'", ")", "return", "apt_command", "(", "'source python-scipy'", ")", "chdir", "(", "'python-scipy*'", ")", "sitecfg", "=", "open", "(", "'site.cfg'", ",", "'w'", ")", "sitecfg", ".", "write", "(", "NUMPY_SCIPY_SITE_CFG", ")", "sitecfg", ".", "close", "(", ")", "_fix_atlas_rules", "(", ")", "def", "_deb_failure_callback", "(", "retval", ")", ":", "if", "(", "not", "glob", ".", "glob", "(", "'../*numpy*.deb'", ")", ")", ":", "return", "False", "return", "True", "run_command", "(", "'dpkg-buildpackage -rfakeroot -b'", ",", "failure_callback", "=", "_deb_failure_callback", ")", "run_command", "(", "'dpkg -i ../*scipy*.deb'", ")" ]
docstring for install_scipy .
train
false
2,889
def generate_close_and_callback(iterable, callback, environ):
    try:
        for item in iterable:
            (yield item)
    except GeneratorExit:
        if hasattr(iterable, 'close'):
            iterable.close()
        raise
    finally:
        callback(environ)
[ "def", "generate_close_and_callback", "(", "iterable", ",", "callback", ",", "environ", ")", ":", "try", ":", "for", "item", "in", "iterable", ":", "(", "yield", "item", ")", "except", "GeneratorExit", ":", "if", "hasattr", "(", "iterable", ",", "'close'", ")", ":", "iterable", ".", "close", "(", ")", "raise", "finally", ":", "callback", "(", "environ", ")" ]
return a generator that passes through items from iterable then calls callback .
train
false
2,890
def _is_namespace_visible(context, namespace):
    if context.is_admin:
        return True
    if (namespace.get('visibility', '') == 'public'):
        return True
    if (namespace['owner'] is None):
        return True
    if (context.owner is not None):
        if (context.owner == namespace['owner']):
            return True
    return False
[ "def", "_is_namespace_visible", "(", "context", ",", "namespace", ")", ":", "if", "context", ".", "is_admin", ":", "return", "True", "if", "(", "namespace", ".", "get", "(", "'visibility'", ",", "''", ")", "==", "'public'", ")", ":", "return", "True", "if", "(", "namespace", "[", "'owner'", "]", "is", "None", ")", ":", "return", "True", "if", "(", "context", ".", "owner", "is", "not", "None", ")", ":", "if", "(", "context", ".", "owner", "==", "namespace", "[", "'owner'", "]", ")", ":", "return", "True", "return", "False" ]
return true if namespace is visible in this context .
train
false
2,891
def update_old_plan(old_plan):
    plan = []
    for line in old_plan:
        if line.startswith(u'#'):
            continue
        if (u' ' not in line):
            raise ArgumentError((u"The instruction '%s' takes at least one argument" % line))
        (instruction, arg) = line.split(u' ', 1)
        plan.append((instruction, arg))
    return plan
[ "def", "update_old_plan", "(", "old_plan", ")", ":", "plan", "=", "[", "]", "for", "line", "in", "old_plan", ":", "if", "line", ".", "startswith", "(", "u'#'", ")", ":", "continue", "if", "(", "u' '", "not", "in", "line", ")", ":", "raise", "ArgumentError", "(", "(", "u\"The instruction '%s' takes at least one argument\"", "%", "line", ")", ")", "(", "instruction", ",", "arg", ")", "=", "line", ".", "split", "(", "u' '", ",", "1", ")", "plan", ".", "append", "(", "(", "instruction", ",", "arg", ")", ")", "return", "plan" ]
update an old plan object to work with conda .
train
false
2,892
def set_color(img, coords, color, alpha=1):
    (rr, cc) = coords
    if (img.ndim == 2):
        img = img[..., np.newaxis]
    color = np.array(color, ndmin=1, copy=False)
    if (img.shape[(-1)] != color.shape[(-1)]):
        raise ValueError('Color shape ({}) must match last image dimension ({}).'.format(color.shape[0], img.shape[(-1)]))
    if np.isscalar(alpha):
        alpha = (np.ones_like(rr) * alpha)
    (rr, cc, alpha) = _coords_inside_image(rr, cc, img.shape, val=alpha)
    alpha = alpha[..., np.newaxis]
    color = (color * alpha)
    vals = (img[(rr, cc)] * (1 - alpha))
    img[(rr, cc)] = (vals + color)
[ "def", "set_color", "(", "img", ",", "coords", ",", "color", ",", "alpha", "=", "1", ")", ":", "(", "rr", ",", "cc", ")", "=", "coords", "if", "(", "img", ".", "ndim", "==", "2", ")", ":", "img", "=", "img", "[", "...", ",", "np", ".", "newaxis", "]", "color", "=", "np", ".", "array", "(", "color", ",", "ndmin", "=", "1", ",", "copy", "=", "False", ")", "if", "(", "img", ".", "shape", "[", "(", "-", "1", ")", "]", "!=", "color", ".", "shape", "[", "(", "-", "1", ")", "]", ")", ":", "raise", "ValueError", "(", "'Color shape ({}) must match last image dimension ({}).'", ".", "format", "(", "color", ".", "shape", "[", "0", "]", ",", "img", ".", "shape", "[", "(", "-", "1", ")", "]", ")", ")", "if", "np", ".", "isscalar", "(", "alpha", ")", ":", "alpha", "=", "(", "np", ".", "ones_like", "(", "rr", ")", "*", "alpha", ")", "(", "rr", ",", "cc", ",", "alpha", ")", "=", "_coords_inside_image", "(", "rr", ",", "cc", ",", "img", ".", "shape", ",", "val", "=", "alpha", ")", "alpha", "=", "alpha", "[", "...", ",", "np", ".", "newaxis", "]", "color", "=", "(", "color", "*", "alpha", ")", "vals", "=", "(", "img", "[", "(", "rr", ",", "cc", ")", "]", "*", "(", "1", "-", "alpha", ")", ")", "img", "[", "(", "rr", ",", "cc", ")", "]", "=", "(", "vals", "+", "color", ")" ]
set pixel color in the image at the given coordinates .
train
false
2,894
def _WasGypIncludeFileModified(params, files):
    if params['options'].includes:
        for include in params['options'].includes:
            if (_ToGypPath(os.path.normpath(include)) in files):
                print 'Include file modified, assuming all changed', include
                return True
    return False
[ "def", "_WasGypIncludeFileModified", "(", "params", ",", "files", ")", ":", "if", "params", "[", "'options'", "]", ".", "includes", ":", "for", "include", "in", "params", "[", "'options'", "]", ".", "includes", ":", "if", "(", "_ToGypPath", "(", "os", ".", "path", ".", "normpath", "(", "include", ")", ")", "in", "files", ")", ":", "print", "'Include file modified, assuming all changed'", ",", "include", "return", "True", "return", "False" ]
returns true if one of the files in |files| is in the set of included files .
train
false
2,895
def generate_unit_summary(namespace):
    docstring = io.StringIO()
    docstring.write(u'\n.. list-table:: Available Units\n :header-rows: 1\n :widths: 10 20 20 20 1\n\n * - Unit\n - Description\n - Represents\n - Aliases\n - SI Prefixes\n')
    for unit_summary in _iter_unit_summary(namespace):
        docstring.write(u'\n * - ``{0}``\n - {1}\n - {2}\n - {3}\n - {4!s:.1}\n'.format(*unit_summary))
    return docstring.getvalue()
[ "def", "generate_unit_summary", "(", "namespace", ")", ":", "docstring", "=", "io", ".", "StringIO", "(", ")", "docstring", ".", "write", "(", "u'\\n.. list-table:: Available Units\\n :header-rows: 1\\n :widths: 10 20 20 20 1\\n\\n * - Unit\\n - Description\\n - Represents\\n - Aliases\\n - SI Prefixes\\n'", ")", "for", "unit_summary", "in", "_iter_unit_summary", "(", "namespace", ")", ":", "docstring", ".", "write", "(", "u'\\n * - ``{0}``\\n - {1}\\n - {2}\\n - {3}\\n - {4!s:.1}\\n'", ".", "format", "(", "*", "unit_summary", ")", ")", "return", "docstring", ".", "getvalue", "(", ")" ]
generates a summary of units from a given namespace .
train
false
2,897
def _suggest_donation_if_appropriate(config, action):
    if (config.staging or (config.verb == 'renew')):
        return
    if (action not in ['renew', 'newcert']):
        return
    reporter_util = zope.component.getUtility(interfaces.IReporter)
    msg = "If you like Certbot, please consider supporting our work by:\n\nDonating to ISRG / Let's Encrypt: https://letsencrypt.org/donate\nDonating to EFF: https://eff.org/donate-le\n\n"
    reporter_util.add_message(msg, reporter_util.LOW_PRIORITY)
[ "def", "_suggest_donation_if_appropriate", "(", "config", ",", "action", ")", ":", "if", "(", "config", ".", "staging", "or", "(", "config", ".", "verb", "==", "'renew'", ")", ")", ":", "return", "if", "(", "action", "not", "in", "[", "'renew'", ",", "'newcert'", "]", ")", ":", "return", "reporter_util", "=", "zope", ".", "component", ".", "getUtility", "(", "interfaces", ".", "IReporter", ")", "msg", "=", "\"If you like Certbot, please consider supporting our work by:\\n\\nDonating to ISRG / Let's Encrypt: https://letsencrypt.org/donate\\nDonating to EFF: https://eff.org/donate-le\\n\\n\"", "reporter_util", ".", "add_message", "(", "msg", ",", "reporter_util", ".", "LOW_PRIORITY", ")" ]
potentially suggest a donation to support certbot .
train
false
2,899
def ExpandPathAttribute(src, attrib):
    path = src.get(attrib)
    if (not path):
        return [src]
    path = encoder.MaybeNarrowPath(path)
    pathlist = glob.glob(path)
    if (not pathlist):
        return [src]
    if (type(src) != types.DictionaryType):
        tmp = {}
        for key in src.keys():
            tmp[key] = src[key]
        src = tmp
    retval = []
    for path in pathlist:
        dst = src.copy()
        dst[attrib] = path
        retval.append(dst)
    return retval
[ "def", "ExpandPathAttribute", "(", "src", ",", "attrib", ")", ":", "path", "=", "src", ".", "get", "(", "attrib", ")", "if", "(", "not", "path", ")", ":", "return", "[", "src", "]", "path", "=", "encoder", ".", "MaybeNarrowPath", "(", "path", ")", "pathlist", "=", "glob", ".", "glob", "(", "path", ")", "if", "(", "not", "pathlist", ")", ":", "return", "[", "src", "]", "if", "(", "type", "(", "src", ")", "!=", "types", ".", "DictionaryType", ")", ":", "tmp", "=", "{", "}", "for", "key", "in", "src", ".", "keys", "(", ")", ":", "tmp", "[", "key", "]", "=", "src", "[", "key", "]", "src", "=", "tmp", "retval", "=", "[", "]", "for", "path", "in", "pathlist", ":", "dst", "=", "src", ".", "copy", "(", ")", "dst", "[", "attrib", "]", "=", "path", "retval", ".", "append", "(", "dst", ")", "return", "retval" ]
given a dictionary of attributes .
train
false
2,900
def scrape_video(youtube_id, format='mp4', force=False, quiet=False, callback=None):
    video_filename = ('%(id)s.%(ext)s' % {'id': youtube_id, 'ext': format})
    video_file_download_path = os.path.join(settings.CONTENT_ROOT, video_filename)
    if (os.path.exists(video_file_download_path) and (not force)):
        return
    yt_dl = youtube_dl.YoutubeDL({'outtmpl': video_file_download_path, 'quiet': quiet})
    yt_dl.add_default_info_extractors()
    if callback:
        yt_dl.add_progress_hook(callback)
    yt_dl.extract_info(('www.youtube.com/watch?v=%s' % youtube_id), download=True)
[ "def", "scrape_video", "(", "youtube_id", ",", "format", "=", "'mp4'", ",", "force", "=", "False", ",", "quiet", "=", "False", ",", "callback", "=", "None", ")", ":", "video_filename", "=", "(", "'%(id)s.%(ext)s'", "%", "{", "'id'", ":", "youtube_id", ",", "'ext'", ":", "format", "}", ")", "video_file_download_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "CONTENT_ROOT", ",", "video_filename", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "video_file_download_path", ")", "and", "(", "not", "force", ")", ")", ":", "return", "yt_dl", "=", "youtube_dl", ".", "YoutubeDL", "(", "{", "'outtmpl'", ":", "video_file_download_path", ",", "'quiet'", ":", "quiet", "}", ")", "yt_dl", ".", "add_default_info_extractors", "(", ")", "if", "callback", ":", "yt_dl", ".", "add_progress_hook", "(", "callback", ")", "yt_dl", ".", "extract_info", "(", "(", "'www.youtube.com/watch?v=%s'", "%", "youtube_id", ")", ",", "download", "=", "True", ")" ]
assumes its in the path; if not .
train
false
2,901
def variance(iterable, sample=False):
    a = (iterable if isinstance(iterable, list) else list(iterable))
    m = mean(a)
    return (sum((((x - m) ** 2) for x in a)) / ((len(a) - int(sample)) or 1))
[ "def", "variance", "(", "iterable", ",", "sample", "=", "False", ")", ":", "a", "=", "(", "iterable", "if", "isinstance", "(", "iterable", ",", "list", ")", "else", "list", "(", "iterable", ")", ")", "m", "=", "mean", "(", "a", ")", "return", "(", "sum", "(", "(", "(", "(", "x", "-", "m", ")", "**", "2", ")", "for", "x", "in", "a", ")", ")", "/", "(", "(", "len", "(", "a", ")", "-", "int", "(", "sample", ")", ")", "or", "1", ")", ")" ]
returns the variance of the given list of values .
train
false
2,902
def check_password_expired(user):
    if (not settings.ACCOUNT_PASSWORD_USE_HISTORY):
        return False
    if hasattr(user, u'password_expiry'):
        expiry = user.password_expiry.expiry
    else:
        expiry = settings.ACCOUNT_PASSWORD_EXPIRY
    if (expiry == 0):
        return False
    try:
        latest = user.password_history.latest(u'timestamp')
    except PasswordHistory.DoesNotExist:
        return False
    now = datetime.datetime.now(tz=pytz.UTC)
    expiration = (latest.timestamp + datetime.timedelta(seconds=expiry))
    if (expiration < now):
        return True
    else:
        return False
[ "def", "check_password_expired", "(", "user", ")", ":", "if", "(", "not", "settings", ".", "ACCOUNT_PASSWORD_USE_HISTORY", ")", ":", "return", "False", "if", "hasattr", "(", "user", ",", "u'password_expiry'", ")", ":", "expiry", "=", "user", ".", "password_expiry", ".", "expiry", "else", ":", "expiry", "=", "settings", ".", "ACCOUNT_PASSWORD_EXPIRY", "if", "(", "expiry", "==", "0", ")", ":", "return", "False", "try", ":", "latest", "=", "user", ".", "password_history", ".", "latest", "(", "u'timestamp'", ")", "except", "PasswordHistory", ".", "DoesNotExist", ":", "return", "False", "now", "=", "datetime", ".", "datetime", ".", "now", "(", "tz", "=", "pytz", ".", "UTC", ")", "expiration", "=", "(", "latest", ".", "timestamp", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "expiry", ")", ")", "if", "(", "expiration", "<", "now", ")", ":", "return", "True", "else", ":", "return", "False" ]
return true if password is expired and system is using password expiration .
train
true
2,903
def get_files(storage, ignore_patterns=[], location=''): (directories, files) = storage.listdir(location) for fn in files: if is_ignored(fn, ignore_patterns): continue if location: fn = os.path.join(location, fn) (yield fn) for dir in directories: if is_ignored(dir, ignore_patterns): continue if location: dir = os.path.join(location, dir) for fn in get_files(storage, ignore_patterns, dir): (yield fn)
[ "def", "get_files", "(", "storage", ",", "ignore_patterns", "=", "[", "]", ",", "location", "=", "''", ")", ":", "(", "directories", ",", "files", ")", "=", "storage", ".", "listdir", "(", "location", ")", "for", "fn", "in", "files", ":", "if", "is_ignored", "(", "fn", ",", "ignore_patterns", ")", ":", "continue", "if", "location", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "location", ",", "fn", ")", "(", "yield", "fn", ")", "for", "dir", "in", "directories", ":", "if", "is_ignored", "(", "dir", ",", "ignore_patterns", ")", ":", "continue", "if", "location", ":", "dir", "=", "os", ".", "path", ".", "join", "(", "location", ",", "dir", ")", "for", "fn", "in", "get_files", "(", "storage", ",", "ignore_patterns", ",", "dir", ")", ":", "(", "yield", "fn", ")" ]
recursively yield the files in the given storage location, skipping any that match the ignore patterns .
train
false
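A minimal usage sketch for the get_files generator above, assuming a Django FileSystemStorage and the module's is_ignored helper; the root and patterns are made up:
from django.core.files.storage import FileSystemStorage
storage = FileSystemStorage(location='/tmp/static')  # hypothetical root
for path in get_files(storage, ignore_patterns=['*.tmp', 'CVS']):
    print(path)  # yields paths relative to the storage root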
2,904
def root_dict_to_etree(d): assert (len(d) == 1), ('Incoming dict len must be exactly 1. Data: %r' % d) (key,) = d.keys() retval = etree.Element(key) for val in d.values(): break if (val is None): return retval if (isinstance(val, dict) or isinstance(val, odict)): dict_to_etree(val, retval) elif ((not isinstance(val, collections.Sized)) or isinstance(val, six.string_types)): retval.text = str(val) else: for a in val: dict_to_etree(a, retval) return retval
[ "def", "root_dict_to_etree", "(", "d", ")", ":", "assert", "(", "len", "(", "d", ")", "==", "1", ")", ",", "(", "'Incoming dict len must be exactly 1. Data: %r'", "%", "d", ")", "(", "key", ",", ")", "=", "d", ".", "keys", "(", ")", "retval", "=", "etree", ".", "Element", "(", "key", ")", "for", "val", "in", "d", ".", "values", "(", ")", ":", "break", "if", "(", "val", "is", "None", ")", ":", "return", "retval", "if", "(", "isinstance", "(", "val", ",", "dict", ")", "or", "isinstance", "(", "val", ",", "odict", ")", ")", ":", "dict_to_etree", "(", "val", ",", "retval", ")", "elif", "(", "(", "not", "isinstance", "(", "val", ",", "collections", ".", "Sized", ")", ")", "or", "isinstance", "(", "val", ",", "six", ".", "string_types", ")", ")", ":", "retval", ".", "text", "=", "str", "(", "val", ")", "else", ":", "for", "a", "in", "val", ":", "dict_to_etree", "(", "a", ",", "retval", ")", "return", "retval" ]
converts a dictionary to an xml hierarchy .
train
false
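An illustrative call to root_dict_to_etree above; it assumes the companion dict_to_etree helper is importable alongside it, and child order follows dict insertion order:
from lxml import etree
root = root_dict_to_etree({'user': {'name': 'alice', 'id': '7'}})
print(etree.tostring(root))  # b'<user><name>alice</name><id>7</id></user>'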
2,905
def parse_number(string): return get_i18n().parse_number(string)
[ "def", "parse_number", "(", "string", ")", ":", "return", "get_i18n", "(", ")", ".", "parse_number", "(", "string", ")" ]
parses a localized number from a string; see :meth:`I18n.parse_number` .
train
false
2,906
def isdir(path): try: st = os.stat(path) except os.error: return False return stat.S_ISDIR(st.st_mode)
[ "def", "isdir", "(", "path", ")", ":", "try", ":", "st", "=", "os", ".", "stat", "(", "path", ")", "except", "os", ".", "error", ":", "return", "False", "return", "stat", ".", "S_ISDIR", "(", "st", ".", "st_mode", ")" ]
return true if the pathname refers to an existing directory .
train
true
2,907
def render_inclusion(func, file_name, takes_context, django_context, *args, **kwargs): if takes_context: args = ([django_context] + list(args)) _dict = func(*args, **kwargs) if isinstance(file_name, Template): t = file_name elif ((not isinstance(file_name, basestring)) and is_iterable(file_name)): t = select_template(file_name) else: t = get_template(file_name) nodelist = t.nodelist new_context = Context(_dict) csrf_token = django_context.get('csrf_token', None) if (csrf_token is not None): new_context['csrf_token'] = csrf_token return nodelist.render(new_context)
[ "def", "render_inclusion", "(", "func", ",", "file_name", ",", "takes_context", ",", "django_context", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "takes_context", ":", "args", "=", "(", "[", "django_context", "]", "+", "list", "(", "args", ")", ")", "_dict", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "isinstance", "(", "file_name", ",", "Template", ")", ":", "t", "=", "file_name", "elif", "(", "(", "not", "isinstance", "(", "file_name", ",", "basestring", ")", ")", "and", "is_iterable", "(", "file_name", ")", ")", ":", "t", "=", "select_template", "(", "file_name", ")", "else", ":", "t", "=", "get_template", "(", "file_name", ")", "nodelist", "=", "t", ".", "nodelist", "new_context", "=", "Context", "(", "_dict", ")", "csrf_token", "=", "django_context", ".", "get", "(", "'csrf_token'", ",", "None", ")", "if", "(", "csrf_token", "is", "not", "None", ")", ":", "new_context", "[", "'csrf_token'", "]", "=", "csrf_token", "return", "nodelist", ".", "render", "(", "new_context", ")" ]
this allows a mako template to call a template tag function that is an "inclusion tag" .
train
false
2,908
def get_track_id_from_json(item): fields = ['contentDetails/videoId', 'snippet/resourceId/videoId', 'id/videoId', 'id'] for field in fields: node = item for p in field.split('/'): if (node and isinstance(node, dict)): node = node.get(p) if node: return node return ''
[ "def", "get_track_id_from_json", "(", "item", ")", ":", "fields", "=", "[", "'contentDetails/videoId'", ",", "'snippet/resourceId/videoId'", ",", "'id/videoId'", ",", "'id'", "]", "for", "field", "in", "fields", ":", "node", "=", "item", "for", "p", "in", "field", ".", "split", "(", "'/'", ")", ":", "if", "(", "node", "and", "isinstance", "(", "node", ",", "dict", ")", ")", ":", "node", "=", "node", ".", "get", "(", "p", ")", "if", "node", ":", "return", "node", "return", "''" ]
try to extract video id from various response types .
train
false
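Hand-built payloads showing how get_track_id_from_json above tries each slash-separated field path in turn until one resolves:
print(get_track_id_from_json({'id': {'videoId': 'abc123'}}))                    # 'abc123'
print(get_track_id_from_json({'snippet': {'resourceId': {'videoId': 'xyz'}}}))  # 'xyz'
print(get_track_id_from_json({'unrelated': 1}))                                 # '' (no match)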
2,909
@register.filter(is_safe=True) def stringformat(value, arg): try: return ((u'%' + six.text_type(arg)) % value) except (ValueError, TypeError): return u''
[ "@", "register", ".", "filter", "(", "is_safe", "=", "True", ")", "def", "stringformat", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "(", "(", "u'%'", "+", "six", ".", "text_type", "(", "arg", ")", ")", "%", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "u''" ]
formats the variable according to the arg, an old-style string formatting specifier; returns an empty string on failure .
train
false
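The filter above simply prepends '%' to the arg and applies old-style formatting, so it can be exercised directly; a few illustrative values:
print(stringformat(255, 'x'))       # 'ff'
print(stringformat(0.1234, '.2f'))  # '0.12'
print(stringformat(1, 'z'))         # '' (the invalid specifier is swallowed)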
2,910
def dup_add_term(f, c, i, K): if (not c): return f n = len(f) m = ((n - i) - 1) if (i == (n - 1)): return dup_strip(([(f[0] + c)] + f[1:])) elif (i >= n): return (([c] + ([K.zero] * (i - n))) + f) else: return ((f[:m] + [(f[m] + c)]) + f[(m + 1):])
[ "def", "dup_add_term", "(", "f", ",", "c", ",", "i", ",", "K", ")", ":", "if", "(", "not", "c", ")", ":", "return", "f", "n", "=", "len", "(", "f", ")", "m", "=", "(", "(", "n", "-", "i", ")", "-", "1", ")", "if", "(", "i", "==", "(", "n", "-", "1", ")", ")", ":", "return", "dup_strip", "(", "(", "[", "(", "f", "[", "0", "]", "+", "c", ")", "]", "+", "f", "[", "1", ":", "]", ")", ")", "elif", "(", "i", ">=", "n", ")", ":", "return", "(", "(", "[", "c", "]", "+", "(", "[", "K", ".", "zero", "]", "*", "(", "i", "-", "n", ")", ")", ")", "+", "f", ")", "else", ":", "return", "(", "(", "f", "[", ":", "m", "]", "+", "[", "(", "f", "[", "m", "]", "+", "c", ")", "]", ")", "+", "f", "[", "(", "m", "+", "1", ")", ":", "]", ")" ]
add c*x**i to f in k[x] .
train
false
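A worked example for dup_add_term above, using sympy's dense list representation where coefficients run from highest degree down; the import paths may vary across sympy versions:
from sympy.polys.densearith import dup_add_term
from sympy.polys.domains import ZZ
print(dup_add_term([ZZ(1), ZZ(0), ZZ(1)], ZZ(3), 1, ZZ))  # x**2 + 1 plus 3*x -> [1, 3, 1]
print(dup_add_term([ZZ(1), ZZ(0), ZZ(1)], ZZ(2), 4, ZZ))  # padding with zeros -> [2, 0, 1, 0, 1]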
2,912
def click_element(browser, css_selector, timeout=10, frequency=1.0, header_height=155): move_to_element(browser, css_selector, header_height) browser.execute_script(('document.querySelector("%s").focus()' % css_selector.replace('"', '\\"'))) wait_until_condition(browser, condition=(lambda x: EC.element_to_be_clickable((By.CSS_SELECTOR, css_selector))(browser.driver)), timeout=timeout, frequency=frequency) browser.find_by_css(css_selector).click()
[ "def", "click_element", "(", "browser", ",", "css_selector", ",", "timeout", "=", "10", ",", "frequency", "=", "1.0", ",", "header_height", "=", "155", ")", ":", "move_to_element", "(", "browser", ",", "css_selector", ",", "header_height", ")", "browser", ".", "execute_script", "(", "(", "'document.querySelector(\"%s\").focus()'", "%", "css_selector", ".", "replace", "(", "'\"'", ",", "'\\\\\"'", ")", ")", ")", "wait_until_condition", "(", "browser", ",", "condition", "=", "(", "lambda", "x", ":", "EC", ".", "element_to_be_clickable", "(", "(", "By", ".", "CSS_SELECTOR", ",", "css_selector", ")", ")", "(", "browser", ".", "driver", ")", ")", ",", "timeout", "=", "timeout", ",", "frequency", "=", "frequency", ")", "browser", ".", "find_by_css", "(", "css_selector", ")", ".", "click", "(", ")" ]
click a browser dom element .
train
false
2,913
def is_macosx_sdk_path(path): return ((path.startswith('/usr/') and (not path.startswith('/usr/local'))) or path.startswith('/System/') or path.startswith('/Library/'))
[ "def", "is_macosx_sdk_path", "(", "path", ")", ":", "return", "(", "(", "path", ".", "startswith", "(", "'/usr/'", ")", "and", "(", "not", "path", ".", "startswith", "(", "'/usr/local'", ")", ")", ")", "or", "path", ".", "startswith", "(", "'/System/'", ")", "or", "path", ".", "startswith", "(", "'/Library/'", ")", ")" ]
returns true if path can be located in an osx sdk .
train
false
2,915
def get_systemd_os_like(filepath='/etc/os-release'): return _get_systemd_os_release_var('ID_LIKE', filepath).split(' ')
[ "def", "get_systemd_os_like", "(", "filepath", "=", "'/etc/os-release'", ")", ":", "return", "_get_systemd_os_release_var", "(", "'ID_LIKE'", ",", "filepath", ")", ".", "split", "(", "' '", ")" ]
get a list of strings that indicate the distribution likeness to other distributions .
train
false
2,916
def GetFeedMapping(client, feed, placeholder_type): feed_mapping_service = client.GetService('FeedMappingService', 'v201607') attribute_mappings = {} more_pages = True selector = {'fields': ['FeedMappingId', 'AttributeFieldMappings'], 'predicates': [{'field': 'FeedId', 'operator': 'EQUALS', 'values': [feed['id']]}, {'field': 'PlaceholderType', 'operator': 'EQUALS', 'values': [placeholder_type]}], 'paging': {'startIndex': 0, 'numberResults': PAGE_SIZE}} while more_pages: page = feed_mapping_service.get(selector) if ('entries' in page): for feed_mapping in page['entries']: for attribute_mapping in feed_mapping['attributeFieldMappings']: if (attribute_mapping['feedAttributeId'] in attribute_mappings): attribute_mappings[attribute_mapping['feedAttributeId']].append(attribute_mapping['fieldId']) else: attribute_mappings[attribute_mapping['feedAttributeId']] = [attribute_mapping['fieldId']] selector['paging']['startIndex'] += PAGE_SIZE more_pages = (selector['paging']['startIndex'] < int(page['totalNumEntries'])) return attribute_mappings
[ "def", "GetFeedMapping", "(", "client", ",", "feed", ",", "placeholder_type", ")", ":", "feed_mapping_service", "=", "client", ".", "GetService", "(", "'FeedMappingService'", ",", "'v201607'", ")", "attribute_mappings", "=", "{", "}", "more_pages", "=", "True", "selector", "=", "{", "'fields'", ":", "[", "'FeedMappingId'", ",", "'AttributeFieldMappings'", "]", ",", "'predicates'", ":", "[", "{", "'field'", ":", "'FeedId'", ",", "'operator'", ":", "'EQUALS'", ",", "'values'", ":", "[", "feed", "[", "'id'", "]", "]", "}", ",", "{", "'field'", ":", "'PlaceholderType'", ",", "'operator'", ":", "'EQUALS'", ",", "'values'", ":", "[", "placeholder_type", "]", "}", "]", ",", "'paging'", ":", "{", "'startIndex'", ":", "0", ",", "'numberResults'", ":", "PAGE_SIZE", "}", "}", "while", "more_pages", ":", "page", "=", "feed_mapping_service", ".", "get", "(", "selector", ")", "if", "(", "'entries'", "in", "page", ")", ":", "for", "feed_mapping", "in", "page", "[", "'entries'", "]", ":", "for", "attribute_mapping", "in", "feed_mapping", "[", "'attributeFieldMappings'", "]", ":", "if", "(", "attribute_mapping", "[", "'feedAttributeId'", "]", "in", "attribute_mappings", ")", ":", "attribute_mappings", "[", "attribute_mapping", "[", "'feedAttributeId'", "]", "]", ".", "append", "(", "attribute_mapping", "[", "'fieldId'", "]", ")", "else", ":", "attribute_mappings", "[", "attribute_mapping", "[", "'feedAttributeId'", "]", "]", "=", "[", "attribute_mapping", "[", "'fieldId'", "]", "]", "selector", "[", "'paging'", "]", "[", "'startIndex'", "]", "+=", "PAGE_SIZE", "more_pages", "=", "(", "selector", "[", "'paging'", "]", "[", "'startIndex'", "]", "<", "int", "(", "page", "[", "'totalNumEntries'", "]", ")", ")", "return", "attribute_mappings" ]
gets the feed mapping for a given feed .
train
true
2,917
def _get_collection_memcache_key(collection_id, version=None): if version: return ('collection-version:%s:%s' % (collection_id, version)) else: return ('collection:%s' % collection_id)
[ "def", "_get_collection_memcache_key", "(", "collection_id", ",", "version", "=", "None", ")", ":", "if", "version", ":", "return", "(", "'collection-version:%s:%s'", "%", "(", "collection_id", ",", "version", ")", ")", "else", ":", "return", "(", "'collection:%s'", "%", "collection_id", ")" ]
returns a memcache key for the collection .
train
false
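The key layout produced by _get_collection_memcache_key above, with a made-up collection id:
print(_get_collection_memcache_key('col1'))             # 'collection:col1'
print(_get_collection_memcache_key('col1', version=3))  # 'collection-version:col1:3'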
2,918
def maybe_download_and_extract(filename, working_directory, url_source, extract=False, expected_bytes=None): def _download(filename, working_directory, url_source): def _dlProgress(count, blockSize, totalSize): if (totalSize != 0): percent = ((float((count * blockSize)) / float(totalSize)) * 100.0) sys.stdout.write((('\rDownloading ' + filename) + ('...%d%%' % percent))) sys.stdout.flush() if (sys.version_info[0] == 2): from urllib import urlretrieve else: from urllib.request import urlretrieve filepath = os.path.join(working_directory, filename) urlretrieve((url_source + filename), filepath, reporthook=_dlProgress) exists_or_mkdir(working_directory, verbose=False) filepath = os.path.join(working_directory, filename) if (not os.path.exists(filepath)): _download(filename, working_directory, url_source) print () statinfo = os.stat(filepath) print ('Succesfully downloaded', filename, statinfo.st_size, 'bytes.') if ((not (expected_bytes is None)) and (expected_bytes != statinfo.st_size)): raise Exception((('Failed to verify ' + filename) + '. Can you get to it with a browser?')) if extract: if tarfile.is_tarfile(filepath): print 'Trying to extract tar file' tarfile.open(filepath, 'r').extractall(working_directory) print '... Success!' elif zipfile.is_zipfile(filepath): print 'Trying to extract zip file' with zipfile.ZipFile(filepath) as zf: zf.extractall(working_directory) print '... Success!' else: print 'Unknown compression_format only .tar.gz/.tar.bz2/.tar and .zip supported' return filepath
[ "def", "maybe_download_and_extract", "(", "filename", ",", "working_directory", ",", "url_source", ",", "extract", "=", "False", ",", "expected_bytes", "=", "None", ")", ":", "def", "_download", "(", "filename", ",", "working_directory", ",", "url_source", ")", ":", "def", "_dlProgress", "(", "count", ",", "blockSize", ",", "totalSize", ")", ":", "if", "(", "totalSize", "!=", "0", ")", ":", "percent", "=", "(", "(", "float", "(", "(", "count", "*", "blockSize", ")", ")", "/", "float", "(", "totalSize", ")", ")", "*", "100.0", ")", "sys", ".", "stdout", ".", "write", "(", "(", "(", "'\\rDownloading '", "+", "filename", ")", "+", "(", "'...%d%%'", "%", "percent", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "(", "sys", ".", "version_info", "[", "0", "]", "==", "2", ")", ":", "from", "urllib", "import", "urlretrieve", "else", ":", "from", "urllib", ".", "request", "import", "urlretrieve", "filepath", "=", "os", ".", "path", ".", "join", "(", "working_directory", ",", "filename", ")", "urlretrieve", "(", "(", "url_source", "+", "filename", ")", ",", "filepath", ",", "reporthook", "=", "_dlProgress", ")", "exists_or_mkdir", "(", "working_directory", ",", "verbose", "=", "False", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "working_directory", ",", "filename", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "filepath", ")", ")", ":", "_download", "(", "filename", ",", "working_directory", ",", "url_source", ")", "print", "(", ")", "statinfo", "=", "os", ".", "stat", "(", "filepath", ")", "print", "(", "'Succesfully downloaded'", ",", "filename", ",", "statinfo", ".", "st_size", ",", "'bytes.'", ")", "if", "(", "(", "not", "(", "expected_bytes", "is", "None", ")", ")", "and", "(", "expected_bytes", "!=", "statinfo", ".", "st_size", ")", ")", ":", "raise", "Exception", "(", "(", "(", "'Failed to verify '", "+", "filename", ")", "+", "'. Can you get to it with a browser?'", ")", ")", "if", "extract", ":", "if", "tarfile", ".", "is_tarfile", "(", "filepath", ")", ":", "print", "'Trying to extract tar file'", "tarfile", ".", "open", "(", "filepath", ",", "'r'", ")", ".", "extractall", "(", "working_directory", ")", "print", "'... Success!'", "elif", "zipfile", ".", "is_zipfile", "(", "filepath", ")", ":", "print", "'Trying to extract zip file'", "with", "zipfile", ".", "ZipFile", "(", "filepath", ")", "as", "zf", ":", "zf", ".", "extractall", "(", "working_directory", ")", "print", "'... Success!'", "else", ":", "print", "'Unknown compression_format only .tar.gz/.tar.bz2/.tar and .zip supported'", "return", "filepath" ]
download the file if not already present, optionally verify its size, and extract tar or zip archives .
train
false
2,920
def select2_submodule_check(app_configs, **kwargs): errors = [] dal_select2_path = os.path.dirname(__file__) select2 = os.path.join(os.path.abspath(dal_select2_path), 'static/autocomplete_light/vendor/select2/dist/js/select2.min.js') if (not os.path.exists(select2)): errors.append(checks.Error('Select2 static files not checked out', hint='Run git submodule update --init in DAL ({})'.format(os.path.dirname(dal_select2_path)), id='dal_select2.E001')) return errors
[ "def", "select2_submodule_check", "(", "app_configs", ",", "**", "kwargs", ")", ":", "errors", "=", "[", "]", "dal_select2_path", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "select2", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "abspath", "(", "dal_select2_path", ")", ",", "'static/autocomplete_light/vendor/select2/dist/js/select2.min.js'", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "select2", ")", ")", ":", "errors", ".", "append", "(", "checks", ".", "Error", "(", "'Select2 static files not checked out'", ",", "hint", "=", "'Run git submodule update --init in DAL ({})'", ".", "format", "(", "os", ".", "path", ".", "dirname", "(", "dal_select2_path", ")", ")", ",", "id", "=", "'dal_select2.E001'", ")", ")", "return", "errors" ]
return an error if select2 is missing .
train
false
2,921
def parse_debug_value(value): if (value is None): return value value = value.lower() if (value in ('true', '1')): return True elif (value in ('false', '0')): return False elif (value in ('merge',)): return 'merge' else: raise ValueError()
[ "def", "parse_debug_value", "(", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "value", "value", "=", "value", ".", "lower", "(", ")", "if", "(", "value", "in", "(", "'true'", ",", "'1'", ")", ")", ":", "return", "True", "elif", "(", "value", "in", "(", "'false'", ",", "'0'", ")", ")", ":", "return", "False", "elif", "(", "value", "in", "(", "'merge'", ",", ")", ")", ":", "return", "'merge'", "else", ":", "raise", "ValueError", "(", ")" ]
resolve the given string value to a debug option .
train
false
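Illustrative inputs for parse_debug_value above; matching is case-insensitive, None passes through, and anything unrecognised raises ValueError:
print(parse_debug_value('True'))   # True
print(parse_debug_value('0'))      # False
print(parse_debug_value('merge'))  # 'merge'
print(parse_debug_value(None))     # None
parse_debug_value('bogus')         # raises ValueError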
2,923
def make_dense(targets, noclass): with tf.device('/cpu:0'): shape = tf.shape(targets) batch_size = shape[0] indices = (targets + (noclass * tf.range(0, batch_size))) length = tf.expand_dims((batch_size * noclass), 0) dense = tf.sparse_to_dense(indices, length, 1.0, 0.0) return tf.reshape(dense, [(-1), noclass])
[ "def", "make_dense", "(", "targets", ",", "noclass", ")", ":", "with", "tf", ".", "device", "(", "'/cpu:0'", ")", ":", "shape", "=", "tf", ".", "shape", "(", "targets", ")", "batch_size", "=", "shape", "[", "0", "]", "indices", "=", "(", "targets", "+", "(", "noclass", "*", "tf", ".", "range", "(", "0", ",", "batch_size", ")", ")", ")", "length", "=", "tf", ".", "expand_dims", "(", "(", "batch_size", "*", "noclass", ")", ",", "0", ")", "dense", "=", "tf", ".", "sparse_to_dense", "(", "indices", ",", "length", ",", "1.0", ",", "0.0", ")", "return", "tf", ".", "reshape", "(", "dense", ",", "[", "(", "-", "1", ")", ",", "noclass", "]", ")" ]
move a batch of targets to a dense 1-hot representation .
train
false
2,924
@handle_response_format @treeio_login_required def service_view(request, service_id, response_format='html'): service = get_object_or_404(Service, pk=service_id) if ((not request.user.profile.has_permission(service)) and (not request.user.profile.is_admin('treeio_services'))): return user_denied(request, message="You don't have access to this Service") context = _get_default_context(request) context.update({'service': service}) return render_to_response('services/service_view', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "service_view", "(", "request", ",", "service_id", ",", "response_format", "=", "'html'", ")", ":", "service", "=", "get_object_or_404", "(", "Service", ",", "pk", "=", "service_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "service", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio_services'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Service\"", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'service'", ":", "service", "}", ")", "return", "render_to_response", "(", "'services/service_view'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
display a single service, denying users without access permission .
train
false
2,925
def decode64(encoded_content, encoding='utf-8'): return b64decode(encoded_content.encode(encoding)).decode(encoding)
[ "def", "decode64", "(", "encoded_content", ",", "encoding", "=", "'utf-8'", ")", ":", "return", "b64decode", "(", "encoded_content", ".", "encode", "(", "encoding", ")", ")", ".", "decode", "(", "encoding", ")" ]
decode some base64 encoded content .
train
false
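A round-trip sketch for decode64 above, pairing it with the standard library's b64encode:
from base64 import b64encode
encoded = b64encode(u'hello'.encode('utf-8')).decode('utf-8')  # 'aGVsbG8='
print(decode64(encoded))  # 'hello'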
2,926
def make_socket(port=4050): sockobj = socket(AF_INET, SOCK_STREAM) sockobj.connect(('localhost', port)) return sockobj
[ "def", "make_socket", "(", "port", "=", "4050", ")", ":", "sockobj", "=", "socket", "(", "AF_INET", ",", "SOCK_STREAM", ")", "sockobj", ".", "connect", "(", "(", "'localhost'", ",", "port", ")", ")", "return", "sockobj" ]
create a tcp socket connected to localhost on the given port and return it .
train
false
2,927
def detect_unboundedness(R, s, t): q = deque([s]) seen = set([s]) inf = R.graph['inf'] while q: u = q.popleft() for (v, attr) in R[u].items(): if ((attr['capacity'] == inf) and (v not in seen)): if (v == t): raise nx.NetworkXUnbounded('Infinite capacity path, flow unbounded above.') seen.add(v) q.append(v)
[ "def", "detect_unboundedness", "(", "R", ",", "s", ",", "t", ")", ":", "q", "=", "deque", "(", "[", "s", "]", ")", "seen", "=", "set", "(", "[", "s", "]", ")", "inf", "=", "R", ".", "graph", "[", "'inf'", "]", "while", "q", ":", "u", "=", "q", ".", "popleft", "(", ")", "for", "(", "v", ",", "attr", ")", "in", "R", "[", "u", "]", ".", "items", "(", ")", ":", "if", "(", "(", "attr", "[", "'capacity'", "]", "==", "inf", ")", "and", "(", "v", "not", "in", "seen", ")", ")", ":", "if", "(", "v", "==", "t", ")", ":", "raise", "nx", ".", "NetworkXUnbounded", "(", "'Infinite capacity path, flow unbounded above.'", ")", "seen", ".", "add", "(", "v", ")", "q", ".", "append", "(", "v", ")" ]
detect an infinite-capacity s-t path in r .
train
false
2,928
def _AddSlots(message_descriptor, dictionary): dictionary['__slots__'] = ['_cached_byte_size', '_cached_byte_size_dirty', '_fields', '_unknown_fields', '_is_present_in_parent', '_listener', '_listener_for_children', '__weakref__']
[ "def", "_AddSlots", "(", "message_descriptor", ",", "dictionary", ")", ":", "dictionary", "[", "'__slots__'", "]", "=", "[", "'_cached_byte_size'", ",", "'_cached_byte_size_dirty'", ",", "'_fields'", ",", "'_unknown_fields'", ",", "'_is_present_in_parent'", ",", "'_listener'", ",", "'_listener_for_children'", ",", "'__weakref__'", "]" ]
adds a __slots__ entry to dictionary .
train
false
2,930
def getSphericalByRadians(azimuthRadians, elevationRadians, radius=1.0): elevationComplex = euclidean.getWiddershinsUnitPolar(elevationRadians) azimuthComplex = (euclidean.getWiddershinsUnitPolar(azimuthRadians) * elevationComplex.real) return (Vector3(azimuthComplex.real, azimuthComplex.imag, elevationComplex.imag) * radius)
[ "def", "getSphericalByRadians", "(", "azimuthRadians", ",", "elevationRadians", ",", "radius", "=", "1.0", ")", ":", "elevationComplex", "=", "euclidean", ".", "getWiddershinsUnitPolar", "(", "elevationRadians", ")", "azimuthComplex", "=", "(", "euclidean", ".", "getWiddershinsUnitPolar", "(", "azimuthRadians", ")", "*", "elevationComplex", ".", "real", ")", "return", "(", "Vector3", "(", "azimuthComplex", ".", "real", ",", "azimuthComplex", ".", "imag", ",", "elevationComplex", ".", "imag", ")", "*", "radius", ")" ]
get the spherical vector3 unit by radians .
train
false
2,931
def dtype_to_char(dtype): dtype_char = {'complex64': 'c', 'complex128': 'z', 'float32': 'f', 'float64': 'd', 'int8': 'b', 'int16': 'w', 'int32': 'i', 'int64': 'l'} if (dtype in dtype_char): return dtype_char[dtype] else: return 'X'
[ "def", "dtype_to_char", "(", "dtype", ")", ":", "dtype_char", "=", "{", "'complex64'", ":", "'c'", ",", "'complex128'", ":", "'z'", ",", "'float32'", ":", "'f'", ",", "'float64'", ":", "'d'", ",", "'int8'", ":", "'b'", ",", "'int16'", ":", "'w'", ",", "'int32'", ":", "'i'", ",", "'int64'", ":", "'l'", "}", "if", "(", "dtype", "in", "dtype_char", ")", ":", "return", "dtype_char", "[", "dtype", "]", "else", ":", "return", "'X'" ]
return character that represents data type .
train
false
2,932
def get_company_contact(user): contact = get_person_contact(user) if (not contact): return None return contact.company_memberships.filter(is_active=True).first()
[ "def", "get_company_contact", "(", "user", ")", ":", "contact", "=", "get_person_contact", "(", "user", ")", "if", "(", "not", "contact", ")", ":", "return", "None", "return", "contact", ".", "company_memberships", ".", "filter", "(", "is_active", "=", "True", ")", ".", "first", "(", ")" ]
get the preferred companycontact of the given user .
train
false
2,933
def minversion(module, version, inclusive=True, version_path=u'__version__'): if isinstance(module, types.ModuleType): module_name = module.__name__ elif isinstance(module, six.string_types): module_name = module try: module = resolve_name(module_name) except ImportError: return False else: raise ValueError(u'module argument must be an actual imported module, or the import name of the module; got {0!r}'.format(module)) if (u'.' not in version_path): have_version = getattr(module, version_path) else: have_version = resolve_name(module.__name__, version_path) try: from pkg_resources import parse_version except ImportError: from distutils.version import LooseVersion as parse_version if inclusive: return (parse_version(have_version) >= parse_version(version)) else: return (parse_version(have_version) > parse_version(version))
[ "def", "minversion", "(", "module", ",", "version", ",", "inclusive", "=", "True", ",", "version_path", "=", "u'__version__'", ")", ":", "if", "isinstance", "(", "module", ",", "types", ".", "ModuleType", ")", ":", "module_name", "=", "module", ".", "__name__", "elif", "isinstance", "(", "module", ",", "six", ".", "string_types", ")", ":", "module_name", "=", "module", "try", ":", "module", "=", "resolve_name", "(", "module_name", ")", "except", "ImportError", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "u'module argument must be an actual imported module, or the import name of the module; got {0!r}'", ".", "format", "(", "module", ")", ")", "if", "(", "u'.'", "not", "in", "version_path", ")", ":", "have_version", "=", "getattr", "(", "module", ",", "version_path", ")", "else", ":", "have_version", "=", "resolve_name", "(", "module", ".", "__name__", ",", "version_path", ")", "try", ":", "from", "pkg_resources", "import", "parse_version", "except", "ImportError", ":", "from", "distutils", ".", "version", "import", "LooseVersion", "as", "parse_version", "if", "inclusive", ":", "return", "(", "parse_version", "(", "have_version", ")", ">=", "parse_version", "(", "version", ")", ")", "else", ":", "return", "(", "parse_version", "(", "have_version", ")", ">", "parse_version", "(", "version", ")", ")" ]
returns true if the specified python module satisfies a minimum version requirement .
train
false
2,934
def TRIMA(ds, count, timeperiod=(- (2 ** 31))): return call_talib_with_ds(ds, count, talib.TRIMA, timeperiod)
[ "def", "TRIMA", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "TRIMA", ",", "timeperiod", ")" ]
triangular moving average .
train
false
2,935
def js_helper(prefix, *args): return '\n'.join([javascript_link(url_for(('/%s%s.js?v=%s' % (prefix, name, server_starttime)))) for name in args])
[ "def", "js_helper", "(", "prefix", ",", "*", "args", ")", ":", "return", "'\\n'", ".", "join", "(", "[", "javascript_link", "(", "url_for", "(", "(", "'/%s%s.js?v=%s'", "%", "(", "prefix", ",", "name", ",", "server_starttime", ")", ")", ")", ")", "for", "name", "in", "args", "]", ")" ]
take a prefix and list of javascript names and return appropriate string of script tags .
train
false
2,936
def only_for(roles): if local.flags.in_test: return if (not isinstance(roles, (tuple, list))): roles = (roles,) roles = set(roles) myroles = set(get_roles()) if (not roles.intersection(myroles)): raise PermissionError
[ "def", "only_for", "(", "roles", ")", ":", "if", "local", ".", "flags", ".", "in_test", ":", "return", "if", "(", "not", "isinstance", "(", "roles", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "roles", "=", "(", "roles", ",", ")", "roles", "=", "set", "(", "roles", ")", "myroles", "=", "set", "(", "get_roles", "(", ")", ")", "if", "(", "not", "roles", ".", "intersection", "(", "myroles", ")", ")", ":", "raise", "PermissionError" ]
raise a PermissionError unless the current user has at least one of the given roles .
train
false
2,938
def filemode(mode): perm = [] for table in _filemode_table: for (bit, char) in table: if ((mode & bit) == bit): perm.append(char) break else: perm.append('-') return ''.join(perm)
[ "def", "filemode", "(", "mode", ")", ":", "perm", "=", "[", "]", "for", "table", "in", "_filemode_table", ":", "for", "(", "bit", ",", "char", ")", "in", "table", ":", "if", "(", "(", "mode", "&", "bit", ")", "==", "bit", ")", ":", "perm", ".", "append", "(", "char", ")", "break", "else", ":", "perm", ".", "append", "(", "'-'", ")", "return", "''", ".", "join", "(", "perm", ")" ]
convert a file's mode to a string of the form -rwxrwxrwx .
train
true
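An illustrative call to filemode above; the exact output assumes the module-level _filemode_table matches the one used by the standard library's tarfile/stat modules:
import stat
print(filemode(stat.S_IFREG | 0o755))  # '-rwxr-xr-x'
print(filemode(stat.S_IFDIR | 0o700))  # 'drwx------'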
2,939
def volume_glance_metadata_list_get(context, volume_id_list): return IMPL.volume_glance_metadata_list_get(context, volume_id_list)
[ "def", "volume_glance_metadata_list_get", "(", "context", ",", "volume_id_list", ")", ":", "return", "IMPL", ".", "volume_glance_metadata_list_get", "(", "context", ",", "volume_id_list", ")" ]
return the glance metadata for a volume list .
train
false
2,940
def test_mixeddiv(): i = iscalar() d = dscalar() assert (0 == function([i, d], (d * (i // (i + 1))))(3, 1.0))
[ "def", "test_mixeddiv", "(", ")", ":", "i", "=", "iscalar", "(", ")", "d", "=", "dscalar", "(", ")", "assert", "(", "0", "==", "function", "(", "[", "i", ",", "d", "]", ",", "(", "d", "*", "(", "i", "//", "(", "i", "+", "1", ")", ")", ")", ")", "(", "3", ",", "1.0", ")", ")" ]
test that int division is preserved .
train
false
2,941
def _encode_params(**kw): args = [] for (k, v) in kw.iteritems(): if isinstance(v, basestring): qv = (v.encode('utf-8') if isinstance(v, unicode) else v) args.append(('%s=%s' % (k, urllib.quote(qv)))) elif isinstance(v, collections.Iterable): for i in v: qv = (i.encode('utf-8') if isinstance(i, unicode) else str(i)) args.append(('%s=%s' % (k, urllib.quote(qv)))) else: qv = str(v) args.append(('%s=%s' % (k, urllib.quote(qv)))) return '&'.join(args)
[ "def", "_encode_params", "(", "**", "kw", ")", ":", "args", "=", "[", "]", "for", "(", "k", ",", "v", ")", "in", "kw", ".", "iteritems", "(", ")", ":", "if", "isinstance", "(", "v", ",", "basestring", ")", ":", "qv", "=", "(", "v", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "v", ",", "unicode", ")", "else", "v", ")", "args", ".", "append", "(", "(", "'%s=%s'", "%", "(", "k", ",", "urllib", ".", "quote", "(", "qv", ")", ")", ")", ")", "elif", "isinstance", "(", "v", ",", "collections", ".", "Iterable", ")", ":", "for", "i", "in", "v", ":", "qv", "=", "(", "i", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "i", ",", "unicode", ")", "else", "str", "(", "i", ")", ")", "args", ".", "append", "(", "(", "'%s=%s'", "%", "(", "k", ",", "urllib", ".", "quote", "(", "qv", ")", ")", ")", ")", "else", ":", "qv", "=", "str", "(", "v", ")", "args", ".", "append", "(", "(", "'%s=%s'", "%", "(", "k", ",", "urllib", ".", "quote", "(", "qv", ")", ")", ")", ")", "return", "'&'", ".", "join", "(", "args", ")" ]
url-encode keyword parameters, handling unicode strings and iterable values .
train
true
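Illustrative keyword arguments for the python 2 helper above; dict iteration order is not guaranteed in python 2, so the pair order may vary:
print(_encode_params(q=u'caf\xe9', tags=['a b', 'c'], page=2))
# e.g. 'q=caf%C3%A9&tags=a%20b&tags=c&page=2'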
2,942
def subplot(*args, **kwargs): if (len(args) == 0): args = (1, 1, 1) if ((len(args) >= 3) and isinstance(args[2], bool)): warnings.warn(u'The subplot index argument to subplot() appears to be a boolean. Did you intend to use subplots()?') fig = gcf() a = fig.add_subplot(*args, **kwargs) bbox = a.bbox byebye = [] for other in fig.axes: if (other == a): continue if bbox.fully_overlaps(other.bbox): byebye.append(other) for ax in byebye: delaxes(ax) return a
[ "def", "subplot", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "args", ")", "==", "0", ")", ":", "args", "=", "(", "1", ",", "1", ",", "1", ")", "if", "(", "(", "len", "(", "args", ")", ">=", "3", ")", "and", "isinstance", "(", "args", "[", "2", "]", ",", "bool", ")", ")", ":", "warnings", ".", "warn", "(", "u'The subplot index argument to subplot() appears to be a boolean. Did you intend to use subplots()?'", ")", "fig", "=", "gcf", "(", ")", "a", "=", "fig", ".", "add_subplot", "(", "*", "args", ",", "**", "kwargs", ")", "bbox", "=", "a", ".", "bbox", "byebye", "=", "[", "]", "for", "other", "in", "fig", ".", "axes", ":", "if", "(", "other", "==", "a", ")", ":", "continue", "if", "bbox", ".", "fully_overlaps", "(", "other", ".", "bbox", ")", ":", "byebye", ".", "append", "(", "other", ")", "for", "ax", "in", "byebye", ":", "delaxes", "(", "ax", ")", "return", "a" ]
add a subplot to the current figure, deleting any existing axes that it fully overlaps .
train
false
2,943
def get_repo_url(force_github=False): sourcepath = os.path.realpath(os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)) gitpathgit = os.path.join(sourcepath, u'.git') if ((not os.path.exists(gitpathgit)) and (not force_github)): uri = (u'file://%s' % sourcepath) else: uri = u'http://github.com/nipy/nipype/blob/master' return uri
[ "def", "get_repo_url", "(", "force_github", "=", "False", ")", ":", "sourcepath", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "nipype", ".", "__file__", ")", ",", "os", ".", "path", ".", "pardir", ")", ")", "gitpathgit", "=", "os", ".", "path", ".", "join", "(", "sourcepath", ",", "u'.git'", ")", "if", "(", "(", "not", "os", ".", "path", ".", "exists", "(", "gitpathgit", ")", ")", "and", "(", "not", "force_github", ")", ")", ":", "uri", "=", "(", "u'file://%s'", "%", "sourcepath", ")", "else", ":", "uri", "=", "u'http://github.com/nipy/nipype/blob/master'", "return", "uri" ]
returns the github repo url, or a local file:// url when no git checkout is present .
train
false
2,944
def get_hex_from_color(color): return ('#' + ''.join(['{0:02x}'.format(int((x * 255))) for x in color]))
[ "def", "get_hex_from_color", "(", "color", ")", ":", "return", "(", "'#'", "+", "''", ".", "join", "(", "[", "'{0:02x}'", ".", "format", "(", "int", "(", "(", "x", "*", "255", ")", ")", ")", "for", "x", "in", "color", "]", ")", ")" ]
transform a kivy color (rgba components as 0-1 floats) into a hex string .
train
false
2,945
def has_table(table_name, con, flavor=None, schema=None): pandas_sql = pandasSQL_builder(con, flavor=flavor, schema=schema) return pandas_sql.has_table(table_name)
[ "def", "has_table", "(", "table_name", ",", "con", ",", "flavor", "=", "None", ",", "schema", "=", "None", ")", ":", "pandas_sql", "=", "pandasSQL_builder", "(", "con", ",", "flavor", "=", "flavor", ",", "schema", "=", "schema", ")", "return", "pandas_sql", ".", "has_table", "(", "table_name", ")" ]
check if database has named table .
train
true
2,947
def course_slug(course_key, mode): digest = hashlib.sha256(u'{}{}'.format(unicode(course_key), unicode(mode))).hexdigest()[:7] base_slug = slugify((unicode(course_key) + u'_{}_'.format(mode)))[:248] return (base_slug + digest)
[ "def", "course_slug", "(", "course_key", ",", "mode", ")", ":", "digest", "=", "hashlib", ".", "sha256", "(", "u'{}{}'", ".", "format", "(", "unicode", "(", "course_key", ")", ",", "unicode", "(", "mode", ")", ")", ")", ".", "hexdigest", "(", ")", "[", ":", "7", "]", "base_slug", "=", "slugify", "(", "(", "unicode", "(", "course_key", ")", "+", "u'_{}_'", ".", "format", "(", "mode", ")", ")", ")", "[", ":", "248", "]", "return", "(", "base_slug", "+", "digest", ")" ]
legacy: not to be used as a model for constructing badge slugs .
train
false
2,949
def make_safe_f(f, allowed_params): def inner(*args, **kwargs): if kwargs: new_kwargs = {} for (k, v) in kwargs.items(): if (k in allowed_params): new_kwargs[k] = v return f(*args, **new_kwargs) return f(*args, **kwargs) return inner
[ "def", "make_safe_f", "(", "f", ",", "allowed_params", ")", ":", "def", "inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "kwargs", ":", "new_kwargs", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "items", "(", ")", ":", "if", "(", "k", "in", "allowed_params", ")", ":", "new_kwargs", "[", "k", "]", "=", "v", "return", "f", "(", "*", "args", ",", "**", "new_kwargs", ")", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "inner" ]
make a version of f that ignores extra named params .
train
false
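A minimal sketch of make_safe_f above; the wrapped function and its argument names are hypothetical:
def greet(name, punct='!'):
    return 'hi ' + name + punct
safe_greet = make_safe_f(greet, ('name', 'punct'))
print(safe_greet(name='ada', color='red'))  # 'hi ada!' -- the extra 'color' kwarg is dropped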
2,950
def libvlc_media_player_get_full_title_descriptions(p_mi, titles): f = (_Cfunctions.get('libvlc_media_player_get_full_title_descriptions', None) or _Cfunction('libvlc_media_player_get_full_title_descriptions', ((1,), (1,)), None, ctypes.c_int, MediaPlayer, ctypes.POINTER(ctypes.POINTER(TitleDescription)))) return f(p_mi, titles)
[ "def", "libvlc_media_player_get_full_title_descriptions", "(", "p_mi", ",", "titles", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_get_full_title_descriptions'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_get_full_title_descriptions'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaPlayer", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "POINTER", "(", "TitleDescription", ")", ")", ")", ")", "return", "f", "(", "p_mi", ",", "titles", ")" ]
get the full description of available titles .
train
false
2,951
def test_date(): date_chart = DateLine(truncate_label=1000) date_chart.add('dates', [(date(2013, 1, 2), 300), (date(2013, 1, 12), 412), (date(2013, 2, 2), 823), (date(2013, 2, 22), 672)]) q = date_chart.render_pyquery() assert (list(map((lambda t: t.split(' ')[0]), q('.axis.x text').map(texts))) == ['2013-01-12', '2013-01-24', '2013-02-04', '2013-02-16'])
[ "def", "test_date", "(", ")", ":", "date_chart", "=", "DateLine", "(", "truncate_label", "=", "1000", ")", "date_chart", ".", "add", "(", "'dates'", ",", "[", "(", "date", "(", "2013", ",", "1", ",", "2", ")", ",", "300", ")", ",", "(", "date", "(", "2013", ",", "1", ",", "12", ")", ",", "412", ")", ",", "(", "date", "(", "2013", ",", "2", ",", "2", ")", ",", "823", ")", ",", "(", "date", "(", "2013", ",", "2", ",", "22", ")", ",", "672", ")", "]", ")", "q", "=", "date_chart", ".", "render_pyquery", "(", ")", "assert", "(", "list", "(", "map", "(", "(", "lambda", "t", ":", "t", ".", "split", "(", "' '", ")", "[", "0", "]", ")", ",", "q", "(", "'.axis.x text'", ")", ".", "map", "(", "texts", ")", ")", ")", "==", "[", "'2013-01-12'", ",", "'2013-01-24'", ",", "'2013-02-04'", ",", "'2013-02-16'", "]", ")" ]
test a simple dateline .
train
false
2,952
def apply_wrapper(wrapper, func): warnings.warn('The function `apply_wrapper` is deprecated since IPython 4.0', DeprecationWarning, stacklevel=2) import nose.tools return decorator(wrapper, nose.tools.make_decorator(func)(wrapper))
[ "def", "apply_wrapper", "(", "wrapper", ",", "func", ")", ":", "warnings", ".", "warn", "(", "'The function `apply_wrapper` is deprecated since IPython 4.0'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "import", "nose", ".", "tools", "return", "decorator", "(", "wrapper", ",", "nose", ".", "tools", ".", "make_decorator", "(", "func", ")", "(", "wrapper", ")", ")" ]
apply a wrapper to a function for decoration .
train
false
2,953
def generate_examples_and_gallery(example_dir, rst_dir, cfg): if (not example_dir.exists): print ('No example directory found at', example_dir) return rst_dir.makedirs() with open(rst_dir.pjoin(('index' + cfg.source_suffix_str)), 'w') as gallery_index: write_gallery(gallery_index, example_dir, rst_dir, cfg) for d in sorted(example_dir.listdir()): example_sub = example_dir.pjoin(d) if example_sub.isdir: rst_sub = rst_dir.pjoin(d) rst_sub.makedirs() write_gallery(gallery_index, example_sub, rst_sub, cfg, depth=1) gallery_index.flush()
[ "def", "generate_examples_and_gallery", "(", "example_dir", ",", "rst_dir", ",", "cfg", ")", ":", "if", "(", "not", "example_dir", ".", "exists", ")", ":", "print", "(", "'No example directory found at'", ",", "example_dir", ")", "return", "rst_dir", ".", "makedirs", "(", ")", "with", "open", "(", "rst_dir", ".", "pjoin", "(", "(", "'index'", "+", "cfg", ".", "source_suffix_str", ")", ")", ",", "'w'", ")", "as", "gallery_index", ":", "write_gallery", "(", "gallery_index", ",", "example_dir", ",", "rst_dir", ",", "cfg", ")", "for", "d", "in", "sorted", "(", "example_dir", ".", "listdir", "(", ")", ")", ":", "example_sub", "=", "example_dir", ".", "pjoin", "(", "d", ")", "if", "example_sub", ".", "isdir", ":", "rst_sub", "=", "rst_dir", ".", "pjoin", "(", "d", ")", "rst_sub", ".", "makedirs", "(", ")", "write_gallery", "(", "gallery_index", ",", "example_sub", ",", "rst_sub", ",", "cfg", ",", "depth", "=", "1", ")", "gallery_index", ".", "flush", "(", ")" ]
generate rst from examples and create a gallery to showcase them .
train
false
2,954
def create_script(typeclass=None, key=None, obj=None, player=None, locks=None, interval=None, start_delay=None, repeats=None, persistent=None, autostart=True, report_to=None, desc=None): global _ScriptDB if (not _ScriptDB): from evennia.scripts.models import ScriptDB as _ScriptDB typeclass = (typeclass if typeclass else settings.BASE_SCRIPT_TYPECLASS) if isinstance(typeclass, basestring): typeclass = class_from_module(typeclass, settings.TYPECLASS_PATHS) kwarg = {} if key: kwarg['db_key'] = key if player: kwarg['db_player'] = dbid_to_obj(player, _ScriptDB) if obj: kwarg['db_obj'] = dbid_to_obj(obj, _ScriptDB) if interval: kwarg['db_interval'] = interval if start_delay: kwarg['db_start_delay'] = start_delay if repeats: kwarg['db_repeats'] = repeats if persistent: kwarg['db_persistent'] = persistent if desc: kwarg['db_desc'] = desc new_script = typeclass(**kwarg) new_script._createdict = {'key': key, 'obj': obj, 'player': player, 'locks': locks, 'interval': interval, 'start_delay': start_delay, 'repeats': repeats, 'persistent': persistent, 'autostart': autostart, 'report_to': report_to} new_script.save() return new_script
[ "def", "create_script", "(", "typeclass", "=", "None", ",", "key", "=", "None", ",", "obj", "=", "None", ",", "player", "=", "None", ",", "locks", "=", "None", ",", "interval", "=", "None", ",", "start_delay", "=", "None", ",", "repeats", "=", "None", ",", "persistent", "=", "None", ",", "autostart", "=", "True", ",", "report_to", "=", "None", ",", "desc", "=", "None", ")", ":", "global", "_ScriptDB", "if", "(", "not", "_ScriptDB", ")", ":", "from", "evennia", ".", "scripts", ".", "models", "import", "ScriptDB", "as", "_ScriptDB", "typeclass", "=", "(", "typeclass", "if", "typeclass", "else", "settings", ".", "BASE_SCRIPT_TYPECLASS", ")", "if", "isinstance", "(", "typeclass", ",", "basestring", ")", ":", "typeclass", "=", "class_from_module", "(", "typeclass", ",", "settings", ".", "TYPECLASS_PATHS", ")", "kwarg", "=", "{", "}", "if", "key", ":", "kwarg", "[", "'db_key'", "]", "=", "key", "if", "player", ":", "kwarg", "[", "'db_player'", "]", "=", "dbid_to_obj", "(", "player", ",", "_ScriptDB", ")", "if", "obj", ":", "kwarg", "[", "'db_obj'", "]", "=", "dbid_to_obj", "(", "obj", ",", "_ScriptDB", ")", "if", "interval", ":", "kwarg", "[", "'db_interval'", "]", "=", "interval", "if", "start_delay", ":", "kwarg", "[", "'db_start_delay'", "]", "=", "start_delay", "if", "repeats", ":", "kwarg", "[", "'db_repeats'", "]", "=", "repeats", "if", "persistent", ":", "kwarg", "[", "'db_persistent'", "]", "=", "persistent", "if", "desc", ":", "kwarg", "[", "'db_desc'", "]", "=", "desc", "new_script", "=", "typeclass", "(", "**", "kwarg", ")", "new_script", ".", "_createdict", "=", "{", "'key'", ":", "key", ",", "'obj'", ":", "obj", ",", "'player'", ":", "player", ",", "'locks'", ":", "locks", ",", "'interval'", ":", "interval", ",", "'start_delay'", ":", "start_delay", ",", "'repeats'", ":", "repeats", ",", "'persistent'", ":", "persistent", ",", "'autostart'", ":", "autostart", ",", "'report_to'", ":", "report_to", "}", "new_script", ".", "save", "(", ")", "return", "new_script" ]
create a new script, optionally attached to an object or player .
train
false
2,955
def validate_auth_mechanism(option, value): if ((value not in MECHANISMS) and (value != 'CRAM-MD5')): raise ValueError(('%s must be in %s' % (option, tuple(MECHANISMS)))) return value
[ "def", "validate_auth_mechanism", "(", "option", ",", "value", ")", ":", "if", "(", "(", "value", "not", "in", "MECHANISMS", ")", "and", "(", "value", "!=", "'CRAM-MD5'", ")", ")", ":", "raise", "ValueError", "(", "(", "'%s must be in %s'", "%", "(", "option", ",", "tuple", "(", "MECHANISMS", ")", ")", ")", ")", "return", "value" ]
validate the authmechanism uri option .
train
true
2,956
def _show_one(audio_file): print 'File: ', audio_file pytrack = track.track_from_filename(audio_file) print 'Artist: ', (pytrack.artist if hasattr(pytrack, 'artist') else 'Unknown') print 'Title: ', (pytrack.title if hasattr(pytrack, 'title') else 'Unknown') print 'Track ID: ', pytrack.id print 'Tempo: ', pytrack.tempo print ('Energy: %1.3f %s' % (pytrack.energy, _bar(pytrack.energy))) if (not pytrack.valence): pytrack = track.track_from_filename(audio_file, force_upload=True) print ('Valence: %1.3f %s' % (pytrack.valence, _bar(pytrack.valence))) print ('Acousticness: %1.3f %s' % (pytrack.acousticness, _bar(pytrack.acousticness))) print
[ "def", "_show_one", "(", "audio_file", ")", ":", "print", "'File: '", ",", "audio_file", "pytrack", "=", "track", ".", "track_from_filename", "(", "audio_file", ")", "print", "'Artist: '", ",", "(", "pytrack", ".", "artist", "if", "hasattr", "(", "pytrack", ",", "'artist'", ")", "else", "'Unknown'", ")", "print", "'Title: '", ",", "(", "pytrack", ".", "title", "if", "hasattr", "(", "pytrack", ",", "'title'", ")", "else", "'Unknown'", ")", "print", "'Track ID: '", ",", "pytrack", ".", "id", "print", "'Tempo: '", ",", "pytrack", ".", "tempo", "print", "(", "'Energy: %1.3f %s'", "%", "(", "pytrack", ".", "energy", ",", "_bar", "(", "pytrack", ".", "energy", ")", ")", ")", "if", "(", "not", "pytrack", ".", "valence", ")", ":", "pytrack", "=", "track", ".", "track_from_filename", "(", "audio_file", ",", "force_upload", "=", "True", ")", "print", "(", "'Valence: %1.3f %s'", "%", "(", "pytrack", ".", "valence", ",", "_bar", "(", "pytrack", ".", "valence", ")", ")", ")", "print", "(", "'Acousticness: %1.3f %s'", "%", "(", "pytrack", ".", "acousticness", ",", "_bar", "(", "pytrack", ".", "acousticness", ")", ")", ")", "print" ]
given an audio file, print its artist, title and echonest audio features .
train
true