id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
15,038
@cli.command() @click.option('-d', '--directory', default='./', type=click.Path(), required=True) def examples(directory): source_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'examples') try: shutil.copytree(source_dir, os.path.join(directory, 'examples')) except OSError as e: if (e.errno == errno.EEXIST): print_('Folder examples is exists.')
[ "@", "cli", ".", "command", "(", ")", "@", "click", ".", "option", "(", "'-d'", ",", "'--directory'", ",", "default", "=", "'./'", ",", "type", "=", "click", ".", "Path", "(", ")", ",", "required", "=", "True", ")", "def", "examples", "(", "directory", ")", ":", "source_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ",", "'examples'", ")", "try", ":", "shutil", ".", "copytree", "(", "source_dir", ",", "os", ".", "path", ".", "join", "(", "directory", ",", "'examples'", ")", ")", "except", "OSError", "as", "e", ":", "if", "(", "e", ".", "errno", "==", "errno", ".", "EEXIST", ")", ":", "print_", "(", "'Folder examples is exists.'", ")" ]
generate example strategies to target folder .
train
false
15,039
def htons(integer): return ntohs(integer)
[ "def", "htons", "(", "integer", ")", ":", "return", "ntohs", "(", "integer", ")" ]
convert integer in n from host-byte order to network-byte order .
train
false
15,041
def callInThreadPool(fun, *args, **kwargs): if isInThreadPool(): fun(*args, **kwargs) else: reactor.callFromThread(reactor.callInThread, fun, *args, **kwargs)
[ "def", "callInThreadPool", "(", "fun", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "isInThreadPool", "(", ")", ":", "fun", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "reactor", ".", "callFromThread", "(", "reactor", ".", "callInThread", ",", "fun", ",", "*", "args", ",", "**", "kwargs", ")" ]
calls fun in the reactors thread pool .
train
false
15,043
def p_init_declarator_list_2(t): pass
[ "def", "p_init_declarator_list_2", "(", "t", ")", ":", "pass" ]
init_declarator_list : init_declarator_list comma init_declarator .
train
false
15,045
def merge_mean_color(graph, src, dst): graph.node[dst]['total color'] += graph.node[src]['total color'] graph.node[dst]['pixel count'] += graph.node[src]['pixel count'] graph.node[dst]['mean color'] = (graph.node[dst]['total color'] / graph.node[dst]['pixel count'])
[ "def", "merge_mean_color", "(", "graph", ",", "src", ",", "dst", ")", ":", "graph", ".", "node", "[", "dst", "]", "[", "'total color'", "]", "+=", "graph", ".", "node", "[", "src", "]", "[", "'total color'", "]", "graph", ".", "node", "[", "dst", "]", "[", "'pixel count'", "]", "+=", "graph", ".", "node", "[", "src", "]", "[", "'pixel count'", "]", "graph", ".", "node", "[", "dst", "]", "[", "'mean color'", "]", "=", "(", "graph", ".", "node", "[", "dst", "]", "[", "'total color'", "]", "/", "graph", ".", "node", "[", "dst", "]", "[", "'pixel count'", "]", ")" ]
callback called before merging two nodes of a mean color distance graph .
train
false
15,047
def random_organism(): genome = random_genome() return Organism(genome, test_fitness)
[ "def", "random_organism", "(", ")", ":", "genome", "=", "random_genome", "(", ")", "return", "Organism", "(", "genome", ",", "test_fitness", ")" ]
generate a random organism .
train
false
15,048
def basestation(): return s3_rest_controller()
[ "def", "basestation", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller for base stations .
train
false
15,049
def observe_lr(optimizer_name='main', observation_key='lr'): return observe_value(observation_key, (lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr))
[ "def", "observe_lr", "(", "optimizer_name", "=", "'main'", ",", "observation_key", "=", "'lr'", ")", ":", "return", "observe_value", "(", "observation_key", ",", "(", "lambda", "trainer", ":", "trainer", ".", "updater", ".", "get_optimizer", "(", "optimizer_name", ")", ".", "lr", ")", ")" ]
returns a trainer extension to record the learning rate .
train
false
15,050
def user_confirm(prompt, default=False): if default: prompt_default = '[Y/n]' else: prompt_default = '[y/N]' answer = input(('%s %s ' % (prompt, prompt_default))) if (answer == ''): return default else: return (answer.lower() in ('yes', 'y'))
[ "def", "user_confirm", "(", "prompt", ",", "default", "=", "False", ")", ":", "if", "default", ":", "prompt_default", "=", "'[Y/n]'", "else", ":", "prompt_default", "=", "'[y/N]'", "answer", "=", "input", "(", "(", "'%s %s '", "%", "(", "prompt", ",", "prompt_default", ")", ")", ")", "if", "(", "answer", "==", "''", ")", ":", "return", "default", "else", ":", "return", "(", "answer", ".", "lower", "(", ")", "in", "(", "'yes'", ",", "'y'", ")", ")" ]
yes/no question dialog with user .
train
false
15,051
def send_email_for_monthly_fee_payment(email, event_name, date, amount, payment_url): send_email(to=email, action=MONTHLY_PAYMENT_EMAIL, subject=MAILS[MONTHLY_PAYMENT_EMAIL]['subject'].format(event_name=event_name, date=date), html=MAILS[MONTHLY_PAYMENT_EMAIL]['message'].format(event_name=event_name, date=date, payment_url=payment_url, amount=amount, app_name=get_settings()['app_name']))
[ "def", "send_email_for_monthly_fee_payment", "(", "email", ",", "event_name", ",", "date", ",", "amount", ",", "payment_url", ")", ":", "send_email", "(", "to", "=", "email", ",", "action", "=", "MONTHLY_PAYMENT_EMAIL", ",", "subject", "=", "MAILS", "[", "MONTHLY_PAYMENT_EMAIL", "]", "[", "'subject'", "]", ".", "format", "(", "event_name", "=", "event_name", ",", "date", "=", "date", ")", ",", "html", "=", "MAILS", "[", "MONTHLY_PAYMENT_EMAIL", "]", "[", "'message'", "]", ".", "format", "(", "event_name", "=", "event_name", ",", "date", "=", "date", ",", "payment_url", "=", "payment_url", ",", "amount", "=", "amount", ",", "app_name", "=", "get_settings", "(", ")", "[", "'app_name'", "]", ")", ")" ]
send email every month with invoice to pay service fee .
train
false
15,052
def lighten(color, percent): return adjust(color, 2, percent)
[ "def", "lighten", "(", "color", ",", "percent", ")", ":", "return", "adjust", "(", "color", ",", "2", ",", "percent", ")" ]
lighten a color by increasing its lightness by percent .
train
false
15,053
def _find_channels(ch_names, ch_type='EOG'): substrings = (ch_type,) substrings = [s.upper() for s in substrings] if (ch_type == 'EOG'): substrings = ('EOG', 'EYE') eog_idx = [idx for (idx, ch) in enumerate(ch_names) if any(((substring in ch.upper()) for substring in substrings))] return eog_idx
[ "def", "_find_channels", "(", "ch_names", ",", "ch_type", "=", "'EOG'", ")", ":", "substrings", "=", "(", "ch_type", ",", ")", "substrings", "=", "[", "s", ".", "upper", "(", ")", "for", "s", "in", "substrings", "]", "if", "(", "ch_type", "==", "'EOG'", ")", ":", "substrings", "=", "(", "'EOG'", ",", "'EYE'", ")", "eog_idx", "=", "[", "idx", "for", "(", "idx", ",", "ch", ")", "in", "enumerate", "(", "ch_names", ")", "if", "any", "(", "(", "(", "substring", "in", "ch", ".", "upper", "(", ")", ")", "for", "substring", "in", "substrings", ")", ")", "]", "return", "eog_idx" ]
find eog channel .
train
false
15,054
def test_sobel_v_vertical(): (i, j) = np.mgrid[(-5):6, (-5):6] image = (j >= 0).astype(float) result = filters.sobel_v(image) j[(np.abs(i) == 5)] = 10000 assert np.all((result[(j == 0)] == 1)) assert np.all((result[(np.abs(j) > 1)] == 0))
[ "def", "test_sobel_v_vertical", "(", ")", ":", "(", "i", ",", "j", ")", "=", "np", ".", "mgrid", "[", "(", "-", "5", ")", ":", "6", ",", "(", "-", "5", ")", ":", "6", "]", "image", "=", "(", "j", ">=", "0", ")", ".", "astype", "(", "float", ")", "result", "=", "filters", ".", "sobel_v", "(", "image", ")", "j", "[", "(", "np", ".", "abs", "(", "i", ")", "==", "5", ")", "]", "=", "10000", "assert", "np", ".", "all", "(", "(", "result", "[", "(", "j", "==", "0", ")", "]", "==", "1", ")", ")", "assert", "np", ".", "all", "(", "(", "result", "[", "(", "np", ".", "abs", "(", "j", ")", ">", "1", ")", "]", "==", "0", ")", ")" ]
vertical sobel on an edge should be a vertical line .
train
false
15,055
def expand_paths(inputs): seen = {} for dirname in inputs: dirname = normalize_path(dirname) if (dirname in seen): continue seen[dirname] = 1 if (not os.path.isdir(dirname)): continue files = os.listdir(dirname) (yield (dirname, files)) for name in files: if (not name.endswith('.pth')): continue if (name in ('easy-install.pth', 'setuptools.pth')): continue f = open(os.path.join(dirname, name)) lines = list(yield_lines(f)) f.close() for line in lines: if (not line.startswith('import')): line = normalize_path(line.rstrip()) if (line not in seen): seen[line] = 1 if (not os.path.isdir(line)): continue (yield (line, os.listdir(line)))
[ "def", "expand_paths", "(", "inputs", ")", ":", "seen", "=", "{", "}", "for", "dirname", "in", "inputs", ":", "dirname", "=", "normalize_path", "(", "dirname", ")", "if", "(", "dirname", "in", "seen", ")", ":", "continue", "seen", "[", "dirname", "]", "=", "1", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dirname", ")", ")", ":", "continue", "files", "=", "os", ".", "listdir", "(", "dirname", ")", "(", "yield", "(", "dirname", ",", "files", ")", ")", "for", "name", "in", "files", ":", "if", "(", "not", "name", ".", "endswith", "(", "'.pth'", ")", ")", ":", "continue", "if", "(", "name", "in", "(", "'easy-install.pth'", ",", "'setuptools.pth'", ")", ")", ":", "continue", "f", "=", "open", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "name", ")", ")", "lines", "=", "list", "(", "yield_lines", "(", "f", ")", ")", "f", ".", "close", "(", ")", "for", "line", "in", "lines", ":", "if", "(", "not", "line", ".", "startswith", "(", "'import'", ")", ")", ":", "line", "=", "normalize_path", "(", "line", ".", "rstrip", "(", ")", ")", "if", "(", "line", "not", "in", "seen", ")", ":", "seen", "[", "line", "]", "=", "1", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "line", ")", ")", ":", "continue", "(", "yield", "(", "line", ",", "os", ".", "listdir", "(", "line", ")", ")", ")" ]
yield sys .
train
true
15,056
def batch_get_value(xs): if xs: return get_session().run(xs) else: return []
[ "def", "batch_get_value", "(", "xs", ")", ":", "if", "xs", ":", "return", "get_session", "(", ")", ".", "run", "(", "xs", ")", "else", ":", "return", "[", "]" ]
returns the value of more than one tensor variable .
train
false
15,057
@transaction.non_atomic_requests @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_global_staff @require_POST def start_certificate_generation(request, course_id): course_key = CourseKey.from_string(course_id) task = lms.djangoapps.instructor_task.api.generate_certificates_for_students(request, course_key) message = _('Certificate generation task for all students of this course has been started. You can view the status of the generation task in the "Pending Tasks" section.') response_payload = {'message': message, 'task_id': task.task_id} return JsonResponse(response_payload)
[ "@", "transaction", ".", "non_atomic_requests", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_global_staff", "@", "require_POST", "def", "start_certificate_generation", "(", "request", ",", "course_id", ")", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_id", ")", "task", "=", "lms", ".", "djangoapps", ".", "instructor_task", ".", "api", ".", "generate_certificates_for_students", "(", "request", ",", "course_key", ")", "message", "=", "_", "(", "'Certificate generation task for all students of this course has been started. You can view the status of the generation task in the \"Pending Tasks\" section.'", ")", "response_payload", "=", "{", "'message'", ":", "message", ",", "'task_id'", ":", "task", ".", "task_id", "}", "return", "JsonResponse", "(", "response_payload", ")" ]
start generating certificates for all students enrolled in given course .
train
false
15,059
def get_stories(f, only_supporting=False, max_length=None): data = parse_stories(f.readlines(), only_supporting=only_supporting) flatten = (lambda data: reduce((lambda x, y: (x + y)), data)) data = [(flatten(story), q, answer) for (story, q, answer) in data if ((not max_length) or (len(flatten(story)) < max_length))] return data
[ "def", "get_stories", "(", "f", ",", "only_supporting", "=", "False", ",", "max_length", "=", "None", ")", ":", "data", "=", "parse_stories", "(", "f", ".", "readlines", "(", ")", ",", "only_supporting", "=", "only_supporting", ")", "flatten", "=", "(", "lambda", "data", ":", "reduce", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "+", "y", ")", ")", ",", "data", ")", ")", "data", "=", "[", "(", "flatten", "(", "story", ")", ",", "q", ",", "answer", ")", "for", "(", "story", ",", "q", ",", "answer", ")", "in", "data", "if", "(", "(", "not", "max_length", ")", "or", "(", "len", "(", "flatten", "(", "story", ")", ")", "<", "max_length", ")", ")", "]", "return", "data" ]
given a file name .
train
false
15,060
def verify_signature(secret, parts): if (signature(secret, parts[:(-1)]) == parts[(-1)]): return True else: return False
[ "def", "verify_signature", "(", "secret", ",", "parts", ")", ":", "if", "(", "signature", "(", "secret", ",", "parts", "[", ":", "(", "-", "1", ")", "]", ")", "==", "parts", "[", "(", "-", "1", ")", "]", ")", ":", "return", "True", "else", ":", "return", "False" ]
check the signature in the message .
train
false
15,062
def ordinal(value): try: value = int(value) except (TypeError, ValueError): return value t = ('th', 'st', 'nd', 'rd', 'th', 'th', 'th', 'th', 'th', 'th') if ((value % 100) in (11, 12, 13)): return (u'%d%s' % (value, t[0])) return (u'%d%s' % (value, t[(value % 10)]))
[ "def", "ordinal", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "value", "t", "=", "(", "'th'", ",", "'st'", ",", "'nd'", ",", "'rd'", ",", "'th'", ",", "'th'", ",", "'th'", ",", "'th'", ",", "'th'", ",", "'th'", ")", "if", "(", "(", "value", "%", "100", ")", "in", "(", "11", ",", "12", ",", "13", ")", ")", ":", "return", "(", "u'%d%s'", "%", "(", "value", ",", "t", "[", "0", "]", ")", ")", "return", "(", "u'%d%s'", "%", "(", "value", ",", "t", "[", "(", "value", "%", "10", ")", "]", ")", ")" ]
converts an integer to its ordinal as a string .
train
false
15,063
def pnt_func(f): return double_output(f, [c_void_p, c_int])
[ "def", "pnt_func", "(", "f", ")", ":", "return", "double_output", "(", "f", ",", "[", "c_void_p", ",", "c_int", "]", ")" ]
for accessing point information .
train
false
15,064
def cms_documentation(r, default_page, default_url): row = r.record if (not row): name = r.get_vars.get('name', default_page) table = r.resource.table query = ((table.name == name) & (table.deleted != True)) row = current.db(query).select(table.id, table.title, table.body, limitby=(0, 1)).first() if (not row): if (name != default_page): r.error(404, current.T('Page not found'), next=URL(args=current.request.args, vars={})) else: from s3 import s3_redirect_default s3_redirect_default(default_url) from s3 import S3XMLContents return {'bypass': True, 'output': {'title': row.title, 'contents': S3XMLContents(row.body)}}
[ "def", "cms_documentation", "(", "r", ",", "default_page", ",", "default_url", ")", ":", "row", "=", "r", ".", "record", "if", "(", "not", "row", ")", ":", "name", "=", "r", ".", "get_vars", ".", "get", "(", "'name'", ",", "default_page", ")", "table", "=", "r", ".", "resource", ".", "table", "query", "=", "(", "(", "table", ".", "name", "==", "name", ")", "&", "(", "table", ".", "deleted", "!=", "True", ")", ")", "row", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "table", ".", "id", ",", "table", ".", "title", ",", "table", ".", "body", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "if", "(", "not", "row", ")", ":", "if", "(", "name", "!=", "default_page", ")", ":", "r", ".", "error", "(", "404", ",", "current", ".", "T", "(", "'Page not found'", ")", ",", "next", "=", "URL", "(", "args", "=", "current", ".", "request", ".", "args", ",", "vars", "=", "{", "}", ")", ")", "else", ":", "from", "s3", "import", "s3_redirect_default", "s3_redirect_default", "(", "default_url", ")", "from", "s3", "import", "S3XMLContents", "return", "{", "'bypass'", ":", "True", ",", "'output'", ":", "{", "'title'", ":", "row", ".", "title", ",", "'contents'", ":", "S3XMLContents", "(", "row", ".", "body", ")", "}", "}" ]
render an online documentation page .
train
false
15,065
def git_line(*items): return (' '.join(items) + '\n')
[ "def", "git_line", "(", "*", "items", ")", ":", "return", "(", "' '", ".", "join", "(", "items", ")", "+", "'\\n'", ")" ]
formats items into a space sepreated line .
train
false
15,066
def sanitizeStr(value): return getUnicode(value).replace('\n', ' ').replace('\r', '')
[ "def", "sanitizeStr", "(", "value", ")", ":", "return", "getUnicode", "(", "value", ")", ".", "replace", "(", "'\\n'", ",", "' '", ")", ".", "replace", "(", "'\\r'", ",", "''", ")" ]
sanitizes string value in respect to newline and line-feed characters .
train
false
15,067
@task def sharejs(ctx, host=None, port=None, db_url=None, cors_allow_origin=None): if host: os.environ['SHAREJS_SERVER_HOST'] = host if port: os.environ['SHAREJS_SERVER_PORT'] = port if db_url: os.environ['SHAREJS_DB_URL'] = db_url if cors_allow_origin: os.environ['SHAREJS_CORS_ALLOW_ORIGIN'] = cors_allow_origin if settings.SENTRY_DSN: os.environ['SHAREJS_SENTRY_DSN'] = settings.SENTRY_DSN share_server = os.path.join(settings.ADDON_PATH, 'wiki', 'shareServer.js') ctx.run('node {0}'.format(share_server))
[ "@", "task", "def", "sharejs", "(", "ctx", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db_url", "=", "None", ",", "cors_allow_origin", "=", "None", ")", ":", "if", "host", ":", "os", ".", "environ", "[", "'SHAREJS_SERVER_HOST'", "]", "=", "host", "if", "port", ":", "os", ".", "environ", "[", "'SHAREJS_SERVER_PORT'", "]", "=", "port", "if", "db_url", ":", "os", ".", "environ", "[", "'SHAREJS_DB_URL'", "]", "=", "db_url", "if", "cors_allow_origin", ":", "os", ".", "environ", "[", "'SHAREJS_CORS_ALLOW_ORIGIN'", "]", "=", "cors_allow_origin", "if", "settings", ".", "SENTRY_DSN", ":", "os", ".", "environ", "[", "'SHAREJS_SENTRY_DSN'", "]", "=", "settings", ".", "SENTRY_DSN", "share_server", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "ADDON_PATH", ",", "'wiki'", ",", "'shareServer.js'", ")", "ctx", ".", "run", "(", "'node {0}'", ".", "format", "(", "share_server", ")", ")" ]
start a local sharejs server .
train
false
15,068
def assign_role(course_id, user, rolename): (role, __) = Role.objects.get_or_create(course_id=course_id, name=rolename) user.roles.add(role)
[ "def", "assign_role", "(", "course_id", ",", "user", ",", "rolename", ")", ":", "(", "role", ",", "__", ")", "=", "Role", ".", "objects", ".", "get_or_create", "(", "course_id", "=", "course_id", ",", "name", "=", "rolename", ")", "user", ".", "roles", ".", "add", "(", "role", ")" ]
assign forum role rolename to user .
train
false
15,069
def perform_m_search(local_ip): search_target = 'urn:schemas-upnp-org:device:InternetGatewayDevice:1' ssdp_request = ''.join(['M-SEARCH * HTTP/1.1\r\n', 'HOST: 239.255.255.250:1900\r\n', 'MAN: "ssdp:discover"\r\n', 'MX: 2\r\n', 'ST: {0}\r\n'.format(search_target), '\r\n']) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((local_ip, 10000)) sock.sendto(ssdp_request, ('239.255.255.250', 1900)) if (local_ip == '127.0.0.1'): sock.settimeout(1) else: sock.settimeout(5) try: return sock.recv(2048) except socket.error: raise UpnpError('No reply from IGD using {} as IP'.format(local_ip)) finally: sock.close()
[ "def", "perform_m_search", "(", "local_ip", ")", ":", "search_target", "=", "'urn:schemas-upnp-org:device:InternetGatewayDevice:1'", "ssdp_request", "=", "''", ".", "join", "(", "[", "'M-SEARCH * HTTP/1.1\\r\\n'", ",", "'HOST: 239.255.255.250:1900\\r\\n'", ",", "'MAN: \"ssdp:discover\"\\r\\n'", ",", "'MX: 2\\r\\n'", ",", "'ST: {0}\\r\\n'", ".", "format", "(", "search_target", ")", ",", "'\\r\\n'", "]", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "sock", ".", "bind", "(", "(", "local_ip", ",", "10000", ")", ")", "sock", ".", "sendto", "(", "ssdp_request", ",", "(", "'239.255.255.250'", ",", "1900", ")", ")", "if", "(", "local_ip", "==", "'127.0.0.1'", ")", ":", "sock", ".", "settimeout", "(", "1", ")", "else", ":", "sock", ".", "settimeout", "(", "5", ")", "try", ":", "return", "sock", ".", "recv", "(", "2048", ")", "except", "socket", ".", "error", ":", "raise", "UpnpError", "(", "'No reply from IGD using {} as IP'", ".", "format", "(", "local_ip", ")", ")", "finally", ":", "sock", ".", "close", "(", ")" ]
broadcast a udp ssdp m-search packet and return response .
train
false
15,070
def set_system_date(newdate, utc_offset=None): fmts = ['%Y-%m-%d', '%m-%d-%Y', '%m-%d-%y', '%m/%d/%Y', '%m/%d/%y', '%Y/%m/%d'] dt_obj = _try_parse_datetime(newdate, fmts) if (dt_obj is None): raise SaltInvocationError('Invalid date format') return set_system_date_time(years=dt_obj.year, months=dt_obj.month, days=dt_obj.day, utc_offset=utc_offset)
[ "def", "set_system_date", "(", "newdate", ",", "utc_offset", "=", "None", ")", ":", "fmts", "=", "[", "'%Y-%m-%d'", ",", "'%m-%d-%Y'", ",", "'%m-%d-%y'", ",", "'%m/%d/%Y'", ",", "'%m/%d/%y'", ",", "'%Y/%m/%d'", "]", "dt_obj", "=", "_try_parse_datetime", "(", "newdate", ",", "fmts", ")", "if", "(", "dt_obj", "is", "None", ")", ":", "raise", "SaltInvocationError", "(", "'Invalid date format'", ")", "return", "set_system_date_time", "(", "years", "=", "dt_obj", ".", "year", ",", "months", "=", "dt_obj", ".", "month", ",", "days", "=", "dt_obj", ".", "day", ",", "utc_offset", "=", "utc_offset", ")" ]
set the windows system date .
train
true
15,075
def is_builtin_class_method(obj, attr_name): classes = [c for c in inspect.getmro(obj) if (attr_name in c.__dict__)] cls = (classes[0] if classes else object) if (not hasattr(builtins, safe_getattr(cls, '__name__', ''))): return False return (getattr(builtins, safe_getattr(cls, '__name__', '')) is cls)
[ "def", "is_builtin_class_method", "(", "obj", ",", "attr_name", ")", ":", "classes", "=", "[", "c", "for", "c", "in", "inspect", ".", "getmro", "(", "obj", ")", "if", "(", "attr_name", "in", "c", ".", "__dict__", ")", "]", "cls", "=", "(", "classes", "[", "0", "]", "if", "classes", "else", "object", ")", "if", "(", "not", "hasattr", "(", "builtins", ",", "safe_getattr", "(", "cls", ",", "'__name__'", ",", "''", ")", ")", ")", ":", "return", "False", "return", "(", "getattr", "(", "builtins", ",", "safe_getattr", "(", "cls", ",", "'__name__'", ",", "''", ")", ")", "is", "cls", ")" ]
if attr_name is implemented at builtin class .
train
false
15,076
def s_binary(value, name=None): parsed = value parsed = parsed.replace(' ', '') parsed = parsed.replace(' DCTB ', '') parsed = parsed.replace('\r', '') parsed = parsed.replace('\n', '') parsed = parsed.replace(',', '') parsed = parsed.replace('0x', '') parsed = parsed.replace('\\x', '') value = '' while parsed: pair = parsed[:2] parsed = parsed[2:] value += chr(int(pair, 16)) static = primitives.static(value, name) blocks.CURRENT.push(static)
[ "def", "s_binary", "(", "value", ",", "name", "=", "None", ")", ":", "parsed", "=", "value", "parsed", "=", "parsed", ".", "replace", "(", "' '", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "' DCTB '", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "'\\r'", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "'\\n'", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "','", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "'0x'", ",", "''", ")", "parsed", "=", "parsed", ".", "replace", "(", "'\\\\x'", ",", "''", ")", "value", "=", "''", "while", "parsed", ":", "pair", "=", "parsed", "[", ":", "2", "]", "parsed", "=", "parsed", "[", "2", ":", "]", "value", "+=", "chr", "(", "int", "(", "pair", ",", "16", ")", ")", "static", "=", "primitives", ".", "static", "(", "value", ",", "name", ")", "blocks", ".", "CURRENT", ".", "push", "(", "static", ")" ]
parse a variable format binary string into a static value and push it onto the current block stack .
train
false
15,079
@pytest.mark.skipif('not HAS_BEAUTIFUL_SOUP') def test_no_names(): table_in = ['<table>', '<tr><td>1</td></tr>', '<tr><td>2</td></tr>', '</table>'] dat = Table.read(table_in, format='ascii.html') assert (dat.colnames == ['col1']) assert (len(dat) == 2) dat = Table.read(table_in, format='ascii.html', names=['a']) assert (dat.colnames == ['a']) assert (len(dat) == 2)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "'not HAS_BEAUTIFUL_SOUP'", ")", "def", "test_no_names", "(", ")", ":", "table_in", "=", "[", "'<table>'", ",", "'<tr><td>1</td></tr>'", ",", "'<tr><td>2</td></tr>'", ",", "'</table>'", "]", "dat", "=", "Table", ".", "read", "(", "table_in", ",", "format", "=", "'ascii.html'", ")", "assert", "(", "dat", ".", "colnames", "==", "[", "'col1'", "]", ")", "assert", "(", "len", "(", "dat", ")", "==", "2", ")", "dat", "=", "Table", ".", "read", "(", "table_in", ",", "format", "=", "'ascii.html'", ",", "names", "=", "[", "'a'", "]", ")", "assert", "(", "dat", ".", "colnames", "==", "[", "'a'", "]", ")", "assert", "(", "len", "(", "dat", ")", "==", "2", ")" ]
test reading a table witn no column header .
train
false
15,080
def get_path_style(path, fill=True): style = {} style['alpha'] = path.get_alpha() if (style['alpha'] is None): style['alpha'] = 1 style['edgecolor'] = color_to_hex(path.get_edgecolor()) if fill: style['facecolor'] = color_to_hex(path.get_facecolor()) else: style['facecolor'] = 'none' style['edgewidth'] = path.get_linewidth() style['dasharray'] = get_dasharray(path) style['zorder'] = path.get_zorder() return style
[ "def", "get_path_style", "(", "path", ",", "fill", "=", "True", ")", ":", "style", "=", "{", "}", "style", "[", "'alpha'", "]", "=", "path", ".", "get_alpha", "(", ")", "if", "(", "style", "[", "'alpha'", "]", "is", "None", ")", ":", "style", "[", "'alpha'", "]", "=", "1", "style", "[", "'edgecolor'", "]", "=", "color_to_hex", "(", "path", ".", "get_edgecolor", "(", ")", ")", "if", "fill", ":", "style", "[", "'facecolor'", "]", "=", "color_to_hex", "(", "path", ".", "get_facecolor", "(", ")", ")", "else", ":", "style", "[", "'facecolor'", "]", "=", "'none'", "style", "[", "'edgewidth'", "]", "=", "path", ".", "get_linewidth", "(", ")", "style", "[", "'dasharray'", "]", "=", "get_dasharray", "(", "path", ")", "style", "[", "'zorder'", "]", "=", "path", ".", "get_zorder", "(", ")", "return", "style" ]
get the style dictionary for matplotlib path objects .
train
true
15,081
def dllFromDll(dllid): if (dllid == None): return None elif (type('') == type(dllid)): return win32ui.LoadLibrary(dllid) else: try: dllid.GetFileName() except AttributeError: raise TypeError('DLL parameter must be None, a filename or a dll object') return dllid
[ "def", "dllFromDll", "(", "dllid", ")", ":", "if", "(", "dllid", "==", "None", ")", ":", "return", "None", "elif", "(", "type", "(", "''", ")", "==", "type", "(", "dllid", ")", ")", ":", "return", "win32ui", ".", "LoadLibrary", "(", "dllid", ")", "else", ":", "try", ":", "dllid", ".", "GetFileName", "(", ")", "except", "AttributeError", ":", "raise", "TypeError", "(", "'DLL parameter must be None, a filename or a dll object'", ")", "return", "dllid" ]
given a dll .
train
false
15,082
def is_available_extension(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None): exts = available_extensions(user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas) if (name.lower() in [a.lower() for a in exts]): return True return False
[ "def", "is_available_extension", "(", "name", ",", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "runas", "=", "None", ")", ":", "exts", "=", "available_extensions", "(", "user", "=", "user", ",", "host", "=", "host", ",", "port", "=", "port", ",", "maintenance_db", "=", "maintenance_db", ",", "password", "=", "password", ",", "runas", "=", "runas", ")", "if", "(", "name", ".", "lower", "(", ")", "in", "[", "a", ".", "lower", "(", ")", "for", "a", "in", "exts", "]", ")", ":", "return", "True", "return", "False" ]
test if a specific extension is available cli example: .
train
true
15,084
def list_tables(dataset_name, project=None): bigquery_client = bigquery.Client(project=project) dataset = bigquery_client.dataset(dataset_name) if (not dataset.exists()): print 'Dataset {} does not exist.'.format(dataset_name) return for table in dataset.list_tables(): print table.name
[ "def", "list_tables", "(", "dataset_name", ",", "project", "=", "None", ")", ":", "bigquery_client", "=", "bigquery", ".", "Client", "(", "project", "=", "project", ")", "dataset", "=", "bigquery_client", ".", "dataset", "(", "dataset_name", ")", "if", "(", "not", "dataset", ".", "exists", "(", ")", ")", ":", "print", "'Dataset {} does not exist.'", ".", "format", "(", "dataset_name", ")", "return", "for", "table", "in", "dataset", ".", "list_tables", "(", ")", ":", "print", "table", ".", "name" ]
lists all of the tables in a given dataset .
train
false
15,086
def receive_message(sock, operation, request_id, max_message_size=MAX_MESSAGE_SIZE): header = _receive_data_on_socket(sock, 16) length = _UNPACK_INT(header[:4])[0] actual_op = _UNPACK_INT(header[12:])[0] if (operation != actual_op): raise ProtocolError(('Got opcode %r but expected %r' % (actual_op, operation))) if (request_id is not None): response_id = _UNPACK_INT(header[8:12])[0] if (request_id != response_id): raise ProtocolError(('Got response id %r but expected %r' % (response_id, request_id))) if (length <= 16): raise ProtocolError(('Message length (%r) not longer than standard message header size (16)' % (length,))) if (length > max_message_size): raise ProtocolError(('Message length (%r) is larger than server max message size (%r)' % (length, max_message_size))) return _receive_data_on_socket(sock, (length - 16))
[ "def", "receive_message", "(", "sock", ",", "operation", ",", "request_id", ",", "max_message_size", "=", "MAX_MESSAGE_SIZE", ")", ":", "header", "=", "_receive_data_on_socket", "(", "sock", ",", "16", ")", "length", "=", "_UNPACK_INT", "(", "header", "[", ":", "4", "]", ")", "[", "0", "]", "actual_op", "=", "_UNPACK_INT", "(", "header", "[", "12", ":", "]", ")", "[", "0", "]", "if", "(", "operation", "!=", "actual_op", ")", ":", "raise", "ProtocolError", "(", "(", "'Got opcode %r but expected %r'", "%", "(", "actual_op", ",", "operation", ")", ")", ")", "if", "(", "request_id", "is", "not", "None", ")", ":", "response_id", "=", "_UNPACK_INT", "(", "header", "[", "8", ":", "12", "]", ")", "[", "0", "]", "if", "(", "request_id", "!=", "response_id", ")", ":", "raise", "ProtocolError", "(", "(", "'Got response id %r but expected %r'", "%", "(", "response_id", ",", "request_id", ")", ")", ")", "if", "(", "length", "<=", "16", ")", ":", "raise", "ProtocolError", "(", "(", "'Message length (%r) not longer than standard message header size (16)'", "%", "(", "length", ",", ")", ")", ")", "if", "(", "length", ">", "max_message_size", ")", ":", "raise", "ProtocolError", "(", "(", "'Message length (%r) is larger than server max message size (%r)'", "%", "(", "length", ",", "max_message_size", ")", ")", ")", "return", "_receive_data_on_socket", "(", "sock", ",", "(", "length", "-", "16", ")", ")" ]
run a label request on an image received from twilio .
train
true
15,088
def compare_forwards(f1, f2): assert_allclose(f1['sol']['data'], f2['sol']['data']) assert_equal(f1['sol']['ncol'], f2['sol']['ncol']) assert_allclose(f1['source_nn'], f2['source_nn']) if (f1['sol_grad'] is not None): assert_true((f2['sol_grad'] is not None)) assert_allclose(f1['sol_grad']['data'], f2['sol_grad']['data']) assert_equal(f1['sol_grad']['ncol'], f2['sol_grad']['ncol']) else: assert_true((f2['sol_grad'] is None)) assert_equal(f1['source_ori'], f2['source_ori']) assert_equal(f1['surf_ori'], f2['surf_ori'])
[ "def", "compare_forwards", "(", "f1", ",", "f2", ")", ":", "assert_allclose", "(", "f1", "[", "'sol'", "]", "[", "'data'", "]", ",", "f2", "[", "'sol'", "]", "[", "'data'", "]", ")", "assert_equal", "(", "f1", "[", "'sol'", "]", "[", "'ncol'", "]", ",", "f2", "[", "'sol'", "]", "[", "'ncol'", "]", ")", "assert_allclose", "(", "f1", "[", "'source_nn'", "]", ",", "f2", "[", "'source_nn'", "]", ")", "if", "(", "f1", "[", "'sol_grad'", "]", "is", "not", "None", ")", ":", "assert_true", "(", "(", "f2", "[", "'sol_grad'", "]", "is", "not", "None", ")", ")", "assert_allclose", "(", "f1", "[", "'sol_grad'", "]", "[", "'data'", "]", ",", "f2", "[", "'sol_grad'", "]", "[", "'data'", "]", ")", "assert_equal", "(", "f1", "[", "'sol_grad'", "]", "[", "'ncol'", "]", ",", "f2", "[", "'sol_grad'", "]", "[", "'ncol'", "]", ")", "else", ":", "assert_true", "(", "(", "f2", "[", "'sol_grad'", "]", "is", "None", ")", ")", "assert_equal", "(", "f1", "[", "'source_ori'", "]", ",", "f2", "[", "'source_ori'", "]", ")", "assert_equal", "(", "f1", "[", "'surf_ori'", "]", ",", "f2", "[", "'surf_ori'", "]", ")" ]
helper to compare two potentially converted forward solutions .
train
false
15,089
def compute_P_from_fundamental(F): e = compute_epipole(F.T) Te = skew(e) return vstack((dot(Te, F.T).T, e)).T
[ "def", "compute_P_from_fundamental", "(", "F", ")", ":", "e", "=", "compute_epipole", "(", "F", ".", "T", ")", "Te", "=", "skew", "(", "e", ")", "return", "vstack", "(", "(", "dot", "(", "Te", ",", "F", ".", "T", ")", ".", "T", ",", "e", ")", ")", ".", "T" ]
computes the second camera matrix from a fundamental matrix .
train
false
15,090
def autoTrack(clip, pattern, tt=None, fps=None, radius=20, xy0=None): if (not autotracking_possible): raise IOError('Sorry, autotrack requires OpenCV for the moment. Install OpenCV (aka cv2) to use it.') if (not xy0): xy0 = findAround(clip.get_frame(tt[0]), pattern) if (tt is None): tt = np.arange(0, clip.duration, (1.0 / fps)) xys = [xy0] for t in tt[1:]: xys.append(findAround(clip.get_frame(t), pattern, xy=xys[(-1)], r=radius)) (xx, yy) = zip(*xys) return Trajectory(tt, xx, yy)
[ "def", "autoTrack", "(", "clip", ",", "pattern", ",", "tt", "=", "None", ",", "fps", "=", "None", ",", "radius", "=", "20", ",", "xy0", "=", "None", ")", ":", "if", "(", "not", "autotracking_possible", ")", ":", "raise", "IOError", "(", "'Sorry, autotrack requires OpenCV for the moment. Install OpenCV (aka cv2) to use it.'", ")", "if", "(", "not", "xy0", ")", ":", "xy0", "=", "findAround", "(", "clip", ".", "get_frame", "(", "tt", "[", "0", "]", ")", ",", "pattern", ")", "if", "(", "tt", "is", "None", ")", ":", "tt", "=", "np", ".", "arange", "(", "0", ",", "clip", ".", "duration", ",", "(", "1.0", "/", "fps", ")", ")", "xys", "=", "[", "xy0", "]", "for", "t", "in", "tt", "[", "1", ":", "]", ":", "xys", ".", "append", "(", "findAround", "(", "clip", ".", "get_frame", "(", "t", ")", ",", "pattern", ",", "xy", "=", "xys", "[", "(", "-", "1", ")", "]", ",", "r", "=", "radius", ")", ")", "(", "xx", ",", "yy", ")", "=", "zip", "(", "*", "xys", ")", "return", "Trajectory", "(", "tt", ",", "xx", ",", "yy", ")" ]
tracks a given pattern in a video clip .
train
false
15,091
@image_comparison(baseline_images=[u'bbox_inches_tight_raster'], remove_text=True, savefig_kwarg={u'bbox_inches': u'tight'}) def test_bbox_inches_tight_raster(): fig = plt.figure() ax = fig.add_subplot(111) ax.plot([1.0, 2.0], rasterized=True)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'bbox_inches_tight_raster'", "]", ",", "remove_text", "=", "True", ",", "savefig_kwarg", "=", "{", "u'bbox_inches'", ":", "u'tight'", "}", ")", "def", "test_bbox_inches_tight_raster", "(", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "plot", "(", "[", "1.0", ",", "2.0", "]", ",", "rasterized", "=", "True", ")" ]
test rasterization with tight_layout .
train
false
15,092
@hook.command() @hook.command('tv_prev') def tv_last(text, bot=None): api_key = bot.config.get('api_keys', {}).get('tvdb', None) if (api_key is None): return 'error: no api key set' episodes = get_episodes_for_series(text, api_key) if episodes['error']: return episodes['error'] series_name = episodes['name'] ended = episodes['ended'] episodes = episodes['episodes'] prev_ep = None today = datetime.date.today() for episode in reversed(episodes): ep_info = get_episode_info(episode) if (ep_info is None): continue (first_aired, air_date, episode_desc) = ep_info if (air_date < today): prev_ep = '{} ({})'.format(first_aired, episode_desc) break if (not prev_ep): return 'There are no previously aired episodes for {}.'.format(series_name) if ended: return '{} ended. The last episode aired {}.'.format(series_name, prev_ep) return 'The last episode of {} aired {}.'.format(series_name, prev_ep)
[ "@", "hook", ".", "command", "(", ")", "@", "hook", ".", "command", "(", "'tv_prev'", ")", "def", "tv_last", "(", "text", ",", "bot", "=", "None", ")", ":", "api_key", "=", "bot", ".", "config", ".", "get", "(", "'api_keys'", ",", "{", "}", ")", ".", "get", "(", "'tvdb'", ",", "None", ")", "if", "(", "api_key", "is", "None", ")", ":", "return", "'error: no api key set'", "episodes", "=", "get_episodes_for_series", "(", "text", ",", "api_key", ")", "if", "episodes", "[", "'error'", "]", ":", "return", "episodes", "[", "'error'", "]", "series_name", "=", "episodes", "[", "'name'", "]", "ended", "=", "episodes", "[", "'ended'", "]", "episodes", "=", "episodes", "[", "'episodes'", "]", "prev_ep", "=", "None", "today", "=", "datetime", ".", "date", ".", "today", "(", ")", "for", "episode", "in", "reversed", "(", "episodes", ")", ":", "ep_info", "=", "get_episode_info", "(", "episode", ")", "if", "(", "ep_info", "is", "None", ")", ":", "continue", "(", "first_aired", ",", "air_date", ",", "episode_desc", ")", "=", "ep_info", "if", "(", "air_date", "<", "today", ")", ":", "prev_ep", "=", "'{} ({})'", ".", "format", "(", "first_aired", ",", "episode_desc", ")", "break", "if", "(", "not", "prev_ep", ")", ":", "return", "'There are no previously aired episodes for {}.'", ".", "format", "(", "series_name", ")", "if", "ended", ":", "return", "'{} ended. The last episode aired {}.'", ".", "format", "(", "series_name", ",", "prev_ep", ")", "return", "'The last episode of {} aired {}.'", ".", "format", "(", "series_name", ",", "prev_ep", ")" ]
tv_last <series> -- gets the most recently aired episode of <series> .
train
false
15,093
def iscode(object): return isinstance(object, types.CodeType)
[ "def", "iscode", "(", "object", ")", ":", "return", "isinstance", "(", "object", ",", "types", ".", "CodeType", ")" ]
return true if the object is a code object .
train
false
15,094
def test_url_incorrect_case_file_index(script, data): result = script.pip('install', '--index-url', data.find_links3, 'dinner', expect_stderr=True) egg_folder = ((script.site_packages / 'Dinner-1.0-py%s.egg-info') % pyversion) assert (egg_folder not in result.files_created), str(result) egg_folder = ((script.site_packages / 'Dinner-2.0-py%s.egg-info') % pyversion) assert (egg_folder in result.files_created), str(result)
[ "def", "test_url_incorrect_case_file_index", "(", "script", ",", "data", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--index-url'", ",", "data", ".", "find_links3", ",", "'dinner'", ",", "expect_stderr", "=", "True", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'Dinner-1.0-py%s.egg-info'", ")", "%", "pyversion", ")", "assert", "(", "egg_folder", "not", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")", "egg_folder", "=", "(", "(", "script", ".", "site_packages", "/", "'Dinner-2.0-py%s.egg-info'", ")", "%", "pyversion", ")", "assert", "(", "egg_folder", "in", "result", ".", "files_created", ")", ",", "str", "(", "result", ")" ]
same as test_url_req_case_mismatch_file_index .
train
false
15,095
def query_helper(query, add=None, remove=None): add = string_to_dict(add) remove = string_to_list(remove) params = query.copy() return get_query_string(params, add, remove)
[ "def", "query_helper", "(", "query", ",", "add", "=", "None", ",", "remove", "=", "None", ")", ":", "add", "=", "string_to_dict", "(", "add", ")", "remove", "=", "string_to_list", "(", "remove", ")", "params", "=", "query", ".", "copy", "(", ")", "return", "get_query_string", "(", "params", ",", "add", ",", "remove", ")" ]
helper function for use within views .
train
false
15,096
def do_collectstatic(): try: build.make_collectstatic() return True except (OSError, SystemError) as ex: LOG.error(("Failed to collect the static files. Please fix any problem and run `%s --collectstatic'\n%s" % (PROG_NAME, ex))) return False
[ "def", "do_collectstatic", "(", ")", ":", "try", ":", "build", ".", "make_collectstatic", "(", ")", "return", "True", "except", "(", "OSError", ",", "SystemError", ")", "as", "ex", ":", "LOG", ".", "error", "(", "(", "\"Failed to collect the static files. Please fix any problem and run `%s --collectstatic'\\n%s\"", "%", "(", "PROG_NAME", ",", "ex", ")", ")", ")", "return", "False" ]
collects the static files .
train
false
15,099
def get_archive_content(filename): archive = get_archive(filename) stack.append((filename, archive)) output = [] get_content(archive, recursive=True, brief=True, output=output) do_cleanup() return output
[ "def", "get_archive_content", "(", "filename", ")", ":", "archive", "=", "get_archive", "(", "filename", ")", "stack", ".", "append", "(", "(", "filename", ",", "archive", ")", ")", "output", "=", "[", "]", "get_content", "(", "archive", ",", "recursive", "=", "True", ",", "brief", "=", "True", ",", "output", "=", "output", ")", "do_cleanup", "(", ")", "return", "output" ]
get a list of the content of archive filename .
train
false
15,101
@public def factor_list(f, *gens, **args): return _generic_factor_list(f, gens, args, method='factor')
[ "@", "public", "def", "factor_list", "(", "f", ",", "*", "gens", ",", "**", "args", ")", ":", "return", "_generic_factor_list", "(", "f", ",", "gens", ",", "args", ",", "method", "=", "'factor'", ")" ]
compute a list of irreducible factors of f .
train
false
15,102
def find_cached_job(jid): serial = salt.payload.Serial(__opts__) proc_dir = os.path.join(__opts__['cachedir'], 'minion_jobs') job_dir = os.path.join(proc_dir, str(jid)) if (not os.path.isdir(job_dir)): if (not __opts__.get('cache_jobs')): return 'Local jobs cache directory not found; you may need to enable cache_jobs on this minion' else: return 'Local jobs cache directory {0} not found'.format(job_dir) path = os.path.join(job_dir, 'return.p') with salt.utils.fopen(path, 'rb') as fp_: buf = fp_.read() fp_.close() if buf: try: data = serial.loads(buf) except NameError: return else: return if (not isinstance(data, dict)): return return data
[ "def", "find_cached_job", "(", "jid", ")", ":", "serial", "=", "salt", ".", "payload", ".", "Serial", "(", "__opts__", ")", "proc_dir", "=", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'cachedir'", "]", ",", "'minion_jobs'", ")", "job_dir", "=", "os", ".", "path", ".", "join", "(", "proc_dir", ",", "str", "(", "jid", ")", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "job_dir", ")", ")", ":", "if", "(", "not", "__opts__", ".", "get", "(", "'cache_jobs'", ")", ")", ":", "return", "'Local jobs cache directory not found; you may need to enable cache_jobs on this minion'", "else", ":", "return", "'Local jobs cache directory {0} not found'", ".", "format", "(", "job_dir", ")", "path", "=", "os", ".", "path", ".", "join", "(", "job_dir", ",", "'return.p'", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "path", ",", "'rb'", ")", "as", "fp_", ":", "buf", "=", "fp_", ".", "read", "(", ")", "fp_", ".", "close", "(", ")", "if", "buf", ":", "try", ":", "data", "=", "serial", ".", "loads", "(", "buf", ")", "except", "NameError", ":", "return", "else", ":", "return", "if", "(", "not", "isinstance", "(", "data", ",", "dict", ")", ")", ":", "return", "return", "data" ]
return the data for a specific cached job id .
train
true
15,105
def lod_to_dod(_list, indexkey): results = {} for item in _list: results[item[indexkey]] = item return results
[ "def", "lod_to_dod", "(", "_list", ",", "indexkey", ")", ":", "results", "=", "{", "}", "for", "item", "in", "_list", ":", "results", "[", "item", "[", "indexkey", "]", "]", "=", "item", "return", "results" ]
things like get_distros() returns a list of a dictionaries convert this to a dict of dicts keyed off of an arbitrary field ex: [ { "a" : 2 } .
train
false
15,106
def replace_leaf(arg, leaves, new_leaves, op, neg): for (idx, x) in enumerate(leaves): if (x[0] == arg): x[1][0] ^= neg x[1][1] = op(arg) leaves.pop(idx) new_leaves.append(x) return True return False
[ "def", "replace_leaf", "(", "arg", ",", "leaves", ",", "new_leaves", ",", "op", ",", "neg", ")", ":", "for", "(", "idx", ",", "x", ")", "in", "enumerate", "(", "leaves", ")", ":", "if", "(", "x", "[", "0", "]", "==", "arg", ")", ":", "x", "[", "1", "]", "[", "0", "]", "^=", "neg", "x", "[", "1", "]", "[", "1", "]", "=", "op", "(", "arg", ")", "leaves", ".", "pop", "(", "idx", ")", "new_leaves", ".", "append", "(", "x", ")", "return", "True", "return", "False" ]
attempt to replace a leaf of a multiplication tree .
train
false
15,107
def get_event_source_mapping_ids(EventSourceArn, FunctionName, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: mappings = [] for maps in salt.utils.boto3.paged_call(conn.list_event_source_mappings, EventSourceArn=EventSourceArn, FunctionName=FunctionName): mappings.extend([mapping['UUID'] for mapping in maps['EventSourceMappings']]) return mappings except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "get_event_source_mapping_ids", "(", "EventSourceArn", ",", "FunctionName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "mappings", "=", "[", "]", "for", "maps", "in", "salt", ".", "utils", ".", "boto3", ".", "paged_call", "(", "conn", ".", "list_event_source_mappings", ",", "EventSourceArn", "=", "EventSourceArn", ",", "FunctionName", "=", "FunctionName", ")", ":", "mappings", ".", "extend", "(", "[", "mapping", "[", "'UUID'", "]", "for", "mapping", "in", "maps", "[", "'EventSourceMappings'", "]", "]", ")", "return", "mappings", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
given an event source and function name .
train
false
15,109
@validation_task def validate_file(file_id, hash_, is_webextension=False, **kw): file_ = File.objects.get(pk=file_id) try: return file_.validation.validation except FileValidation.DoesNotExist: listed = (file_.version.channel == amo.RELEASE_CHANNEL_LISTED) if is_webextension: return run_addons_linter(file_.current_file_path, listed=listed) return run_validator(file_.current_file_path, listed=listed)
[ "@", "validation_task", "def", "validate_file", "(", "file_id", ",", "hash_", ",", "is_webextension", "=", "False", ",", "**", "kw", ")", ":", "file_", "=", "File", ".", "objects", ".", "get", "(", "pk", "=", "file_id", ")", "try", ":", "return", "file_", ".", "validation", ".", "validation", "except", "FileValidation", ".", "DoesNotExist", ":", "listed", "=", "(", "file_", ".", "version", ".", "channel", "==", "amo", ".", "RELEASE_CHANNEL_LISTED", ")", "if", "is_webextension", ":", "return", "run_addons_linter", "(", "file_", ".", "current_file_path", ",", "listed", "=", "listed", ")", "return", "run_validator", "(", "file_", ".", "current_file_path", ",", "listed", "=", "listed", ")" ]
validate if requirements_all .
train
false
15,110
def test_getattr_exceptions(): class AttributeTest(object, ): def __getattr__(self, name): raise AttributeError('catch me') x = AttributeTest() try: y = x.throws except AttributeError as ex: AreEqual(ex.args, ('catch me',)) else: Fail('should have thrown')
[ "def", "test_getattr_exceptions", "(", ")", ":", "class", "AttributeTest", "(", "object", ",", ")", ":", "def", "__getattr__", "(", "self", ",", "name", ")", ":", "raise", "AttributeError", "(", "'catch me'", ")", "x", "=", "AttributeTest", "(", ")", "try", ":", "y", "=", "x", ".", "throws", "except", "AttributeError", "as", "ex", ":", "AreEqual", "(", "ex", ".", "args", ",", "(", "'catch me'", ",", ")", ")", "else", ":", "Fail", "(", "'should have thrown'", ")" ]
verify the original exception propagates out .
train
false
15,112
def readmission(aFileName): print ('\nReading mission from file: %s' % aFileName) cmds = vehicle.commands missionlist = [] with open(aFileName) as f: for (i, line) in enumerate(f): if (i == 0): if (not line.startswith('QGC WPL 110')): raise Exception('File is not supported WP version') else: linearray = line.split(' DCTB ') ln_index = int(linearray[0]) ln_currentwp = int(linearray[1]) ln_frame = int(linearray[2]) ln_command = int(linearray[3]) ln_param1 = float(linearray[4]) ln_param2 = float(linearray[5]) ln_param3 = float(linearray[6]) ln_param4 = float(linearray[7]) ln_param5 = float(linearray[8]) ln_param6 = float(linearray[9]) ln_param7 = float(linearray[10]) ln_autocontinue = int(linearray[11].strip()) cmd = Command(0, 0, 0, ln_frame, ln_command, ln_currentwp, ln_autocontinue, ln_param1, ln_param2, ln_param3, ln_param4, ln_param5, ln_param6, ln_param7) missionlist.append(cmd) return missionlist
[ "def", "readmission", "(", "aFileName", ")", ":", "print", "(", "'\\nReading mission from file: %s'", "%", "aFileName", ")", "cmds", "=", "vehicle", ".", "commands", "missionlist", "=", "[", "]", "with", "open", "(", "aFileName", ")", "as", "f", ":", "for", "(", "i", ",", "line", ")", "in", "enumerate", "(", "f", ")", ":", "if", "(", "i", "==", "0", ")", ":", "if", "(", "not", "line", ".", "startswith", "(", "'QGC WPL 110'", ")", ")", ":", "raise", "Exception", "(", "'File is not supported WP version'", ")", "else", ":", "linearray", "=", "line", ".", "split", "(", "' DCTB '", ")", "ln_index", "=", "int", "(", "linearray", "[", "0", "]", ")", "ln_currentwp", "=", "int", "(", "linearray", "[", "1", "]", ")", "ln_frame", "=", "int", "(", "linearray", "[", "2", "]", ")", "ln_command", "=", "int", "(", "linearray", "[", "3", "]", ")", "ln_param1", "=", "float", "(", "linearray", "[", "4", "]", ")", "ln_param2", "=", "float", "(", "linearray", "[", "5", "]", ")", "ln_param3", "=", "float", "(", "linearray", "[", "6", "]", ")", "ln_param4", "=", "float", "(", "linearray", "[", "7", "]", ")", "ln_param5", "=", "float", "(", "linearray", "[", "8", "]", ")", "ln_param6", "=", "float", "(", "linearray", "[", "9", "]", ")", "ln_param7", "=", "float", "(", "linearray", "[", "10", "]", ")", "ln_autocontinue", "=", "int", "(", "linearray", "[", "11", "]", ".", "strip", "(", ")", ")", "cmd", "=", "Command", "(", "0", ",", "0", ",", "0", ",", "ln_frame", ",", "ln_command", ",", "ln_currentwp", ",", "ln_autocontinue", ",", "ln_param1", ",", "ln_param2", ",", "ln_param3", ",", "ln_param4", ",", "ln_param5", ",", "ln_param6", ",", "ln_param7", ")", "missionlist", ".", "append", "(", "cmd", ")", "return", "missionlist" ]
load a mission from a file into a list .
train
true
15,113
def test_softmax_generality(): nvis = 1 num_classes = 2 model = MLP(layers=[Softmax(num_classes, 's', irange=0.1)], nvis=nvis) Z = T.matrix() Y_hat = T.nnet.softmax(Z) Y = T.matrix() model.layers[(-1)].cost(Y=Y, Y_hat=Y_hat)
[ "def", "test_softmax_generality", "(", ")", ":", "nvis", "=", "1", "num_classes", "=", "2", "model", "=", "MLP", "(", "layers", "=", "[", "Softmax", "(", "num_classes", ",", "'s'", ",", "irange", "=", "0.1", ")", "]", ",", "nvis", "=", "nvis", ")", "Z", "=", "T", ".", "matrix", "(", ")", "Y_hat", "=", "T", ".", "nnet", ".", "softmax", "(", "Z", ")", "Y", "=", "T", ".", "matrix", "(", ")", "model", ".", "layers", "[", "(", "-", "1", ")", "]", ".", "cost", "(", "Y", "=", "Y", ",", "Y_hat", "=", "Y_hat", ")" ]
tests that the softmax layer can score outputs it did not create .
train
false
15,114
@csrf_exempt def spell_check(request): try: if (not enchant): raise RuntimeError('install pyenchant for spellchecker functionality') raw = force_text(request.body) input = json.loads(raw) id = input['id'] method = input['method'] params = input['params'] lang = params[0] arg = params[1] if (not enchant.dict_exists(str(lang))): raise RuntimeError('dictionary not found for language {!r}'.format(lang)) checker = enchant.Dict(str(lang)) if (method == 'checkWords'): result = [word for word in arg if (word and (not checker.check(word)))] elif (method == 'getSuggestions'): result = checker.suggest(arg) else: raise RuntimeError('Unknown spellcheck method: {!r}'.format(method)) output = {'id': id, 'result': result, 'error': None} except Exception: logging.exception('Error running spellchecker') return HttpResponse(_('Error running spellchecker')) return HttpResponse(json.dumps(output), content_type='application/json')
[ "@", "csrf_exempt", "def", "spell_check", "(", "request", ")", ":", "try", ":", "if", "(", "not", "enchant", ")", ":", "raise", "RuntimeError", "(", "'install pyenchant for spellchecker functionality'", ")", "raw", "=", "force_text", "(", "request", ".", "body", ")", "input", "=", "json", ".", "loads", "(", "raw", ")", "id", "=", "input", "[", "'id'", "]", "method", "=", "input", "[", "'method'", "]", "params", "=", "input", "[", "'params'", "]", "lang", "=", "params", "[", "0", "]", "arg", "=", "params", "[", "1", "]", "if", "(", "not", "enchant", ".", "dict_exists", "(", "str", "(", "lang", ")", ")", ")", ":", "raise", "RuntimeError", "(", "'dictionary not found for language {!r}'", ".", "format", "(", "lang", ")", ")", "checker", "=", "enchant", ".", "Dict", "(", "str", "(", "lang", ")", ")", "if", "(", "method", "==", "'checkWords'", ")", ":", "result", "=", "[", "word", "for", "word", "in", "arg", "if", "(", "word", "and", "(", "not", "checker", ".", "check", "(", "word", ")", ")", ")", "]", "elif", "(", "method", "==", "'getSuggestions'", ")", ":", "result", "=", "checker", ".", "suggest", "(", "arg", ")", "else", ":", "raise", "RuntimeError", "(", "'Unknown spellcheck method: {!r}'", ".", "format", "(", "method", ")", ")", "output", "=", "{", "'id'", ":", "id", ",", "'result'", ":", "result", ",", "'error'", ":", "None", "}", "except", "Exception", ":", "logging", ".", "exception", "(", "'Error running spellchecker'", ")", "return", "HttpResponse", "(", "_", "(", "'Error running spellchecker'", ")", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "output", ")", ",", "content_type", "=", "'application/json'", ")" ]
returns a httpresponse that implements the tinymce spellchecker protocol .
train
true
15,117
def exit_standby(name, instance_ids, should_decrement_desired_capacity=False, region=None, key=None, keyid=None, profile=None): conn = _get_conn_autoscaling_boto3(region=region, key=key, keyid=keyid, profile=profile) try: response = conn.exit_standby(InstanceIds=instance_ids, AutoScalingGroupName=name) except ClientError as e: err = salt.utils.boto3.get_error(e) if (e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException'): return {'exists': False} return {'error': err} return all(((activity['StatusCode'] != 'Failed') for activity in response['Activities']))
[ "def", "exit_standby", "(", "name", ",", "instance_ids", ",", "should_decrement_desired_capacity", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn_autoscaling_boto3", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "response", "=", "conn", ".", "exit_standby", "(", "InstanceIds", "=", "instance_ids", ",", "AutoScalingGroupName", "=", "name", ")", "except", "ClientError", "as", "e", ":", "err", "=", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "if", "(", "e", ".", "response", ".", "get", "(", "'Error'", ",", "{", "}", ")", ".", "get", "(", "'Code'", ")", "==", "'ResourceNotFoundException'", ")", ":", "return", "{", "'exists'", ":", "False", "}", "return", "{", "'error'", ":", "err", "}", "return", "all", "(", "(", "(", "activity", "[", "'StatusCode'", "]", "!=", "'Failed'", ")", "for", "activity", "in", "response", "[", "'Activities'", "]", ")", ")" ]
exit desired instances from standby mode .
train
true
15,119
def get_absolute_url(path): if is_absolute_url(path): return path site = sites.get_current() return build_url(path, scheme=site.scheme, domain=site.domain)
[ "def", "get_absolute_url", "(", "path", ")", ":", "if", "is_absolute_url", "(", "path", ")", ":", "return", "path", "site", "=", "sites", ".", "get_current", "(", ")", "return", "build_url", "(", "path", ",", "scheme", "=", "site", ".", "scheme", ",", "domain", "=", "site", ".", "domain", ")" ]
generate an absolute url for a resource on the test server .
train
false
15,120
def _default_course_mode(course_id): course_modes = CourseMode.modes_for_course(CourseKey.from_string(course_id)) available_modes = [m.slug for m in course_modes] if (CourseMode.DEFAULT_MODE_SLUG in available_modes): return CourseMode.DEFAULT_MODE_SLUG elif ('audit' in available_modes): return 'audit' elif ('honor' in available_modes): return 'honor' return CourseMode.DEFAULT_MODE_SLUG
[ "def", "_default_course_mode", "(", "course_id", ")", ":", "course_modes", "=", "CourseMode", ".", "modes_for_course", "(", "CourseKey", ".", "from_string", "(", "course_id", ")", ")", "available_modes", "=", "[", "m", ".", "slug", "for", "m", "in", "course_modes", "]", "if", "(", "CourseMode", ".", "DEFAULT_MODE_SLUG", "in", "available_modes", ")", ":", "return", "CourseMode", ".", "DEFAULT_MODE_SLUG", "elif", "(", "'audit'", "in", "available_modes", ")", ":", "return", "'audit'", "elif", "(", "'honor'", "in", "available_modes", ")", ":", "return", "'honor'", "return", "CourseMode", ".", "DEFAULT_MODE_SLUG" ]
return the default enrollment for a course .
train
false
15,121
def get_parsed_option(command_opts, opt_key, default=None): command_opt_value = getattr(command_opts, opt_key, default) if command_opt_value: command_opt_value = listfy(command_opt_value) return command_opt_value
[ "def", "get_parsed_option", "(", "command_opts", ",", "opt_key", ",", "default", "=", "None", ")", ":", "command_opt_value", "=", "getattr", "(", "command_opts", ",", "opt_key", ",", "default", ")", "if", "command_opt_value", ":", "command_opt_value", "=", "listfy", "(", "command_opt_value", ")", "return", "command_opt_value" ]
extract user command option and parse it .
train
false
15,122
def on_agent(path, context): if (path == ''): path = '/' (folder, search_path) = get_folder_and_search_path(path, '/') folders = context.listFiles(folder) return [s.replace(' ', '\\ ') for s in get_suggestions(folder, search_path, map((lambda f: str(f)), folders), '/', True)]
[ "def", "on_agent", "(", "path", ",", "context", ")", ":", "if", "(", "path", "==", "''", ")", ":", "path", "=", "'/'", "(", "folder", ",", "search_path", ")", "=", "get_folder_and_search_path", "(", "path", ",", "'/'", ")", "folders", "=", "context", ".", "listFiles", "(", "folder", ")", "return", "[", "s", ".", "replace", "(", "' '", ",", "'\\\\ '", ")", "for", "s", "in", "get_suggestions", "(", "folder", ",", "search_path", ",", "map", "(", "(", "lambda", "f", ":", "str", "(", "f", ")", ")", ",", "folders", ")", ",", "'/'", ",", "True", ")", "]" ]
provides path completion .
train
false
15,123
def compute_features(net, im): if config.FLAG_CPU_MODE: net.blobs['data'].reshape(*im.shape) net.blobs['data'].data[...] = im net.forward() fc7 = net.blobs['fc7'].data else: fc7 = numpy.array(lasagne.layers.get_output(net['fc7'], im, deterministic=True).eval()) return fc7
[ "def", "compute_features", "(", "net", ",", "im", ")", ":", "if", "config", ".", "FLAG_CPU_MODE", ":", "net", ".", "blobs", "[", "'data'", "]", ".", "reshape", "(", "*", "im", ".", "shape", ")", "net", ".", "blobs", "[", "'data'", "]", ".", "data", "[", "...", "]", "=", "im", "net", ".", "forward", "(", ")", "fc7", "=", "net", ".", "blobs", "[", "'fc7'", "]", ".", "data", "else", ":", "fc7", "=", "numpy", ".", "array", "(", "lasagne", ".", "layers", ".", "get_output", "(", "net", "[", "'fc7'", "]", ",", "im", ",", "deterministic", "=", "True", ")", ".", "eval", "(", ")", ")", "return", "fc7" ]
compute fc7 features for im .
train
false
15,124
def line_graph(G, create_using=None): if G.is_directed(): L = _lg_directed(G, create_using=create_using) else: L = _lg_undirected(G, selfloops=False, create_using=create_using) return L
[ "def", "line_graph", "(", "G", ",", "create_using", "=", "None", ")", ":", "if", "G", ".", "is_directed", "(", ")", ":", "L", "=", "_lg_directed", "(", "G", ",", "create_using", "=", "create_using", ")", "else", ":", "L", "=", "_lg_undirected", "(", "G", ",", "selfloops", "=", "False", ",", "create_using", "=", "create_using", ")", "return", "L" ]
returns the line graph of the graph or digraph g .
train
false
15,125
def _write_file_prefix(f, interpreter): if interpreter: shebang = (('#!' + interpreter.encode(shebang_encoding)) + '\n') f.write(shebang)
[ "def", "_write_file_prefix", "(", "f", ",", "interpreter", ")", ":", "if", "interpreter", ":", "shebang", "=", "(", "(", "'#!'", "+", "interpreter", ".", "encode", "(", "shebang_encoding", ")", ")", "+", "'\\n'", ")", "f", ".", "write", "(", "shebang", ")" ]
write a shebang line .
train
false
15,126
@csrf.exempt @api_experimental.route('/dags/<string:dag_id>/dag_runs', methods=['POST']) @requires_authentication def trigger_dag(dag_id): data = request.get_json(force=True) run_id = None if ('run_id' in data): run_id = data['run_id'] conf = None if ('conf' in data): conf = data['conf'] execution_date = None if ('execution_date' in data): execution_date = data['execution_date'] try: execution_date = datetime.strptime(execution_date, '%Y-%m-%dT%H:%M:%S') except ValueError: error_message = 'Given execution date, {}, could not be identified as a date. Example date format: 2015-11-16T14:34:15'.format(execution_date) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date) except AirflowException as err: _log.error(err) response = jsonify(error='{}'.format(err)) response.status_code = 404 return response if getattr(g, 'user', None): _log.info('User {} created {}'.format(g.user, dr)) response = jsonify(message='Created {}'.format(dr)) return response
[ "@", "csrf", ".", "exempt", "@", "api_experimental", ".", "route", "(", "'/dags/<string:dag_id>/dag_runs'", ",", "methods", "=", "[", "'POST'", "]", ")", "@", "requires_authentication", "def", "trigger_dag", "(", "dag_id", ")", ":", "data", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "run_id", "=", "None", "if", "(", "'run_id'", "in", "data", ")", ":", "run_id", "=", "data", "[", "'run_id'", "]", "conf", "=", "None", "if", "(", "'conf'", "in", "data", ")", ":", "conf", "=", "data", "[", "'conf'", "]", "execution_date", "=", "None", "if", "(", "'execution_date'", "in", "data", ")", ":", "execution_date", "=", "data", "[", "'execution_date'", "]", "try", ":", "execution_date", "=", "datetime", ".", "strptime", "(", "execution_date", ",", "'%Y-%m-%dT%H:%M:%S'", ")", "except", "ValueError", ":", "error_message", "=", "'Given execution date, {}, could not be identified as a date. Example date format: 2015-11-16T14:34:15'", ".", "format", "(", "execution_date", ")", "_log", ".", "info", "(", "error_message", ")", "response", "=", "jsonify", "(", "{", "'error'", ":", "error_message", "}", ")", "response", ".", "status_code", "=", "400", "return", "response", "try", ":", "dr", "=", "trigger", ".", "trigger_dag", "(", "dag_id", ",", "run_id", ",", "conf", ",", "execution_date", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "'{}'", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "404", "return", "response", "if", "getattr", "(", "g", ",", "'user'", ",", "None", ")", ":", "_log", ".", "info", "(", "'User {} created {}'", ".", "format", "(", "g", ".", "user", ",", "dr", ")", ")", "response", "=", "jsonify", "(", "message", "=", "'Created {}'", ".", "format", "(", "dr", ")", ")", "return", "response" ]
creates a dag run for the specified dag .
train
false
15,127
def getevents(clsid): clsid = str(pywintypes.IID(clsid)) klass = gencache.GetClassForCLSID(clsid) try: return klass.default_source except AttributeError: try: return gencache.GetClassForCLSID(klass.coclass_clsid).default_source except AttributeError: return None
[ "def", "getevents", "(", "clsid", ")", ":", "clsid", "=", "str", "(", "pywintypes", ".", "IID", "(", "clsid", ")", ")", "klass", "=", "gencache", ".", "GetClassForCLSID", "(", "clsid", ")", "try", ":", "return", "klass", ".", "default_source", "except", "AttributeError", ":", "try", ":", "return", "gencache", ".", "GetClassForCLSID", "(", "klass", ".", "coclass_clsid", ")", ".", "default_source", "except", "AttributeError", ":", "return", "None" ]
determine the default outgoing interface for a class .
train
false
15,128
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError]) def get_user_preference(requesting_user, preference_key, username=None): existing_user = _get_authorized_user(requesting_user, username, allow_staff=True) return UserPreference.get_value(existing_user, preference_key)
[ "@", "intercept_errors", "(", "UserAPIInternalError", ",", "ignore_errors", "=", "[", "UserAPIRequestError", "]", ")", "def", "get_user_preference", "(", "requesting_user", ",", "preference_key", ",", "username", "=", "None", ")", ":", "existing_user", "=", "_get_authorized_user", "(", "requesting_user", ",", "username", ",", "allow_staff", "=", "True", ")", "return", "UserPreference", ".", "get_value", "(", "existing_user", ",", "preference_key", ")" ]
returns the value of the user preference with the specified key .
train
false
15,129
def map_process(path, wp, fen, used_flightmodes, options): from mavflightview import mavflightview_show mavflightview_show(path, wp, fen, used_flightmodes, options)
[ "def", "map_process", "(", "path", ",", "wp", ",", "fen", ",", "used_flightmodes", ",", "options", ")", ":", "from", "mavflightview", "import", "mavflightview_show", "mavflightview_show", "(", "path", ",", "wp", ",", "fen", ",", "used_flightmodes", ",", "options", ")" ]
process for displaying a graph .
train
false
15,131
def add_user_to_cohort(cohort, username_or_email): user = get_user_by_username_or_email(username_or_email) membership = CohortMembership(course_user_group=cohort, user=user) membership.save() tracker.emit('edx.cohort.user_add_requested', {'user_id': user.id, 'cohort_id': cohort.id, 'cohort_name': cohort.name, 'previous_cohort_id': membership.previous_cohort_id, 'previous_cohort_name': membership.previous_cohort_name}) return (user, membership.previous_cohort_name)
[ "def", "add_user_to_cohort", "(", "cohort", ",", "username_or_email", ")", ":", "user", "=", "get_user_by_username_or_email", "(", "username_or_email", ")", "membership", "=", "CohortMembership", "(", "course_user_group", "=", "cohort", ",", "user", "=", "user", ")", "membership", ".", "save", "(", ")", "tracker", ".", "emit", "(", "'edx.cohort.user_add_requested'", ",", "{", "'user_id'", ":", "user", ".", "id", ",", "'cohort_id'", ":", "cohort", ".", "id", ",", "'cohort_name'", ":", "cohort", ".", "name", ",", "'previous_cohort_id'", ":", "membership", ".", "previous_cohort_id", ",", "'previous_cohort_name'", ":", "membership", ".", "previous_cohort_name", "}", ")", "return", "(", "user", ",", "membership", ".", "previous_cohort_name", ")" ]
look up the given user .
train
false
15,132
def json_synonym(name): def getter(self): return json.loads(getattr(self, name), decode_datetime=True) def setter(self, entry): setattr(self, name, unicode(json.dumps(entry, encode_datetime=True))) return synonym(name, descriptor=property(getter, setter))
[ "def", "json_synonym", "(", "name", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "json", ".", "loads", "(", "getattr", "(", "self", ",", "name", ")", ",", "decode_datetime", "=", "True", ")", "def", "setter", "(", "self", ",", "entry", ")", ":", "setattr", "(", "self", ",", "name", ",", "unicode", "(", "json", ".", "dumps", "(", "entry", ",", "encode_datetime", "=", "True", ")", ")", ")", "return", "synonym", "(", "name", ",", "descriptor", "=", "property", "(", "getter", ",", "setter", ")", ")" ]
use json to serialize python objects for db storage .
train
false
15,133
def setup_switch(device_id, name, insteonhub, hass, add_devices_callback): if (device_id in _CONFIGURING): request_id = _CONFIGURING.pop(device_id) configurator = get_component('configurator') configurator.request_done(request_id) _LOGGER.info('Device configuration done!') conf_switch = config_from_file(hass.config.path(INSTEON_LOCAL_SWITCH_CONF)) if (device_id not in conf_switch): conf_switch[device_id] = name if (not config_from_file(hass.config.path(INSTEON_LOCAL_SWITCH_CONF), conf_switch)): _LOGGER.error('Failed to save configuration file') device = insteonhub.switch(device_id) add_devices_callback([InsteonLocalSwitchDevice(device, name)])
[ "def", "setup_switch", "(", "device_id", ",", "name", ",", "insteonhub", ",", "hass", ",", "add_devices_callback", ")", ":", "if", "(", "device_id", "in", "_CONFIGURING", ")", ":", "request_id", "=", "_CONFIGURING", ".", "pop", "(", "device_id", ")", "configurator", "=", "get_component", "(", "'configurator'", ")", "configurator", ".", "request_done", "(", "request_id", ")", "_LOGGER", ".", "info", "(", "'Device configuration done!'", ")", "conf_switch", "=", "config_from_file", "(", "hass", ".", "config", ".", "path", "(", "INSTEON_LOCAL_SWITCH_CONF", ")", ")", "if", "(", "device_id", "not", "in", "conf_switch", ")", ":", "conf_switch", "[", "device_id", "]", "=", "name", "if", "(", "not", "config_from_file", "(", "hass", ".", "config", ".", "path", "(", "INSTEON_LOCAL_SWITCH_CONF", ")", ",", "conf_switch", ")", ")", ":", "_LOGGER", ".", "error", "(", "'Failed to save configuration file'", ")", "device", "=", "insteonhub", ".", "switch", "(", "device_id", ")", "add_devices_callback", "(", "[", "InsteonLocalSwitchDevice", "(", "device", ",", "name", ")", "]", ")" ]
set up the switch .
train
false
15,134
def verify_tunnel_range(tunnel_range, tunnel_type): mappings = {p_const.TYPE_GRE: is_valid_gre_id, p_const.TYPE_VXLAN: is_valid_vxlan_vni, p_const.TYPE_GENEVE: is_valid_geneve_vni} if (tunnel_type in mappings): for ident in tunnel_range: if (not mappings[tunnel_type](ident)): raise exceptions.NetworkTunnelRangeError(tunnel_range=tunnel_range, error=(_('%(id)s is not a valid %(type)s identifier') % {'id': ident, 'type': tunnel_type})) if (tunnel_range[1] < tunnel_range[0]): raise exceptions.NetworkTunnelRangeError(tunnel_range=tunnel_range, error=_('End of tunnel range is less than start of tunnel range'))
[ "def", "verify_tunnel_range", "(", "tunnel_range", ",", "tunnel_type", ")", ":", "mappings", "=", "{", "p_const", ".", "TYPE_GRE", ":", "is_valid_gre_id", ",", "p_const", ".", "TYPE_VXLAN", ":", "is_valid_vxlan_vni", ",", "p_const", ".", "TYPE_GENEVE", ":", "is_valid_geneve_vni", "}", "if", "(", "tunnel_type", "in", "mappings", ")", ":", "for", "ident", "in", "tunnel_range", ":", "if", "(", "not", "mappings", "[", "tunnel_type", "]", "(", "ident", ")", ")", ":", "raise", "exceptions", ".", "NetworkTunnelRangeError", "(", "tunnel_range", "=", "tunnel_range", ",", "error", "=", "(", "_", "(", "'%(id)s is not a valid %(type)s identifier'", ")", "%", "{", "'id'", ":", "ident", ",", "'type'", ":", "tunnel_type", "}", ")", ")", "if", "(", "tunnel_range", "[", "1", "]", "<", "tunnel_range", "[", "0", "]", ")", ":", "raise", "exceptions", ".", "NetworkTunnelRangeError", "(", "tunnel_range", "=", "tunnel_range", ",", "error", "=", "_", "(", "'End of tunnel range is less than start of tunnel range'", ")", ")" ]
raise an exception for invalid tunnel range or malformed range .
train
false
15,135
def formatted_response(response, fields, labels, iterable=None): output_format = validated_parameter(request.args, 'format', values=['json', 'json_lines', 'csv'], default='json') header_type = validated_parameter(request.args, 'header', values=['names', 'labels', 'none'], default='labels') if (header_type == 'names'): header = fields elif (header_type == 'labels'): header = labels else: header = None iterable = (iterable or response) if (output_format == 'json'): return jsonify(response) elif (output_format == 'json_lines'): return Response(JSONLinesGenerator(iterable), mimetype='application/x-json-lines') elif (output_format == 'csv'): generator = csv_generator(iterable, fields, include_header=bool(header), header=header) headers = {'Content-Disposition': 'attachment; filename="facts.csv"'} return Response(generator, mimetype='text/csv', headers=headers)
[ "def", "formatted_response", "(", "response", ",", "fields", ",", "labels", ",", "iterable", "=", "None", ")", ":", "output_format", "=", "validated_parameter", "(", "request", ".", "args", ",", "'format'", ",", "values", "=", "[", "'json'", ",", "'json_lines'", ",", "'csv'", "]", ",", "default", "=", "'json'", ")", "header_type", "=", "validated_parameter", "(", "request", ".", "args", ",", "'header'", ",", "values", "=", "[", "'names'", ",", "'labels'", ",", "'none'", "]", ",", "default", "=", "'labels'", ")", "if", "(", "header_type", "==", "'names'", ")", ":", "header", "=", "fields", "elif", "(", "header_type", "==", "'labels'", ")", ":", "header", "=", "labels", "else", ":", "header", "=", "None", "iterable", "=", "(", "iterable", "or", "response", ")", "if", "(", "output_format", "==", "'json'", ")", ":", "return", "jsonify", "(", "response", ")", "elif", "(", "output_format", "==", "'json_lines'", ")", ":", "return", "Response", "(", "JSONLinesGenerator", "(", "iterable", ")", ",", "mimetype", "=", "'application/x-json-lines'", ")", "elif", "(", "output_format", "==", "'csv'", ")", ":", "generator", "=", "csv_generator", "(", "iterable", ",", "fields", ",", "include_header", "=", "bool", "(", "header", ")", ",", "header", "=", "header", ")", "headers", "=", "{", "'Content-Disposition'", ":", "'attachment; filename=\"facts.csv\"'", "}", "return", "Response", "(", "generator", ",", "mimetype", "=", "'text/csv'", ",", "headers", "=", "headers", ")" ]
wraps request which returns response that can be formatted .
train
false
15,136
def _Intersect(handler_list): if (not handler_list): return set() handlers = set([handler_list[0]]) for input_handler in handler_list[1:]: new_handlers = set() for g in handlers: new_handlers |= _IntersectTwoHandlers(input_handler, g) handlers = new_handlers return list(handlers)
[ "def", "_Intersect", "(", "handler_list", ")", ":", "if", "(", "not", "handler_list", ")", ":", "return", "set", "(", ")", "handlers", "=", "set", "(", "[", "handler_list", "[", "0", "]", "]", ")", "for", "input_handler", "in", "handler_list", "[", "1", ":", "]", ":", "new_handlers", "=", "set", "(", ")", "for", "g", "in", "handlers", ":", "new_handlers", "|=", "_IntersectTwoHandlers", "(", "input_handler", ",", "g", ")", "handlers", "=", "new_handlers", "return", "list", "(", "handlers", ")" ]
returns an unordered list of all possible intersections of handlers .
train
false
15,139
def test_Gpujoin_inplace(): s = T.lscalar() data = numpy.array([3, 4, 5], dtype=theano.config.floatX) x = gpuarray_shared_constructor(data, borrow=True) z = T.zeros((s,)) join = GpuJoin(view=0) c = join(0, x, z) f = theano.function([s], theano.Out(c, borrow=True)) assert (x.get_value(borrow=True, return_internal_type=True) is f(0)) assert numpy.allclose(f(0), [3, 4, 5])
[ "def", "test_Gpujoin_inplace", "(", ")", ":", "s", "=", "T", ".", "lscalar", "(", ")", "data", "=", "numpy", ".", "array", "(", "[", "3", ",", "4", ",", "5", "]", ",", "dtype", "=", "theano", ".", "config", ".", "floatX", ")", "x", "=", "gpuarray_shared_constructor", "(", "data", ",", "borrow", "=", "True", ")", "z", "=", "T", ".", "zeros", "(", "(", "s", ",", ")", ")", "join", "=", "GpuJoin", "(", "view", "=", "0", ")", "c", "=", "join", "(", "0", ",", "x", ",", "z", ")", "f", "=", "theano", ".", "function", "(", "[", "s", "]", ",", "theano", ".", "Out", "(", "c", ",", "borrow", "=", "True", ")", ")", "assert", "(", "x", ".", "get_value", "(", "borrow", "=", "True", ",", "return_internal_type", "=", "True", ")", "is", "f", "(", "0", ")", ")", "assert", "numpy", ".", "allclose", "(", "f", "(", "0", ")", ",", "[", "3", ",", "4", ",", "5", "]", ")" ]
test gpujoin to work inplace .
train
false
15,140
def restarted(manager, containers, count, name): containers.refresh() for container in manager.get_differing_containers(): manager.stop_containers([container]) manager.remove_containers([container]) containers.refresh() manager.restart_containers(containers.running) started(manager, containers, count, name)
[ "def", "restarted", "(", "manager", ",", "containers", ",", "count", ",", "name", ")", ":", "containers", ".", "refresh", "(", ")", "for", "container", "in", "manager", ".", "get_differing_containers", "(", ")", ":", "manager", ".", "stop_containers", "(", "[", "container", "]", ")", "manager", ".", "remove_containers", "(", "[", "container", "]", ")", "containers", ".", "refresh", "(", ")", "manager", ".", "restart_containers", "(", "containers", ".", "running", ")", "started", "(", "manager", ",", "containers", ",", "count", ",", "name", ")" ]
require a service to be restarted .
train
false
15,142
def ParseJSONResponse(response): content_type = response.headers.get('Content-Type', '') if (not any((content_type.startswith(x) for x in _CONTENT_TYPES))): raise web.HTTPError(response.code, ('%r' % response.headers)) try: json_dict = json.loads(response.body) except: if (response.code == 200): raise json_dict = {'error': response.body} if (response.code != 200): error = 'unknown' if (isinstance(json_dict, dict) and json_dict.get('error')): error = json_dict.get('error') raise web.HTTPError(response.code, ('%s' % error)) return json_dict
[ "def", "ParseJSONResponse", "(", "response", ")", ":", "content_type", "=", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", "if", "(", "not", "any", "(", "(", "content_type", ".", "startswith", "(", "x", ")", "for", "x", "in", "_CONTENT_TYPES", ")", ")", ")", ":", "raise", "web", ".", "HTTPError", "(", "response", ".", "code", ",", "(", "'%r'", "%", "response", ".", "headers", ")", ")", "try", ":", "json_dict", "=", "json", ".", "loads", "(", "response", ".", "body", ")", "except", ":", "if", "(", "response", ".", "code", "==", "200", ")", ":", "raise", "json_dict", "=", "{", "'error'", ":", "response", ".", "body", "}", "if", "(", "response", ".", "code", "!=", "200", ")", ":", "error", "=", "'unknown'", "if", "(", "isinstance", "(", "json_dict", ",", "dict", ")", "and", "json_dict", ".", "get", "(", "'error'", ")", ")", ":", "error", "=", "json_dict", ".", "get", "(", "'error'", ")", "raise", "web", ".", "HTTPError", "(", "response", ".", "code", ",", "(", "'%s'", "%", "error", ")", ")", "return", "json_dict" ]
parse the json-encoded contents of the response body and return the python data object .
train
false
15,143
def hough_ellipse(img, threshold=4, accuracy=1, min_size=4, max_size=None): return _hough_ellipse(img, threshold, accuracy, min_size, max_size)
[ "def", "hough_ellipse", "(", "img", ",", "threshold", "=", "4", ",", "accuracy", "=", "1", ",", "min_size", "=", "4", ",", "max_size", "=", "None", ")", ":", "return", "_hough_ellipse", "(", "img", ",", "threshold", ",", "accuracy", ",", "min_size", ",", "max_size", ")" ]
perform an elliptical hough transform .
train
false
15,144
@pytest.mark.cmd @pytest.mark.parametrize('command,app', [(command, app) for (command, app) in get_commands().iteritems() if (app.startswith('pootle_') or (app in CORE_APPS_WITH_COMMANDS))]) def test_initdb_help(capfd, command, app): print ('Command: %s, App: %s' % (command, app)) with pytest.raises(SystemExit): call_command(command, '--help') (out, err) = capfd.readouterr() assert ('--help' in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "@", "pytest", ".", "mark", ".", "parametrize", "(", "'command,app'", ",", "[", "(", "command", ",", "app", ")", "for", "(", "command", ",", "app", ")", "in", "get_commands", "(", ")", ".", "iteritems", "(", ")", "if", "(", "app", ".", "startswith", "(", "'pootle_'", ")", "or", "(", "app", "in", "CORE_APPS_WITH_COMMANDS", ")", ")", "]", ")", "def", "test_initdb_help", "(", "capfd", ",", "command", ",", "app", ")", ":", "print", "(", "'Command: %s, App: %s'", "%", "(", "command", ",", "app", ")", ")", "with", "pytest", ".", "raises", "(", "SystemExit", ")", ":", "call_command", "(", "command", ",", "'--help'", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "(", "'--help'", "in", "out", ")" ]
catch any simple command issues .
train
false
15,146
def is_email_valid(email_address): return (invalid_email_reason(email_address, '') is None)
[ "def", "is_email_valid", "(", "email_address", ")", ":", "return", "(", "invalid_email_reason", "(", "email_address", ",", "''", ")", "is", "None", ")" ]
determine if email is invalid .
train
false
15,148
def _username(): if pwd: username = pwd.getpwuid(os.getuid()).pw_name else: username = getpass.getuser() return username
[ "def", "_username", "(", ")", ":", "if", "pwd", ":", "username", "=", "pwd", ".", "getpwuid", "(", "os", ".", "getuid", "(", ")", ")", ".", "pw_name", "else", ":", "username", "=", "getpass", ".", "getuser", "(", ")", "return", "username" ]
grain for the minion username .
train
true
15,150
def weights_bartlett(nlags): return (1 - (np.arange((nlags + 1)) / (nlags + 1.0)))
[ "def", "weights_bartlett", "(", "nlags", ")", ":", "return", "(", "1", "-", "(", "np", ".", "arange", "(", "(", "nlags", "+", "1", ")", ")", "/", "(", "nlags", "+", "1.0", ")", ")", ")" ]
bartlett weights for hac this will be moved to another module parameters nlags : int highest lag in the kernel window .
train
false
15,151
def init_flowgram_file(filename=None, n=0, l=400, prefix='/tmp/'): if (filename is None): (fd, filename) = mkstemp(dir=prefix, suffix='.dat') close(fd) fh = open(filename, 'w') fh.write(('%d %d\n' % (n, l))) return (fh, filename)
[ "def", "init_flowgram_file", "(", "filename", "=", "None", ",", "n", "=", "0", ",", "l", "=", "400", ",", "prefix", "=", "'/tmp/'", ")", ":", "if", "(", "filename", "is", "None", ")", ":", "(", "fd", ",", "filename", ")", "=", "mkstemp", "(", "dir", "=", "prefix", ",", "suffix", "=", "'.dat'", ")", "close", "(", "fd", ")", "fh", "=", "open", "(", "filename", ",", "'w'", ")", "fh", ".", "write", "(", "(", "'%d %d\\n'", "%", "(", "n", ",", "l", ")", ")", ")", "return", "(", "fh", ",", "filename", ")" ]
opens a file in plain flowgram format and writes header information .
train
false
15,153
def adjust_timestamp(ts, offset, here=utcoffset): return (ts - ((offset - here()) * 3600))
[ "def", "adjust_timestamp", "(", "ts", ",", "offset", ",", "here", "=", "utcoffset", ")", ":", "return", "(", "ts", "-", "(", "(", "offset", "-", "here", "(", ")", ")", "*", "3600", ")", ")" ]
adjust timestamp based on provided utcoffset .
train
false
15,154
def do_organization_info(ava): if (ava is None): return None org = md.Organization() for (dkey, (ckey, klass)) in ORG_ATTR_TRANSL.items(): if (ckey not in ava): continue if isinstance(ava[ckey], basestring): setattr(org, dkey, [_localized_name(ava[ckey], klass)]) elif isinstance(ava[ckey], list): setattr(org, dkey, [_localized_name(n, klass) for n in ava[ckey]]) else: setattr(org, dkey, [_localized_name(ava[ckey], klass)]) return org
[ "def", "do_organization_info", "(", "ava", ")", ":", "if", "(", "ava", "is", "None", ")", ":", "return", "None", "org", "=", "md", ".", "Organization", "(", ")", "for", "(", "dkey", ",", "(", "ckey", ",", "klass", ")", ")", "in", "ORG_ATTR_TRANSL", ".", "items", "(", ")", ":", "if", "(", "ckey", "not", "in", "ava", ")", ":", "continue", "if", "isinstance", "(", "ava", "[", "ckey", "]", ",", "basestring", ")", ":", "setattr", "(", "org", ",", "dkey", ",", "[", "_localized_name", "(", "ava", "[", "ckey", "]", ",", "klass", ")", "]", ")", "elif", "isinstance", "(", "ava", "[", "ckey", "]", ",", "list", ")", ":", "setattr", "(", "org", ",", "dkey", ",", "[", "_localized_name", "(", "n", ",", "klass", ")", "for", "n", "in", "ava", "[", "ckey", "]", "]", ")", "else", ":", "setattr", "(", "org", ",", "dkey", ",", "[", "_localized_name", "(", "ava", "[", "ckey", "]", ",", "klass", ")", "]", ")", "return", "org" ]
description of an organization in the configuration is a dictionary of keys and values .
train
true
15,156
def compare_languge(language, lang_list): found = False for l in lang_list: if (language == l.lower()): found = True break return found
[ "def", "compare_languge", "(", "language", ",", "lang_list", ")", ":", "found", "=", "False", "for", "l", "in", "lang_list", ":", "if", "(", "language", "==", "l", ".", "lower", "(", ")", ")", ":", "found", "=", "True", "break", "return", "found" ]
check if language is found .
train
false
15,157
def rstrips(text, remove): return _strips('r', text, remove)
[ "def", "rstrips", "(", "text", ",", "remove", ")", ":", "return", "_strips", "(", "'r'", ",", "text", ",", "remove", ")" ]
removes the string remove from the right of text .
train
false
15,158
def send_nscript(title, msg, gtype, force=False, test=None): if test: script = test.get('nscript_script') parameters = test.get('nscript_parameters') else: script = sabnzbd.cfg.nscript_script() parameters = sabnzbd.cfg.nscript_parameters() if (not script): return T('Cannot send, missing required data') title = (u'SABnzbd: ' + Tx(NOTIFICATION.get(gtype, 'other'))) if (force or check_classes(gtype, 'nscript')): script_path = make_script_path(script) if script_path: (output, ret) = external_script(script_path, gtype, title, msg, parameters) if ret: logging.error((T('Script returned exit code %s and output "%s"') % (ret, output))) return (T('Script returned exit code %s and output "%s"') % (ret, output)) else: logging.info(('Successfully executed notification script ' + script_path)) else: return (T('Notification script "%s" does not exist') % script_path) return ''
[ "def", "send_nscript", "(", "title", ",", "msg", ",", "gtype", ",", "force", "=", "False", ",", "test", "=", "None", ")", ":", "if", "test", ":", "script", "=", "test", ".", "get", "(", "'nscript_script'", ")", "parameters", "=", "test", ".", "get", "(", "'nscript_parameters'", ")", "else", ":", "script", "=", "sabnzbd", ".", "cfg", ".", "nscript_script", "(", ")", "parameters", "=", "sabnzbd", ".", "cfg", ".", "nscript_parameters", "(", ")", "if", "(", "not", "script", ")", ":", "return", "T", "(", "'Cannot send, missing required data'", ")", "title", "=", "(", "u'SABnzbd: '", "+", "Tx", "(", "NOTIFICATION", ".", "get", "(", "gtype", ",", "'other'", ")", ")", ")", "if", "(", "force", "or", "check_classes", "(", "gtype", ",", "'nscript'", ")", ")", ":", "script_path", "=", "make_script_path", "(", "script", ")", "if", "script_path", ":", "(", "output", ",", "ret", ")", "=", "external_script", "(", "script_path", ",", "gtype", ",", "title", ",", "msg", ",", "parameters", ")", "if", "ret", ":", "logging", ".", "error", "(", "(", "T", "(", "'Script returned exit code %s and output \"%s\"'", ")", "%", "(", "ret", ",", "output", ")", ")", ")", "return", "(", "T", "(", "'Script returned exit code %s and output \"%s\"'", ")", "%", "(", "ret", ",", "output", ")", ")", "else", ":", "logging", ".", "info", "(", "(", "'Successfully executed notification script '", "+", "script_path", ")", ")", "else", ":", "return", "(", "T", "(", "'Notification script \"%s\" does not exist'", ")", "%", "script_path", ")", "return", "''" ]
run users notification script .
train
false
15,159
def lint_repeats(tool_xml, lint_ctx): repeats = tool_xml.findall('./inputs//repeat') for repeat in repeats: if ('name' not in repeat.attrib): lint_ctx.error('Repeat does not specify name attribute.') if ('title' not in repeat.attrib): lint_ctx.error('Repeat does not specify title attribute.')
[ "def", "lint_repeats", "(", "tool_xml", ",", "lint_ctx", ")", ":", "repeats", "=", "tool_xml", ".", "findall", "(", "'./inputs//repeat'", ")", "for", "repeat", "in", "repeats", ":", "if", "(", "'name'", "not", "in", "repeat", ".", "attrib", ")", ":", "lint_ctx", ".", "error", "(", "'Repeat does not specify name attribute.'", ")", "if", "(", "'title'", "not", "in", "repeat", ".", "attrib", ")", ":", "lint_ctx", ".", "error", "(", "'Repeat does not specify title attribute.'", ")" ]
lint repeat blocks in tool inputs .
train
false
15,160
def _iscommand(cmd): if _isexecutable(cmd): return True path = os.environ.get('PATH') if (not path): return False for d in path.split(os.pathsep): exe = os.path.join(d, cmd) if _isexecutable(exe): return True return False
[ "def", "_iscommand", "(", "cmd", ")", ":", "if", "_isexecutable", "(", "cmd", ")", ":", "return", "True", "path", "=", "os", ".", "environ", ".", "get", "(", "'PATH'", ")", "if", "(", "not", "path", ")", ":", "return", "False", "for", "d", "in", "path", ".", "split", "(", "os", ".", "pathsep", ")", ":", "exe", "=", "os", ".", "path", ".", "join", "(", "d", ",", "cmd", ")", "if", "_isexecutable", "(", "exe", ")", ":", "return", "True", "return", "False" ]
return true if cmd is executable or can be found on the executable search path .
train
false
15,161
def test_cache_deactivated_remove_data(config_stub, tmpdir): config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': True}} disk_cache = cache.DiskCache(str(tmpdir)) url = QUrl('http://www.example.com/') assert (not disk_cache.remove(url))
[ "def", "test_cache_deactivated_remove_data", "(", "config_stub", ",", "tmpdir", ")", ":", "config_stub", ".", "data", "=", "{", "'storage'", ":", "{", "'cache-size'", ":", "1024", "}", ",", "'general'", ":", "{", "'private-browsing'", ":", "True", "}", "}", "disk_cache", "=", "cache", ".", "DiskCache", "(", "str", "(", "tmpdir", ")", ")", "url", "=", "QUrl", "(", "'http://www.example.com/'", ")", "assert", "(", "not", "disk_cache", ".", "remove", "(", "url", ")", ")" ]
test removing some data from a deactivated cache .
train
false
15,163
def describe_template_sets(): import inspect import sys templatesets = inspect.getmembers(sys.modules[__name__], inspect.isfunction) for (name, obj) in templatesets: if (name == 'describe_template_sets'): continue print(name, obj.__doc__, '\n')
[ "def", "describe_template_sets", "(", ")", ":", "import", "inspect", "import", "sys", "templatesets", "=", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "inspect", ".", "isfunction", ")", "for", "(", "name", ",", "obj", ")", "in", "templatesets", ":", "if", "(", "name", "==", "'describe_template_sets'", ")", ":", "continue", "print", "(", "name", ",", "obj", ".", "__doc__", ",", "'\\n'", ")" ]
print the available template sets in this demo .
train
false
15,164
def _load_all_namespaces(resolver): url_patterns = getattr(resolver, 'url_patterns', []) namespaces = [url.namespace for url in url_patterns if (getattr(url, 'namespace', None) is not None)] for pattern in url_patterns: namespaces.extend(_load_all_namespaces(pattern)) return namespaces
[ "def", "_load_all_namespaces", "(", "resolver", ")", ":", "url_patterns", "=", "getattr", "(", "resolver", ",", "'url_patterns'", ",", "[", "]", ")", "namespaces", "=", "[", "url", ".", "namespace", "for", "url", "in", "url_patterns", "if", "(", "getattr", "(", "url", ",", "'namespace'", ",", "None", ")", "is", "not", "None", ")", "]", "for", "pattern", "in", "url_patterns", ":", "namespaces", ".", "extend", "(", "_load_all_namespaces", "(", "pattern", ")", ")", "return", "namespaces" ]
recursively load all namespaces from url patterns .
train
false
15,165
def get_provider_driver_class(driver, namespace=SERVICE_PROVIDERS):
    """Return the class path for a provider driver alias registered with stevedore.

    Keeps backward compatibility: if the alias cannot be resolved (unknown
    entry point or import failure), *driver* is returned unchanged on the
    assumption that the caller already supplied a full class path.
    """
    try:
        resolved = stevedore.driver.DriverManager(namespace, driver).driver
    except (ImportError, RuntimeError):
        # Could not load the alias; pass the configured value through as-is.
        return driver
    new_driver = ('%s.%s' % (resolved.__module__, resolved.__name__))
    # Warn so operators update configs before the automatic fixup is removed.
    LOG.warning(_LW('The configured driver %(driver)s has been moved, automatically using %(new_driver)s instead. Please update your config files, as this automatic fixup will be removed in a future release.'), {'driver': driver, 'new_driver': new_driver})
    return new_driver
return path to provider driver class in order to keep backward compatibility with configs < kilo .
train
false
15,166
def list_refs(refnames=None, repo_dir=None, limit_to_heads=False, limit_to_tags=False):
    """Yield (refname, sha) tuples for the repository's refs via ``git show-ref``.

    refnames: optional list of ref names to restrict the listing to.
    repo_dir: repository to operate on; passed through to _gitenv.
    limit_to_heads / limit_to_tags: restrict the listing to branch heads
        and/or tags; setting both lists both kinds.

    Each sha is yielded as a raw binary digest (Python 2 ``str.decode('hex')``
    of git's 40-char hex output).
    """
    argv = ['git', 'show-ref']
    if limit_to_heads:
        argv.append('--heads')
    if limit_to_tags:
        argv.append('--tags')
    # '--' ends option parsing so refnames cannot be mistaken for flags.
    argv.append('--')
    if refnames:
        argv += refnames
    p = subprocess.Popen(argv, preexec_fn=_gitenv(repo_dir), stdout=subprocess.PIPE)
    # Read all output before waiting so the child cannot block on a full pipe.
    out = p.stdout.read().strip()
    rv = p.wait()
    if rv:
        # git show-ref exits non-zero when nothing matched; in that case it
        # must not have produced any output.
        assert (not out)
    if out:
        for d in out.split('\n'):
            # Each line is '<hex-sha> <refname>'.
            (sha, name) = d.split(' ', 1)
            (yield (name, sha.decode('hex')))
yield tuples for all repository refs unless refnames are specified .
train
false
15,167
def _ts_kde(ax, x, data, color, **kwargs):
    """Upsample bootstrap traces over time and draw a KDE density band on *ax*.

    ax: matplotlib Axes to draw on.
    x: 1-D array of time points.
    data: 2-D array of bootstrap traces — presumably shaped
        (n_boot, len(x)); TODO confirm against caller.
    color: matplotlib color spec used as the band's RGB.
    Extra keyword arguments are accepted but ignored.
    """
    kde_data = []
    (y_min, y_max) = (data.min(), data.max())
    # Common 100-point y grid on which each per-timepoint density is evaluated.
    y_vals = np.linspace(y_min, y_max, 100)
    # Linearly interpolate the traces onto a 100-point time grid.
    upsampler = interpolate.interp1d(x, data)
    data_upsample = upsampler(np.linspace(x.min(), x.max(), 100))
    for pt_data in data_upsample.T:
        # One Gaussian KDE over the bootstrap values at each upsampled time point.
        pt_kde = stats.kde.gaussian_kde(pt_data)
        kde_data.append(pt_kde(y_vals))
    kde_data = np.transpose(kde_data)
    # Build an RGBA image: constant color, density encoded in the alpha channel.
    rgb = mpl.colors.ColorConverter().to_rgb(color)
    img = np.zeros((kde_data.shape[0], kde_data.shape[1], 4))
    img[:, :, :3] = rgb
    # Normalize each time column so its peak alpha is 1, clamping any overshoot.
    kde_data /= kde_data.max(axis=0)
    kde_data[(kde_data > 1)] = 1
    img[:, :, 3] = kde_data
    ax.imshow(img, interpolation='spline16', zorder=2, extent=(x.min(), x.max(), y_min, y_max), aspect='auto', origin='lower')
upsample over time and plot a kde of the bootstrap distribution .
train
false
15,170
def status_battery():
    """Return True if the UPS is currently running on battery power.

    Returns an error dict instead when the status output has no TONBATT field.
    """
    data = status()
    if 'TONBATT' not in data:
        return {'Error': 'Battery status not available.'}
    # A non-zero time-on-battery means the UPS is discharging right now.
    return data['TONBATT'] != '0 Seconds'
return true if running on battery power cli example: .
train
false
15,171
def set_gamedir(path):
    """Locate the game directory by walking upward from the current directory.

    A game dir is recognized by containing a ``server/`` folder alongside a
    ``server/conf/settings.py`` file. On success the global GAMEDIR is set and
    the process cwd is left at that directory; after ten levels without a
    match, an error is printed and the process exits.

    NOTE(review): the *path* argument is currently unused — the search always
    starts from ``os.getcwd()``; confirm whether callers expect it honored.
    """
    global GAMEDIR
    max_levels = 10
    settings_file = os.path.join('server', 'conf', 'settings.py')
    for _ in range(max_levels):
        candidate = os.getcwd()
        # Both checks are relative to the cwd, which moves up one level per pass.
        if ('server' in os.listdir(candidate)) and os.path.isfile(settings_file):
            GAMEDIR = candidate
            return
        os.chdir(os.pardir)
    print(ERROR_NO_GAMEDIR)
    sys.exit()
set gamedir based on path .
train
false
15,172
def SetQuickFixList(quickfix_list, focus=False, autoclose=False):
    """Replace Vim's quickfix list with *quickfix_list* and open the window."""
    payload = json.dumps(quickfix_list)
    # setqflist() replaces the current quickfix contents wholesale.
    vim.eval(u'setqflist( {0} )'.format(payload))
    OpenQuickFixList(focus, autoclose)
populate the quickfix list and open it .
train
false
15,174
def downsample(data, n, axis=0, xvals='subsample'):
    """Reduce *data* along *axis* by averaging groups of *n* consecutive samples.

    data: ndarray or MetaArray; trailing samples that do not fill a complete
        group of *n* are dropped.
    n: group size (or a sequence of sizes when *axis* is a sequence).
    axis: axis (or sequence of axes) to downsample along.
    xvals: for MetaArray input, how to rebuild the axis values —
        'subsample' keeps every n-th value, 'downsample' averages them.
    """
    meta = None
    if hasattr(data, 'implements') and data.implements('MetaArray'):
        # Remember the MetaArray so its axis info can be rebuilt at the end.
        meta = data
        data = data.view(np.ndarray)

    # Multiple axes: apply recursively, one axis at a time.
    if hasattr(axis, '__len__'):
        if not hasattr(n, '__len__'):
            n = [n] * len(axis)
        for pos in range(len(axis)):
            data = downsample(data, n[pos], axis[pos])
        return data

    if n <= 1:
        return data

    n_groups = int(data.shape[axis] / n)
    grouped_shape = list(data.shape)
    grouped_shape[axis] = n_groups
    grouped_shape.insert(axis + 1, n)

    # Trim to a whole number of groups, then fold each group onto a new axis.
    index = [slice(None)] * data.ndim
    index[axis] = slice(0, n_groups * n)
    trimmed = data[tuple(index)]
    trimmed.shape = tuple(grouped_shape)
    averaged = trimmed.mean(axis + 1)

    if meta is None:
        return averaged

    info = meta.infoCopy()
    if 'values' in info[axis]:
        if xvals == 'subsample':
            info[axis]['values'] = info[axis]['values'][::n][:n_groups]
        elif xvals == 'downsample':
            info[axis]['values'] = downsample(info[axis]['values'], n)
    return MetaArray(averaged, info=info)
downsample by averaging points together across axis .
train
false
15,175
def config_edit_using_option_strings(config_filepath, desired_option_strings, section, edit=False):
    """Parse raw option strings for *section* and write them to the config file.

    Each string is parsed with raise_on_error=True, so an invalid option
    aborts before anything is written.
    """
    parsed_options = []
    for option_string in desired_option_strings:
        parsed_options.append(parse_option_string(section, option_string, raise_on_error=True))
    config_edit(config_filepath, parsed_options, edit=edit)
writes the desired_option_strings to the config file .
train
false