Dataset schema (field: dtype, observed value range):

id_within_dataset: int64, values 1 to 55.5k
snippet: string, lengths 19 to 14.2k
tokens: list, lengths 6 to 1.63k
nl: string, lengths 6 to 352
split_within_dataset: string, 1 value
is_duplicated: bool, 2 classes
51,584
def fetch_tool_versions(app, tool_shed_repository):
    try:
        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(tool_shed_repository.tool_shed))
        params = dict(name=str(tool_shed_repository.name), owner=str(tool_shed_repository.owner), changeset_revision=str(tool_shed_repository.changeset_revision))
        pathspec = ['repository', 'get_tool_versions']
        url = util.build_url(tool_shed_url, pathspec=pathspec, params=params)
        text = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
        if text:
            return json.loads(text)
        else:
            raise Exception(('No content returned from Tool Shed repository version request to %s' % url))
    except Exception:
        log.exception('Failed to fetch tool version information for Tool Shed repository.')
        raise
[ "def", "fetch_tool_versions", "(", "app", ",", "tool_shed_repository", ")", ":", "try", ":", "tool_shed_url", "=", "common_util", ".", "get_tool_shed_url_from_tool_shed_registry", "(", "app", ",", "str", "(", "tool_shed_repository", ".", "tool_shed", ")", ")", "params", "=", "dict", "(", "name", "=", "str", "(", "tool_shed_repository", ".", "name", ")", ",", "owner", "=", "str", "(", "tool_shed_repository", ".", "owner", ")", ",", "changeset_revision", "=", "str", "(", "tool_shed_repository", ".", "changeset_revision", ")", ")", "pathspec", "=", "[", "'repository'", ",", "'get_tool_versions'", "]", "url", "=", "util", ".", "build_url", "(", "tool_shed_url", ",", "pathspec", "=", "pathspec", ",", "params", "=", "params", ")", "text", "=", "util", ".", "url_get", "(", "tool_shed_url", ",", "password_mgr", "=", "app", ".", "tool_shed_registry", ".", "url_auth", "(", "tool_shed_url", ")", ",", "pathspec", "=", "pathspec", ",", "params", "=", "params", ")", "if", "text", ":", "return", "json", ".", "loads", "(", "text", ")", "else", ":", "raise", "Exception", "(", "(", "'No content returned from Tool Shed repository version request to %s'", "%", "url", ")", ")", "except", "Exception", ":", "log", ".", "exception", "(", "'Failed to fetch tool version information for Tool Shed repository.'", ")", "raise" ]
fetch a data structure describing tool shed versions from the tool shed corresponding to a tool_shed_repository object .
train
false
51,587
def convert_filter(text):
    text = text.strip().lower()
    if text.startswith('re:'):
        txt = text[3:].strip()
    else:
        txt = wildcard_to_re(text)
    try:
        return re.compile(txt, re.I)
    except:
        logging.debug('Could not compile regex: %s', text)
        return None
[ "def", "convert_filter", "(", "text", ")", ":", "text", "=", "text", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "text", ".", "startswith", "(", "'re:'", ")", ":", "txt", "=", "text", "[", "3", ":", "]", ".", "strip", "(", ")", "else", ":", "txt", "=", "wildcard_to_re", "(", "text", ")", "try", ":", "return", "re", ".", "compile", "(", "txt", ",", "re", ".", "I", ")", "except", ":", "logging", ".", "debug", "(", "'Could not compile regex: %s'", ",", "text", ")", "return", "None" ]
return compiled regex .
train
false
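A minimal usage sketch for the record above. The helper wildcard_to_re is not included in the snippet, so fnmatch.translate stands in for it here as an assumed equivalent, and the re/logging imports the function needs are added:

import fnmatch
import logging
import re

def wildcard_to_re(text):
    # assumed stand-in for the helper referenced by the snippet
    return fnmatch.translate(text)

pattern = convert_filter('*.mkv')          # wildcard form
assert pattern.match('Movie.MKV')          # re.I makes the match case-insensitive
pattern = convert_filter('re: ^ep\\d+')    # explicit regex form via the 're:' prefix
assert pattern.match('ep12')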
51,588
def checkcache(filename=None):
    if (filename is None):
        filenames = list(cache.keys())
    elif (filename in cache):
        filenames = [filename]
    else:
        return
    for filename in filenames:
        (size, mtime, lines, fullname) = cache[filename]
        if (mtime is None):
            continue
        try:
            stat = os.stat(fullname)
        except OSError:
            del cache[filename]
            continue
        if ((size != stat.st_size) or (mtime != stat.st_mtime)):
            del cache[filename]
[ "def", "checkcache", "(", "filename", "=", "None", ")", ":", "if", "(", "filename", "is", "None", ")", ":", "filenames", "=", "list", "(", "cache", ".", "keys", "(", ")", ")", "elif", "(", "filename", "in", "cache", ")", ":", "filenames", "=", "[", "filename", "]", "else", ":", "return", "for", "filename", "in", "filenames", ":", "(", "size", ",", "mtime", ",", "lines", ",", "fullname", ")", "=", "cache", "[", "filename", "]", "if", "(", "mtime", "is", "None", ")", ":", "continue", "try", ":", "stat", "=", "os", ".", "stat", "(", "fullname", ")", "except", "OSError", ":", "del", "cache", "[", "filename", "]", "continue", "if", "(", "(", "size", "!=", "stat", ".", "st_size", ")", "or", "(", "mtime", "!=", "stat", ".", "st_mtime", ")", ")", ":", "del", "cache", "[", "filename", "]" ]
discard cache entries that are out of date .
train
true
51,589
def test_install_from_wheel_no_setuptools_entrypoint(script, data):
    result = script.pip('install', 'script.wheel1==0.1', '--no-index', ('--find-links=' + data.find_links), expect_error=False)
    if (os.name == 'nt'):
        wrapper_file = (script.bin / 't1.exe')
    else:
        wrapper_file = (script.bin / 't1')
    wrapper_helper = (script.bin / 't1-script.py')
    assert (wrapper_file in result.files_created)
    assert (wrapper_helper not in result.files_created)
[ "def", "test_install_from_wheel_no_setuptools_entrypoint", "(", "script", ",", "data", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'script.wheel1==0.1'", ",", "'--no-index'", ",", "(", "'--find-links='", "+", "data", ".", "find_links", ")", ",", "expect_error", "=", "False", ")", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "wrapper_file", "=", "(", "script", ".", "bin", "/", "'t1.exe'", ")", "else", ":", "wrapper_file", "=", "(", "script", ".", "bin", "/", "'t1'", ")", "wrapper_helper", "=", "(", "script", ".", "bin", "/", "'t1-script.py'", ")", "assert", "(", "wrapper_file", "in", "result", ".", "files_created", ")", "assert", "(", "wrapper_helper", "not", "in", "result", ".", "files_created", ")" ]
test that when we generate scripts from a wheel without a setuptools entry point, the wrapper script is created but no t1-script.py helper is emitted .
train
false
51,590
def _has_required_moto():
    if (not HAS_MOTO):
        return False
    else:
        if (_get_moto_version() < LooseVersion(required_moto_version)):
            return False
        return True
[ "def", "_has_required_moto", "(", ")", ":", "if", "(", "not", "HAS_MOTO", ")", ":", "return", "False", "else", ":", "if", "(", "_get_moto_version", "(", ")", "<", "LooseVersion", "(", "required_moto_version", ")", ")", ":", "return", "False", "return", "True" ]
returns true/false boolean depending on if moto is installed and correct version .
train
false
51,591
def get_appcontroller_client():
    raw_ips = file_io.read('/etc/appscale/load_balancer_ips')
    ips = raw_ips.split('\n')
    head_node = ips[0]
    secret_file = '/etc/appscale/secret.key'
    secret = read_file_contents(secret_file)
    return AppControllerClient(head_node, secret)
[ "def", "get_appcontroller_client", "(", ")", ":", "raw_ips", "=", "file_io", ".", "read", "(", "'/etc/appscale/load_balancer_ips'", ")", "ips", "=", "raw_ips", ".", "split", "(", "'\\n'", ")", "head_node", "=", "ips", "[", "0", "]", "secret_file", "=", "'/etc/appscale/secret.key'", "secret", "=", "read_file_contents", "(", "secret_file", ")", "return", "AppControllerClient", "(", "head_node", ",", "secret", ")" ]
returns an appcontrollerclient instance for this deployment .
train
false
51,592
def medfilt2d(input, kernel_size=3):
    image = asarray(input)
    if (kernel_size is None):
        kernel_size = ([3] * 2)
    kernel_size = asarray(kernel_size)
    if (kernel_size.shape == ()):
        kernel_size = np.repeat(kernel_size.item(), 2)
    for size in kernel_size:
        if ((size % 2) != 1):
            raise ValueError('Each element of kernel_size should be odd.')
    return sigtools._medfilt2d(image, kernel_size)
[ "def", "medfilt2d", "(", "input", ",", "kernel_size", "=", "3", ")", ":", "image", "=", "asarray", "(", "input", ")", "if", "(", "kernel_size", "is", "None", ")", ":", "kernel_size", "=", "(", "[", "3", "]", "*", "2", ")", "kernel_size", "=", "asarray", "(", "kernel_size", ")", "if", "(", "kernel_size", ".", "shape", "==", "(", ")", ")", ":", "kernel_size", "=", "np", ".", "repeat", "(", "kernel_size", ".", "item", "(", ")", ",", "2", ")", "for", "size", "in", "kernel_size", ":", "if", "(", "(", "size", "%", "2", ")", "!=", "1", ")", ":", "raise", "ValueError", "(", "'Each element of kernel_size should be odd.'", ")", "return", "sigtools", ".", "_medfilt2d", "(", "image", ",", "kernel_size", ")" ]
median filter a 2-dimensional array .
train
false
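The record above mirrors scipy's implementation, so a short usage sketch can go through the public scipy.signal API:

import numpy as np
from scipy.signal import medfilt2d

noisy = np.zeros((5, 5))
noisy[2, 2] = 100.0                       # a single hot pixel
clean = medfilt2d(noisy, kernel_size=3)   # 3x3 median window
assert clean[2, 2] == 0.0                 # the median of eight zeros and one outlier is zero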
51,593
def second_nibble_hex_encoding(t):
    parts = []
    for c in t:
        (x, y) = _get_nibbles(c)
        parts.append(('%s%%%X' % (x, ord(y))))
    return ('%' + '%'.join(parts))
[ "def", "second_nibble_hex_encoding", "(", "t", ")", ":", "parts", "=", "[", "]", "for", "c", "in", "t", ":", "(", "x", ",", "y", ")", "=", "_get_nibbles", "(", "c", ")", "parts", ".", "append", "(", "(", "'%s%%%X'", "%", "(", "x", ",", "ord", "(", "y", ")", ")", ")", ")", "return", "(", "'%'", "+", "'%'", ".", "join", "(", "parts", ")", ")" ]
second nibble hex encoding method .
train
false
51,595
def s3_authorstamp():
    auth = current.auth
    utable = auth.settings.table_user
    if auth.is_logged_in():
        current_user = current.session.auth.user.id
    else:
        current_user = None
    if (current.deployment_settings.get_ui_auth_user_represent() == 'name'):
        represent = s3_auth_user_represent_name
    else:
        represent = s3_auth_user_represent
    s3_meta_created_by = S3ReusableField('created_by', utable, readable=False, writable=False, requires=None, default=current_user, represent=represent, ondelete='RESTRICT')
    s3_meta_modified_by = S3ReusableField('modified_by', utable, readable=False, writable=False, requires=None, default=current_user, update=current_user, represent=represent, ondelete='RESTRICT')
    return (s3_meta_created_by(), s3_meta_modified_by())
[ "def", "s3_authorstamp", "(", ")", ":", "auth", "=", "current", ".", "auth", "utable", "=", "auth", ".", "settings", ".", "table_user", "if", "auth", ".", "is_logged_in", "(", ")", ":", "current_user", "=", "current", ".", "session", ".", "auth", ".", "user", ".", "id", "else", ":", "current_user", "=", "None", "if", "(", "current", ".", "deployment_settings", ".", "get_ui_auth_user_represent", "(", ")", "==", "'name'", ")", ":", "represent", "=", "s3_auth_user_represent_name", "else", ":", "represent", "=", "s3_auth_user_represent", "s3_meta_created_by", "=", "S3ReusableField", "(", "'created_by'", ",", "utable", ",", "readable", "=", "False", ",", "writable", "=", "False", ",", "requires", "=", "None", ",", "default", "=", "current_user", ",", "represent", "=", "represent", ",", "ondelete", "=", "'RESTRICT'", ")", "s3_meta_modified_by", "=", "S3ReusableField", "(", "'modified_by'", ",", "utable", ",", "readable", "=", "False", ",", "writable", "=", "False", ",", "requires", "=", "None", ",", "default", "=", "current_user", ",", "update", "=", "current_user", ",", "represent", "=", "represent", ",", "ondelete", "=", "'RESTRICT'", ")", "return", "(", "s3_meta_created_by", "(", ")", ",", "s3_meta_modified_by", "(", ")", ")" ]
record ownership meta-fields .
train
false
51,596
def output():
    return s3_rest_controller()
[ "def", "output", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
update the progress bar .
train
false
51,597
def delayed_loader(real_name, *args, **kwargs):
    from google.appengine._internal.django.conf import settings
    if settings.USE_I18N:
        from google.appengine._internal.django.utils.translation import trans_real as trans
    else:
        from google.appengine._internal.django.utils.translation import trans_null as trans
    return getattr(trans, real_name)(*args, **kwargs)
[ "def", "delayed_loader", "(", "real_name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "from", "google", ".", "appengine", ".", "_internal", ".", "django", ".", "conf", "import", "settings", "if", "settings", ".", "USE_I18N", ":", "from", "google", ".", "appengine", ".", "_internal", ".", "django", ".", "utils", ".", "translation", "import", "trans_real", "as", "trans", "else", ":", "from", "google", ".", "appengine", ".", "_internal", ".", "django", ".", "utils", ".", "translation", "import", "trans_null", "as", "trans", "return", "getattr", "(", "trans", ",", "real_name", ")", "(", "*", "args", ",", "**", "kwargs", ")" ]
call the real underlying translation function, selecting trans_real or trans_null according to settings.USE_I18N .
train
false
51,598
def walker_factory(name, bot, dest_lat, dest_lng, dest_alt=None, *args, **kwargs):
    if ('StepWalker' == name):
        ret = StepWalker(bot, dest_lat, dest_lng, dest_alt)
    elif ('PolylineWalker' == name):
        ret = PolylineWalker(bot, dest_lat, dest_lng)
    return ret
[ "def", "walker_factory", "(", "name", ",", "bot", ",", "dest_lat", ",", "dest_lng", ",", "dest_alt", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "'StepWalker'", "==", "name", ")", ":", "ret", "=", "StepWalker", "(", "bot", ",", "dest_lat", ",", "dest_lng", ",", "dest_alt", ")", "elif", "(", "'PolylineWalker'", "==", "name", ")", ":", "ret", "=", "PolylineWalker", "(", "bot", ",", "dest_lat", ",", "dest_lng", ")", "return", "ret" ]
charlie and the walker factory .
train
false
51,599
def reverse_complement(seq):
    compdict = {'a': 't', 'c': 'g', 'g': 'c', 't': 'a', 'u': 't', 'm': 'k', 'r': 'y', 'w': 'w', 's': 's', 'y': 'r', 'k': 'm', 'v': 'b', 'h': 'd', 'd': 'h', 'b': 'v', 'x': 'x', 'n': 'n', 'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'U': 'T', 'M': 'K', 'R': 'Y', 'W': 'W', 'S': 'S', 'Y': 'R', 'K': 'M', 'V': 'B', 'H': 'D', 'D': 'H', 'B': 'V', 'X': 'X', 'N': 'N', '*': '*'}
    complseq = ''.join([compdict[base] for base in seq])
    complseq = complseq[::(-1)]
    return complseq
[ "def", "reverse_complement", "(", "seq", ")", ":", "compdict", "=", "{", "'a'", ":", "'t'", ",", "'c'", ":", "'g'", ",", "'g'", ":", "'c'", ",", "'t'", ":", "'a'", ",", "'u'", ":", "'t'", ",", "'m'", ":", "'k'", ",", "'r'", ":", "'y'", ",", "'w'", ":", "'w'", ",", "'s'", ":", "'s'", ",", "'y'", ":", "'r'", ",", "'k'", ":", "'m'", ",", "'v'", ":", "'b'", ",", "'h'", ":", "'d'", ",", "'d'", ":", "'h'", ",", "'b'", ":", "'v'", ",", "'x'", ":", "'x'", ",", "'n'", ":", "'n'", ",", "'A'", ":", "'T'", ",", "'C'", ":", "'G'", ",", "'G'", ":", "'C'", ",", "'T'", ":", "'A'", ",", "'U'", ":", "'T'", ",", "'M'", ":", "'K'", ",", "'R'", ":", "'Y'", ",", "'W'", ":", "'W'", ",", "'S'", ":", "'S'", ",", "'Y'", ":", "'R'", ",", "'K'", ":", "'M'", ",", "'V'", ":", "'B'", ",", "'H'", ":", "'D'", ",", "'D'", ":", "'H'", ",", "'B'", ":", "'V'", ",", "'X'", ":", "'X'", ",", "'N'", ":", "'N'", ",", "'*'", ":", "'*'", "}", "complseq", "=", "''", ".", "join", "(", "[", "compdict", "[", "base", "]", "for", "base", "in", "seq", "]", ")", "complseq", "=", "complseq", "[", ":", ":", "(", "-", "1", ")", "]", "return", "complseq" ]
returns the reverse complement of a dna sequence as string .
train
false
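The snippet above is self-contained, so its behavior can be checked directly:

assert reverse_complement('ATGC') == 'GCAT'
assert reverse_complement('atg') == 'cat'
# mixed case and IUPAC ambiguity codes pass through the lookup table unchanged in case
assert reverse_complement('AcGt') == 'aCgT'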
51,600
def gpa_iterator(handle):
    inline = handle.readline()
    if (inline.strip() == '!gpa-version: 1.1'):
        return _gpa11iterator(handle)
    elif (inline.strip() == '!gpa-version: 1.0'):
        return _gpa10iterator(handle)
    else:
        raise ValueError('Unknown GPA version {0}\n'.format(inline))
[ "def", "gpa_iterator", "(", "handle", ")", ":", "inline", "=", "handle", ".", "readline", "(", ")", "if", "(", "inline", ".", "strip", "(", ")", "==", "'!gpa-version: 1.1'", ")", ":", "return", "_gpa11iterator", "(", "handle", ")", "elif", "(", "inline", ".", "strip", "(", ")", "==", "'!gpa-version: 1.0'", ")", ":", "return", "_gpa10iterator", "(", "handle", ")", "else", ":", "raise", "ValueError", "(", "'Unknown GPA version {0}\\n'", ".", "format", "(", "inline", ")", ")" ]
wrapper function: read gpa format files .
train
false
51,601
def _addHook(klass, name, phase, func):
    _enhook(klass, name)
    if (not hasattr(klass, phase(klass, name))):
        setattr(klass, phase(klass, name), [])
    phaselist = getattr(klass, phase(klass, name))
    phaselist.append(func)
[ "def", "_addHook", "(", "klass", ",", "name", ",", "phase", ",", "func", ")", ":", "_enhook", "(", "klass", ",", "name", ")", "if", "(", "not", "hasattr", "(", "klass", ",", "phase", "(", "klass", ",", "name", ")", ")", ")", ":", "setattr", "(", "klass", ",", "phase", "(", "klass", ",", "name", ")", ",", "[", "]", ")", "phaselist", "=", "getattr", "(", "klass", ",", "phase", "(", "klass", ",", "name", ")", ")", "phaselist", ".", "append", "(", "func", ")" ]
adds a hook to a method on a class .
train
false
51,602
def p_simple_stmt(p):
    p[0] = p[1]
[ "def", "p_simple_stmt", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
simple_stmt : small_stmts newline | small_stmts semicolon newline .
train
false
51,603
def setIdleDelay(delaySeconds, **kwargs):
    _gsession = _GSettings(user=kwargs.get('user'), schema='org.gnome.desktop.session', key='idle-delay')
    return _gsession._set(delaySeconds)
[ "def", "setIdleDelay", "(", "delaySeconds", ",", "**", "kwargs", ")", ":", "_gsession", "=", "_GSettings", "(", "user", "=", "kwargs", ".", "get", "(", "'user'", ")", ",", "schema", "=", "'org.gnome.desktop.session'", ",", "key", "=", "'idle-delay'", ")", "return", "_gsession", ".", "_set", "(", "delaySeconds", ")" ]
set the current idle delay setting in seconds .
train
true
51,604
def system_standby():
    logging.info('Performing system standby')
    if sabnzbd.WIN32:
        powersup.win_standby()
    elif DARWIN:
        powersup.osx_standby()
    else:
        powersup.linux_standby()
[ "def", "system_standby", "(", ")", ":", "logging", ".", "info", "(", "'Performing system standby'", ")", "if", "sabnzbd", ".", "WIN32", ":", "powersup", ".", "win_standby", "(", ")", "elif", "DARWIN", ":", "powersup", ".", "osx_standby", "(", ")", "else", ":", "powersup", ".", "linux_standby", "(", ")" ]
standby system .
train
false
51,605
def eval_levicivita(*args):
    from sympy import factorial
    n = len(args)
    return prod(((prod(((args[j] - args[i]) for j in range((i + 1), n))) / factorial(i)) for i in range(n)))
[ "def", "eval_levicivita", "(", "*", "args", ")", ":", "from", "sympy", "import", "factorial", "n", "=", "len", "(", "args", ")", "return", "prod", "(", "(", "(", "prod", "(", "(", "(", "args", "[", "j", "]", "-", "args", "[", "i", "]", ")", "for", "j", "in", "range", "(", "(", "i", "+", "1", ")", ",", "n", ")", ")", ")", "/", "factorial", "(", "i", ")", ")", "for", "i", "in", "range", "(", "n", ")", ")", ")" ]
evaluate levi-civita symbol .
train
false
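A worked check of the formula above, assuming sympy's prod helper is imported at module scope as in the original source (the snippet uses prod without importing it locally):

from sympy import prod  # assumed module-level import relied on by the snippet

assert eval_levicivita(0, 1, 2) == 1    # even permutation
assert eval_levicivita(0, 2, 1) == -1   # odd permutation
assert eval_levicivita(0, 0, 2) == 0    # repeated index collapses the product to zero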
51,606
def paged(query, size=1000):
    offset = 0
    while True:
        page = query.offset(offset).limit(size).execute()
        if (page.rowcount <= 0):
            break
        for row in page:
            (yield row)
        offset += size
[ "def", "paged", "(", "query", ",", "size", "=", "1000", ")", ":", "offset", "=", "0", "while", "True", ":", "page", "=", "query", ".", "offset", "(", "offset", ")", ".", "limit", "(", "size", ")", ".", "execute", "(", ")", "if", "(", "page", ".", "rowcount", "<=", "0", ")", ":", "break", "for", "row", "in", "page", ":", "(", "yield", "row", ")", "offset", "+=", "size" ]
page query results .
train
false
51,607
def send_html_mail(subject, message, message_html, from_email, recipient_list, priority=None, fail_silently=False, auth_user=None, auth_password=None, headers={}):
    from django.utils.encoding import force_text
    from django.core.mail import EmailMultiAlternatives
    from mailer.models import make_message
    priority = get_priority(priority)
    subject = force_text(subject)
    message = force_text(message)
    msg = make_message(subject=subject, body=message, from_email=from_email, to=recipient_list, priority=priority)
    email = msg.email
    email = EmailMultiAlternatives(email.subject, email.body, email.from_email, email.to, headers=headers)
    email.attach_alternative(message_html, 'text/html')
    msg.email = email
    msg.save()
    return 1
[ "def", "send_html_mail", "(", "subject", ",", "message", ",", "message_html", ",", "from_email", ",", "recipient_list", ",", "priority", "=", "None", ",", "fail_silently", "=", "False", ",", "auth_user", "=", "None", ",", "auth_password", "=", "None", ",", "headers", "=", "{", "}", ")", ":", "from", "django", ".", "utils", ".", "encoding", "import", "force_text", "from", "django", ".", "core", ".", "mail", "import", "EmailMultiAlternatives", "from", "mailer", ".", "models", "import", "make_message", "priority", "=", "get_priority", "(", "priority", ")", "subject", "=", "force_text", "(", "subject", ")", "message", "=", "force_text", "(", "message", ")", "msg", "=", "make_message", "(", "subject", "=", "subject", ",", "body", "=", "message", ",", "from_email", "=", "from_email", ",", "to", "=", "recipient_list", ",", "priority", "=", "priority", ")", "email", "=", "msg", ".", "email", "email", "=", "EmailMultiAlternatives", "(", "email", ".", "subject", ",", "email", ".", "body", ",", "email", ".", "from_email", ",", "email", ".", "to", ",", "headers", "=", "headers", ")", "email", ".", "attach_alternative", "(", "message_html", ",", "'text/html'", ")", "msg", ".", "email", "=", "email", "msg", ".", "save", "(", ")", "return", "1" ]
function to queue html e-mails .
train
true
51,609
def jog_music():
    if (music_enabled and (not music.get_busy())):
        start_next_music()
[ "def", "jog_music", "(", ")", ":", "if", "(", "music_enabled", "and", "(", "not", "music", ".", "get_busy", "(", ")", ")", ")", ":", "start_next_music", "(", ")" ]
if no music is currently playing, start the next track .
train
false
51,611
def check_float(string):
    try:
        float(string)
        return True
    except ValueError:
        return False
[ "def", "check_float", "(", "string", ")", ":", "try", ":", "float", "(", "string", ")", "return", "True", "except", "ValueError", ":", "return", "False" ]
helper function for checking if a string can be converted to a float .
train
false
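Usage of the record above is straightforward; note that it accepts anything float() accepts, including scientific notation and infinities:

assert check_float('3.14') is True
assert check_float('1e-4') is True
assert check_float('inf') is True    # float('inf') parses, so this also passes
assert check_float('abc') is False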
51,612
def missing_folders(paths):
    ret = []
    pool = set(paths)
    for path in paths:
        expanded = expand(path)
        for i in range(len(expanded)):
            folder = os.path.join(*expanded[:(len(expanded) - i)])
            if (folder and (folder not in pool)):
                pool.add(folder)
                ret.append(folder)
    return ret
[ "def", "missing_folders", "(", "paths", ")", ":", "ret", "=", "[", "]", "pool", "=", "set", "(", "paths", ")", "for", "path", "in", "paths", ":", "expanded", "=", "expand", "(", "path", ")", "for", "i", "in", "range", "(", "len", "(", "expanded", ")", ")", ":", "folder", "=", "os", ".", "path", ".", "join", "(", "*", "expanded", "[", ":", "(", "len", "(", "expanded", ")", "-", "i", ")", "]", ")", "if", "(", "folder", "and", "(", "folder", "not", "in", "pool", ")", ")", ":", "pool", ".", "add", "(", "folder", ")", "ret", ".", "append", "(", "folder", ")", "return", "ret" ]
missing_folders => [a .
train
false
51,613
@public
def apart_list(f, x=None, dummies=None, **options):
    allowed_flags(options, [])
    f = sympify(f)
    if f.is_Atom:
        return f
    else:
        (P, Q) = f.as_numer_denom()
    options = set_defaults(options, extension=True)
    ((P, Q), opt) = parallel_poly_from_expr((P, Q), x, **options)
    if P.is_multivariate:
        raise NotImplementedError('multivariate partial fraction decomposition')
    (common, P, Q) = P.cancel(Q)
    (poly, P) = P.div(Q, auto=True)
    (P, Q) = P.rat_clear_denoms(Q)
    polypart = poly
    if (dummies is None):
        def dummies(name):
            d = Dummy(name)
            while True:
                (yield d)
        dummies = dummies('w')
    rationalpart = apart_list_full_decomposition(P, Q, dummies)
    return (common, polypart, rationalpart)
[ "@", "public", "def", "apart_list", "(", "f", ",", "x", "=", "None", ",", "dummies", "=", "None", ",", "**", "options", ")", ":", "allowed_flags", "(", "options", ",", "[", "]", ")", "f", "=", "sympify", "(", "f", ")", "if", "f", ".", "is_Atom", ":", "return", "f", "else", ":", "(", "P", ",", "Q", ")", "=", "f", ".", "as_numer_denom", "(", ")", "options", "=", "set_defaults", "(", "options", ",", "extension", "=", "True", ")", "(", "(", "P", ",", "Q", ")", ",", "opt", ")", "=", "parallel_poly_from_expr", "(", "(", "P", ",", "Q", ")", ",", "x", ",", "**", "options", ")", "if", "P", ".", "is_multivariate", ":", "raise", "NotImplementedError", "(", "'multivariate partial fraction decomposition'", ")", "(", "common", ",", "P", ",", "Q", ")", "=", "P", ".", "cancel", "(", "Q", ")", "(", "poly", ",", "P", ")", "=", "P", ".", "div", "(", "Q", ",", "auto", "=", "True", ")", "(", "P", ",", "Q", ")", "=", "P", ".", "rat_clear_denoms", "(", "Q", ")", "polypart", "=", "poly", "if", "(", "dummies", "is", "None", ")", ":", "def", "dummies", "(", "name", ")", ":", "d", "=", "Dummy", "(", "name", ")", "while", "True", ":", "(", "yield", "d", ")", "dummies", "=", "dummies", "(", "'w'", ")", "rationalpart", "=", "apart_list_full_decomposition", "(", "P", ",", "Q", ",", "dummies", ")", "return", "(", "common", ",", "polypart", ",", "rationalpart", ")" ]
compute partial fraction decomposition of a rational function and return the result in structured form .
train
false
51,616
def get_python_code(paths):
    retval = []
    for p in paths:
        if (not os.path.isdir(p)):
            raise Exception(("'%s' is not a directory." % p))
        for (dirpath, dirnames, filenames) in os.walk(p):
            for f in filenames:
                if (len([True for e in PYTHON_SOURCE_EXTENSIONS if f.endswith(e)]) > 0):
                    fn = os.path.join(dirpath, f)
                    with open(fn) as fd:
                        content = [l.decode(PYTHON_ENCODING) for l in fd.readlines()]
                    retval.append((fn, content))
    return retval
[ "def", "get_python_code", "(", "paths", ")", ":", "retval", "=", "[", "]", "for", "p", "in", "paths", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", ")", ":", "raise", "Exception", "(", "(", "\"'%s' is not a directory.\"", "%", "p", ")", ")", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "p", ")", ":", "for", "f", "in", "filenames", ":", "if", "(", "len", "(", "[", "True", "for", "e", "in", "PYTHON_SOURCE_EXTENSIONS", "if", "f", ".", "endswith", "(", "e", ")", "]", ")", ">", "0", ")", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "f", ")", "with", "open", "(", "fn", ")", "as", "fd", ":", "content", "=", "[", "l", ".", "decode", "(", "PYTHON_ENCODING", ")", "for", "l", "in", "fd", ".", "readlines", "(", ")", "]", "retval", ".", "append", "(", "(", "fn", ",", "content", ")", ")", "return", "retval" ]
returns all python code found under the given directory paths, as (filename, lines) pairs .
train
false
51,617
def pb_set(msg, field_name, val):
    field_desc = msg.DESCRIPTOR.fields_by_name[field_name]
    proper_type = cpp_type_to_python[field_desc.cpp_type]
    try_types = (type(val), proper_type)
    for t in try_types:
        log.debug(u'attempt %s.%s = %s(%r)', msg.__class__.__name__, field_name, t, val)
        try:
            setattr(msg, field_name, t(val))
            log.debug(u'! success')
            break
        except (TypeError, ValueError):
            log.debug(u'X failure')
    else:
        return False
    return True
[ "def", "pb_set", "(", "msg", ",", "field_name", ",", "val", ")", ":", "field_desc", "=", "msg", ".", "DESCRIPTOR", ".", "fields_by_name", "[", "field_name", "]", "proper_type", "=", "cpp_type_to_python", "[", "field_desc", ".", "cpp_type", "]", "try_types", "=", "(", "type", "(", "val", ")", ",", "proper_type", ")", "for", "t", "in", "try_types", ":", "log", ".", "debug", "(", "u'attempt %s.%s = %s(%r)'", ",", "msg", ".", "__class__", ".", "__name__", ",", "field_name", ",", "t", ",", "val", ")", "try", ":", "setattr", "(", "msg", ",", "field_name", ",", "t", "(", "val", ")", ")", "log", ".", "debug", "(", "u'! success'", ")", "break", "except", "(", "TypeError", ",", "ValueError", ")", ":", "log", ".", "debug", "(", "u'X failure'", ")", "else", ":", "return", "False", "return", "True" ]
return true and set val to field_name in msg if the assignment is type-compatible .
train
false
51,618
def write_font_record(data, obfuscate=True, compress=True):
    flags = 0
    key_len = 20
    usize = len(data)
    xor_key = ''
    if compress:
        flags |= 1
        data = zlib.compress(data, 9)
    if (obfuscate and (len(data) >= 1040)):
        flags |= 2
        xor_key = os.urandom(key_len)
        key = bytearray(xor_key)
        data = bytearray(data)
        for i in xrange(1040):
            data[i] ^= key[(i % key_len)]
        data = bytes(data)
    key_start = (struct.calcsize('>5L') + 4)
    data_start = (key_start + len(xor_key))
    header = ('FONT' + struct.pack('>5L', usize, flags, data_start, len(xor_key), key_start))
    return ((header + xor_key) + data)
[ "def", "write_font_record", "(", "data", ",", "obfuscate", "=", "True", ",", "compress", "=", "True", ")", ":", "flags", "=", "0", "key_len", "=", "20", "usize", "=", "len", "(", "data", ")", "xor_key", "=", "''", "if", "compress", ":", "flags", "|=", "1", "data", "=", "zlib", ".", "compress", "(", "data", ",", "9", ")", "if", "(", "obfuscate", "and", "(", "len", "(", "data", ")", ">=", "1040", ")", ")", ":", "flags", "|=", "2", "xor_key", "=", "os", ".", "urandom", "(", "key_len", ")", "key", "=", "bytearray", "(", "xor_key", ")", "data", "=", "bytearray", "(", "data", ")", "for", "i", "in", "xrange", "(", "1040", ")", ":", "data", "[", "i", "]", "^=", "key", "[", "(", "i", "%", "key_len", ")", "]", "data", "=", "bytes", "(", "data", ")", "key_start", "=", "(", "struct", ".", "calcsize", "(", "'>5L'", ")", "+", "4", ")", "data_start", "=", "(", "key_start", "+", "len", "(", "xor_key", ")", ")", "header", "=", "(", "'FONT'", "+", "struct", ".", "pack", "(", "'>5L'", ",", "usize", ",", "flags", ",", "data_start", ",", "len", "(", "xor_key", ")", ",", "key_start", ")", ")", "return", "(", "(", "header", "+", "xor_key", ")", "+", "data", ")" ]
write the ttf/otf font represented by data into a font record .
train
false
51,620
def girvan_newman(G, most_valuable_edge=None):
    if (G.number_of_edges() == 0):
        (yield tuple(nx.connected_components(G)))
        return
    if (most_valuable_edge is None):
        def most_valuable_edge(G):
            'Returns the edge with the highest betweenness centrality\n            in the graph `G`.\n\n            '
            betweenness = nx.edge_betweenness_centrality(G)
            return max(betweenness, key=betweenness.get)
    g = G.copy().to_undirected()
    g.remove_edges_from(g.selfloop_edges())
    while (g.number_of_edges() > 0):
        (yield _without_most_central_edges(g, most_valuable_edge))
[ "def", "girvan_newman", "(", "G", ",", "most_valuable_edge", "=", "None", ")", ":", "if", "(", "G", ".", "number_of_edges", "(", ")", "==", "0", ")", ":", "(", "yield", "tuple", "(", "nx", ".", "connected_components", "(", "G", ")", ")", ")", "return", "if", "(", "most_valuable_edge", "is", "None", ")", ":", "def", "most_valuable_edge", "(", "G", ")", ":", "betweenness", "=", "nx", ".", "edge_betweenness_centrality", "(", "G", ")", "return", "max", "(", "betweenness", ",", "key", "=", "betweenness", ".", "get", ")", "g", "=", "G", ".", "copy", "(", ")", ".", "to_undirected", "(", ")", "g", ".", "remove_edges_from", "(", "g", ".", "selfloop_edges", "(", ")", ")", "while", "(", "g", ".", "number_of_edges", "(", ")", ">", "0", ")", ":", "(", "yield", "_without_most_central_edges", "(", "g", ",", "most_valuable_edge", ")", ")" ]
finds communities in a graph using the girvan–newman method .
train
false
51,621
def get_all_subclasses(cls):
    subclasses = cls.__subclasses__()
    to_visit = list(subclasses)
    while to_visit:
        for sc in to_visit:
            to_visit.remove(sc)
            for ssc in sc.__subclasses__():
                subclasses.append(ssc)
                to_visit.append(ssc)
    return subclasses
[ "def", "get_all_subclasses", "(", "cls", ")", ":", "subclasses", "=", "cls", ".", "__subclasses__", "(", ")", "to_visit", "=", "list", "(", "subclasses", ")", "while", "to_visit", ":", "for", "sc", "in", "to_visit", ":", "to_visit", ".", "remove", "(", "sc", ")", "for", "ssc", "in", "sc", ".", "__subclasses__", "(", ")", ":", "subclasses", ".", "append", "(", "ssc", ")", "to_visit", ".", "append", "(", "ssc", ")", "return", "subclasses" ]
used by modules like hardware or network fact classes to retrieve all subclasses of a given class .
train
false
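A self-contained demonstration of the record above with a small class tree; the function walks the hierarchy transitively, not just one level down:

class A: pass
class B(A): pass
class C(B): pass

names = {cls.__name__ for cls in get_all_subclasses(A)}
assert names == {'B', 'C'}   # the grandchild C is included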
51,623
def read_bin_fragment(struct_def, fileh, offset=0, data=None, byte_padding=None):
    if (data is None):
        data = {}
    bytes_read = 0
    for item in struct_def:
        fileh.seek((offset + bytes_read))
        n_bytes = struct.calcsize(item[1])
        buffer = fileh.read(n_bytes)
        read = struct.unpack(('>' + item[1]), buffer)
        if (len(read) == 1):
            read = read[0]
        data[item[0]] = read
        bytes_read += n_bytes
    if (byte_padding is not None):
        pad = byte_padding
        bytes_read = ((((bytes_read + pad) - 1) // pad) * pad)
    return (bytes_read, data)
[ "def", "read_bin_fragment", "(", "struct_def", ",", "fileh", ",", "offset", "=", "0", ",", "data", "=", "None", ",", "byte_padding", "=", "None", ")", ":", "if", "(", "data", "is", "None", ")", ":", "data", "=", "{", "}", "bytes_read", "=", "0", "for", "item", "in", "struct_def", ":", "fileh", ".", "seek", "(", "(", "offset", "+", "bytes_read", ")", ")", "n_bytes", "=", "struct", ".", "calcsize", "(", "item", "[", "1", "]", ")", "buffer", "=", "fileh", ".", "read", "(", "n_bytes", ")", "read", "=", "struct", ".", "unpack", "(", "(", "'>'", "+", "item", "[", "1", "]", ")", ",", "buffer", ")", "if", "(", "len", "(", "read", ")", "==", "1", ")", ":", "read", "=", "read", "[", "0", "]", "data", "[", "item", "[", "0", "]", "]", "=", "read", "bytes_read", "+=", "n_bytes", "if", "(", "byte_padding", "is", "not", "None", ")", ":", "pad", "=", "byte_padding", "bytes_read", "=", "(", "(", "(", "(", "bytes_read", "+", "pad", ")", "-", "1", ")", "//", "pad", ")", "*", "pad", ")", "return", "(", "bytes_read", ",", "data", ")" ]
it reads a chunk of a binary file .
train
false
51,624
def sina_download_by_vid(vid, title=None, output_dir='.', merge=True, info_only=False):
    xml = video_info_xml(vid)
    sina_download_by_xml(xml, title, output_dir, merge, info_only)
[ "def", "sina_download_by_vid", "(", "vid", ",", "title", "=", "None", ",", "output_dir", "=", "'.'", ",", "merge", "=", "True", ",", "info_only", "=", "False", ")", ":", "xml", "=", "video_info_xml", "(", "vid", ")", "sina_download_by_xml", "(", "xml", ",", "title", ",", "output_dir", ",", "merge", ",", "info_only", ")" ]
downloads a sina video by its unique vid .
train
false
51,625
@task(base=BaseInstructorTask)
def cohort_students(entry_id, xmodule_instance_args):
    action_name = ugettext_noop('cohorted')
    task_fn = partial(cohort_students_and_upload, xmodule_instance_args)
    return run_main_task(entry_id, task_fn, action_name)
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "cohort_students", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "ugettext_noop", "(", "'cohorted'", ")", "task_fn", "=", "partial", "(", "cohort_students_and_upload", ",", "xmodule_instance_args", ")", "return", "run_main_task", "(", "entry_id", ",", "task_fn", ",", "action_name", ")" ]
cohort students in bulk .
train
false
51,627
def _parse_time_str(time_str):
    time_obj = time.strptime(time_str, '%M:%S')
    return ((time_obj.tm_min * 60) + time_obj.tm_sec)
[ "def", "_parse_time_str", "(", "time_str", ")", ":", "time_obj", "=", "time", ".", "strptime", "(", "time_str", ",", "'%M:%S'", ")", "return", "(", "(", "time_obj", ".", "tm_min", "*", "60", ")", "+", "time_obj", ".", "tm_sec", ")" ]
parse a string of the form 1:23 into seconds .
train
false
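A quick check of the record above; strptime enforces the MM:SS shape, so out-of-range seconds raise ValueError rather than being carried over:

import time

assert _parse_time_str('1:23') == 83
assert _parse_time_str('10:05') == 605
# _parse_time_str('1:90') raises ValueError: %S only accepts seconds in range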
51,628
def initialize_sys_path():
    global ad_paths
    global hooked
    dd = tools.config.addons_data_dir
    if (dd not in ad_paths):
        ad_paths.append(dd)
    for ad in tools.config['addons_path'].split(','):
        ad = os.path.abspath(tools.ustr(ad.strip()))
        if (ad not in ad_paths):
            ad_paths.append(ad)
    base_path = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'addons'))
    if (base_path not in ad_paths):
        ad_paths.append(base_path)
    for ad in __import__('odoo.addons').addons.__path__:
        ad = os.path.abspath(ad)
        if (ad not in ad_paths):
            ad_paths.append(ad)
    if (not hooked):
        sys.meta_path.append(AddonsHook())
        sys.meta_path.append(OdooHook())
        hooked = True
[ "def", "initialize_sys_path", "(", ")", ":", "global", "ad_paths", "global", "hooked", "dd", "=", "tools", ".", "config", ".", "addons_data_dir", "if", "(", "dd", "not", "in", "ad_paths", ")", ":", "ad_paths", ".", "append", "(", "dd", ")", "for", "ad", "in", "tools", ".", "config", "[", "'addons_path'", "]", ".", "split", "(", "','", ")", ":", "ad", "=", "os", ".", "path", ".", "abspath", "(", "tools", ".", "ustr", "(", "ad", ".", "strip", "(", ")", ")", ")", "if", "(", "ad", "not", "in", "ad_paths", ")", ":", "ad_paths", ".", "append", "(", "ad", ")", "base_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", ",", "'addons'", ")", ")", "if", "(", "base_path", "not", "in", "ad_paths", ")", ":", "ad_paths", ".", "append", "(", "base_path", ")", "for", "ad", "in", "__import__", "(", "'odoo.addons'", ")", ".", "addons", ".", "__path__", ":", "ad", "=", "os", ".", "path", ".", "abspath", "(", "ad", ")", "if", "(", "ad", "not", "in", "ad_paths", ")", ":", "ad_paths", ".", "append", "(", "ad", ")", "if", "(", "not", "hooked", ")", ":", "sys", ".", "meta_path", ".", "append", "(", "AddonsHook", "(", ")", ")", "sys", ".", "meta_path", ".", "append", "(", "OdooHook", "(", ")", ")", "hooked", "=", "True" ]
setup an import-hook to be able to import openerp addons from the different addons paths .
train
false
51,629
def antidivisor_count(n):
    n = as_int(abs(n))
    if (n <= 2):
        return 0
    return ((((divisor_count(((2 * n) - 1)) + divisor_count(((2 * n) + 1))) + divisor_count(n)) - divisor_count(n, 2)) - 5)
[ "def", "antidivisor_count", "(", "n", ")", ":", "n", "=", "as_int", "(", "abs", "(", "n", ")", ")", "if", "(", "n", "<=", "2", ")", ":", "return", "0", "return", "(", "(", "(", "(", "divisor_count", "(", "(", "(", "2", "*", "n", ")", "-", "1", ")", ")", "+", "divisor_count", "(", "(", "(", "2", "*", "n", ")", "+", "1", ")", ")", ")", "+", "divisor_count", "(", "n", ")", ")", "-", "divisor_count", "(", "n", ",", "2", ")", ")", "-", "5", ")" ]
return the number of antidivisors [1]_ of n .
train
false
51,630
def closeSerial():
    snap.closeSerial()
[ "def", "closeSerial", "(", ")", ":", "snap", ".", "closeSerial", "(", ")" ]
close serial port for snap reprap communications .
train
false
51,631
@pytest.fixture(scope='module')
def strcat_sym():
    ds = dshape('3 * {name: string, comment: string, num: int32}')
    s = symbol('s', dshape=ds)
    return s
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'module'", ")", "def", "strcat_sym", "(", ")", ":", "ds", "=", "dshape", "(", "'3 * {name: string, comment: string, num: int32}'", ")", "s", "=", "symbol", "(", "'s'", ",", "dshape", "=", "ds", ")", "return", "s" ]
blaze symbol used to test exceptions raised by cat() .
train
false
51,632
def _checkDigestResponse(auth_map, password, method='GET', A1=None, **kwargs):
    if (auth_map['realm'] != kwargs.get('realm', None)):
        return False
    response = _computeDigestResponse(auth_map, password, method, A1, **kwargs)
    return (response == auth_map['response'])
[ "def", "_checkDigestResponse", "(", "auth_map", ",", "password", ",", "method", "=", "'GET'", ",", "A1", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "auth_map", "[", "'realm'", "]", "!=", "kwargs", ".", "get", "(", "'realm'", ",", "None", ")", ")", ":", "return", "False", "response", "=", "_computeDigestResponse", "(", "auth_map", ",", "password", ",", "method", ",", "A1", ",", "**", "kwargs", ")", "return", "(", "response", "==", "auth_map", "[", "'response'", "]", ")" ]
this function is used to verify the response given by the client when it tries to authenticate .
train
false
51,634
def MA(ds, count, timeperiod=(- (2 ** 31)), matype=0):
    return call_talib_with_ds(ds, count, talib.MA, timeperiod, matype)
[ "def", "MA", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "matype", "=", "0", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "MA", ",", "timeperiod", ",", "matype", ")" ]
all moving average .
train
false
51,636
def generate_password(length=20, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    r = random.SystemRandom()
    password = [r.choice(s) for s in symbolgroups]
    r.shuffle(password)
    password = password[:length]
    length -= len(password)
    symbols = ''.join(symbolgroups)
    password.extend([r.choice(symbols) for _i in xrange(length)])
    r.shuffle(password)
    return ''.join(password)
[ "def", "generate_password", "(", "length", "=", "20", ",", "symbolgroups", "=", "DEFAULT_PASSWORD_SYMBOLS", ")", ":", "r", "=", "random", ".", "SystemRandom", "(", ")", "password", "=", "[", "r", ".", "choice", "(", "s", ")", "for", "s", "in", "symbolgroups", "]", "r", ".", "shuffle", "(", "password", ")", "password", "=", "password", "[", ":", "length", "]", "length", "-=", "len", "(", "password", ")", "symbols", "=", "''", ".", "join", "(", "symbolgroups", ")", "password", ".", "extend", "(", "[", "r", ".", "choice", "(", "symbols", ")", "for", "_i", "in", "xrange", "(", "length", ")", "]", ")", "r", ".", "shuffle", "(", "password", ")", "return", "''", ".", "join", "(", "password", ")" ]
generate internal password for externally authenticated user .
train
false
51,637
def parseTimestamp(s):
    s = s.strip()
    for pattern in DATETIME_FORMATS:
        try:
            return datetime.datetime.strptime(s, pattern)
        except ValueError:
            pass
    raise ValueError(('The provided timestamp %s is malformed. The supported formats are: [%s]' % (s, ', '.join(DATETIME_FORMATS))))
[ "def", "parseTimestamp", "(", "s", ")", ":", "s", "=", "s", ".", "strip", "(", ")", "for", "pattern", "in", "DATETIME_FORMATS", ":", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "s", ",", "pattern", ")", "except", "ValueError", ":", "pass", "raise", "ValueError", "(", "(", "'The provided timestamp %s is malformed. The supported formats are: [%s]'", "%", "(", "s", ",", "', '", ".", "join", "(", "DATETIME_FORMATS", ")", ")", ")", ")" ]
parses a textual datetime format and return a python datetime object .
train
true
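A sketch of calling the record above. DATETIME_FORMATS is a module-level constant not included in the snippet, so a plausible stand-in is assumed here:

import datetime

# assumed formats; the real module defines its own list
DATETIME_FORMATS = ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d']

assert parseTimestamp('2016-01-02') == datetime.datetime(2016, 1, 2, 0, 0)
# an unparseable string falls through every format and raises ValueError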
51,639
def define_field(field_descriptor):
    field_class = _VARIANT_MAP[field_descriptor.variant]
    params = {'number': field_descriptor.number, 'variant': field_descriptor.variant}
    if (field_descriptor.label == descriptor.FieldDescriptor.Label.REQUIRED):
        params['required'] = True
    elif (field_descriptor.label == descriptor.FieldDescriptor.Label.REPEATED):
        params['repeated'] = True
    message_type_field = _MESSAGE_TYPE_MAP.get(field_descriptor.type_name)
    if message_type_field:
        return message_type_field(**params)
    elif (field_class in (messages.EnumField, messages.MessageField)):
        return field_class(field_descriptor.type_name, **params)
    else:
        return field_class(**params)
[ "def", "define_field", "(", "field_descriptor", ")", ":", "field_class", "=", "_VARIANT_MAP", "[", "field_descriptor", ".", "variant", "]", "params", "=", "{", "'number'", ":", "field_descriptor", ".", "number", ",", "'variant'", ":", "field_descriptor", ".", "variant", "}", "if", "(", "field_descriptor", ".", "label", "==", "descriptor", ".", "FieldDescriptor", ".", "Label", ".", "REQUIRED", ")", ":", "params", "[", "'required'", "]", "=", "True", "elif", "(", "field_descriptor", ".", "label", "==", "descriptor", ".", "FieldDescriptor", ".", "Label", ".", "REPEATED", ")", ":", "params", "[", "'repeated'", "]", "=", "True", "message_type_field", "=", "_MESSAGE_TYPE_MAP", ".", "get", "(", "field_descriptor", ".", "type_name", ")", "if", "message_type_field", ":", "return", "message_type_field", "(", "**", "params", ")", "elif", "(", "field_class", "in", "(", "messages", ".", "EnumField", ",", "messages", ".", "MessageField", ")", ")", ":", "return", "field_class", "(", "field_descriptor", ".", "type_name", ",", "**", "params", ")", "else", ":", "return", "field_class", "(", "**", "params", ")" ]
define field instance from descriptor .
train
false
51,640
def tag_structure(tag, site):
    return {'tag_id': tag.pk,
            'name': tag.name,
            'count': tag.count,
            'slug': tag.name,
            'html_url': ('%s://%s%s' % (PROTOCOL, site.domain, reverse('zinnia:tag_detail', args=[tag.name]))),
            'rss_url': ('%s://%s%s' % (PROTOCOL, site.domain, reverse('zinnia:tag_feed', args=[tag.name])))}
[ "def", "tag_structure", "(", "tag", ",", "site", ")", ":", "return", "{", "'tag_id'", ":", "tag", ".", "pk", ",", "'name'", ":", "tag", ".", "name", ",", "'count'", ":", "tag", ".", "count", ",", "'slug'", ":", "tag", ".", "name", ",", "'html_url'", ":", "(", "'%s://%s%s'", "%", "(", "PROTOCOL", ",", "site", ".", "domain", ",", "reverse", "(", "'zinnia:tag_detail'", ",", "args", "=", "[", "tag", ".", "name", "]", ")", ")", ")", ",", "'rss_url'", ":", "(", "'%s://%s%s'", "%", "(", "PROTOCOL", ",", "site", ".", "domain", ",", "reverse", "(", "'zinnia:tag_feed'", ",", "args", "=", "[", "tag", ".", "name", "]", ")", ")", ")", "}" ]
a tag structure .
train
true
51,641
def test_scenarios_with_extra_whitespace():
    feature = Feature.from_string(FEATURE14)
    assert_equals(type(feature.scenarios), list)
    assert_equals(len(feature.scenarios), 1, 'It should have 1 scenario')
    assert_equals(feature.name, 'Extra whitespace feature')
    scenario = feature.scenarios[0]
    assert_equals(type(scenario), Scenario)
    assert_equals(scenario.name, 'Extra whitespace scenario')
[ "def", "test_scenarios_with_extra_whitespace", "(", ")", ":", "feature", "=", "Feature", ".", "from_string", "(", "FEATURE14", ")", "assert_equals", "(", "type", "(", "feature", ".", "scenarios", ")", ",", "list", ")", "assert_equals", "(", "len", "(", "feature", ".", "scenarios", ")", ",", "1", ",", "'It should have 1 scenario'", ")", "assert_equals", "(", "feature", ".", "name", ",", "'Extra whitespace feature'", ")", "scenario", "=", "feature", ".", "scenarios", "[", "0", "]", "assert_equals", "(", "type", "(", "scenario", ")", ",", "Scenario", ")", "assert_equals", "(", "scenario", ".", "name", ",", "'Extra whitespace scenario'", ")" ]
make sure that extra leading whitespace is ignored .
train
false
51,642
def findSingleton(sList):
    count_dict = {}
    single_dict = {}
    for key in sList:
        if (key in count_dict.keys()):
            count_dict[key] += 1
            single_dict.pop(key, None)
        else:
            count_dict[key] = 1
            single_dict[key] = True
    return single_dict.keys()
[ "def", "findSingleton", "(", "sList", ")", ":", "count_dict", "=", "{", "}", "single_dict", "=", "{", "}", "for", "key", "in", "sList", ":", "if", "(", "key", "in", "count_dict", ".", "keys", "(", ")", ")", ":", "count_dict", "[", "key", "]", "+=", "1", "single_dict", ".", "pop", "(", "key", ",", "None", ")", "else", ":", "count_dict", "[", "key", "]", "=", "1", "single_dict", "[", "key", "]", "=", "True", "return", "single_dict", ".", "keys", "(", ")" ]
returns entries that only occurred once in the list .
train
false
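A quick behavioral check of the record above; the result comes from dict keys, so its ordering should not be relied on in Python 2 era code:

singles = findSingleton([1, 2, 2, 3, 3, 3, 4])
assert sorted(singles) == [1, 4]   # 2 and 3 appear more than once and are dropped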
51,643
def get_function_by_ifname(ifname):
    dev_path = ('/sys/class/net/%s/device' % ifname)
    sriov_totalvfs = 0
    if os.path.isdir(dev_path):
        try:
            with open(os.path.join(dev_path, _SRIOV_TOTALVFS)) as fd:
                sriov_totalvfs = int(fd.read())
            return (os.readlink(dev_path).strip('./'), (sriov_totalvfs > 0))
        except (IOError, ValueError):
            return (os.readlink(dev_path).strip('./'), False)
    return (None, False)
[ "def", "get_function_by_ifname", "(", "ifname", ")", ":", "dev_path", "=", "(", "'/sys/class/net/%s/device'", "%", "ifname", ")", "sriov_totalvfs", "=", "0", "if", "os", ".", "path", ".", "isdir", "(", "dev_path", ")", ":", "try", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "dev_path", ",", "_SRIOV_TOTALVFS", ")", ")", "as", "fd", ":", "sriov_totalvfs", "=", "int", "(", "fd", ".", "read", "(", ")", ")", "return", "(", "os", ".", "readlink", "(", "dev_path", ")", ".", "strip", "(", "'./'", ")", ",", "(", "sriov_totalvfs", ">", "0", ")", ")", "except", "(", "IOError", ",", "ValueError", ")", ":", "return", "(", "os", ".", "readlink", "(", "dev_path", ")", ".", "strip", "(", "'./'", ")", ",", "False", ")", "return", "(", "None", ",", "False", ")" ]
given the device name, return its pci function address and whether sr-iov is supported .
train
false
51,644
def processor_hash(value):
    shared_secret = get_processor_config().get('SHARED_SECRET', '')
    hash_obj = hmac.new(shared_secret.encode('utf-8'), value.encode('utf-8'), sha1)
    return binascii.b2a_base64(hash_obj.digest())[:(-1)]
[ "def", "processor_hash", "(", "value", ")", ":", "shared_secret", "=", "get_processor_config", "(", ")", ".", "get", "(", "'SHARED_SECRET'", ",", "''", ")", "hash_obj", "=", "hmac", ".", "new", "(", "shared_secret", ".", "encode", "(", "'utf-8'", ")", ",", "value", ".", "encode", "(", "'utf-8'", ")", ",", "sha1", ")", "return", "binascii", ".", "b2a_base64", "(", "hash_obj", ".", "digest", "(", ")", ")", "[", ":", "(", "-", "1", ")", "]" ]
performs the base64(hmac_sha1) used by cybersource hosted order page .
train
false
51,645
@pytest.fixture
def forum(category, default_settings, default_groups):
    forum = Forum(title='Test Forum', category_id=category.id)
    forum.groups = default_groups
    forum.save()
    return forum
[ "@", "pytest", ".", "fixture", "def", "forum", "(", "category", ",", "default_settings", ",", "default_groups", ")", ":", "forum", "=", "Forum", "(", "title", "=", "'Test Forum'", ",", "category_id", "=", "category", ".", "id", ")", "forum", ".", "groups", "=", "default_groups", "forum", ".", "save", "(", ")", "return", "forum" ]
a single forum in a category .
train
false
51,646
def scan_readme_files(dirname):
    samples = []
    keyword_set = set()
    for (root, dirs, files) in os.walk(dirname):
        if ('README' in files):
            filename = os.path.join(root, 'README')
            with open(filename, 'r') as f:
                content = f.read()
            lines = content.splitlines()
            desc = ' '.join(itertools.takewhile((lambda x: x), lines))
            api = get_lines('api', lines)
            keywords = get_lines('keywords', lines)
            uri = get_lines('uri', lines)
            if (not uri):
                uri = None
            for k in keywords:
                if (k not in KEYWORDS):
                    raise ValueError(('%s is not a valid keyword in file %s' % (k, filename)))
            keyword_set.update(keywords)
            if (not api):
                api = [None]
            samples.append((api[0], keywords, root[1:], desc, uri))
    samples.sort()
    return (samples, keyword_set)
[ "def", "scan_readme_files", "(", "dirname", ")", ":", "samples", "=", "[", "]", "keyword_set", "=", "set", "(", ")", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "dirname", ")", ":", "if", "(", "'README'", "in", "files", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "root", ",", "'README'", ")", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "content", "=", "f", ".", "read", "(", ")", "lines", "=", "content", ".", "splitlines", "(", ")", "desc", "=", "' '", ".", "join", "(", "itertools", ".", "takewhile", "(", "(", "lambda", "x", ":", "x", ")", ",", "lines", ")", ")", "api", "=", "get_lines", "(", "'api'", ",", "lines", ")", "keywords", "=", "get_lines", "(", "'keywords'", ",", "lines", ")", "uri", "=", "get_lines", "(", "'uri'", ",", "lines", ")", "if", "(", "not", "uri", ")", ":", "uri", "=", "None", "for", "k", "in", "keywords", ":", "if", "(", "k", "not", "in", "KEYWORDS", ")", ":", "raise", "ValueError", "(", "(", "'%s is not a valid keyword in file %s'", "%", "(", "k", ",", "filename", ")", ")", ")", "keyword_set", ".", "update", "(", "keywords", ")", "if", "(", "not", "api", ")", ":", "api", "=", "[", "None", "]", "samples", ".", "append", "(", "(", "api", "[", "0", "]", ",", "keywords", ",", "root", "[", "1", ":", "]", ",", "desc", ",", "uri", ")", ")", "samples", ".", "sort", "(", ")", "return", "(", "samples", ",", "keyword_set", ")" ]
scans all subdirs of dirname for readme files .
train
false
51,647
def _add_propstat_to(element, tag, status_number):
    propstat = ET.Element(_tag('D', 'propstat'))
    element.append(propstat)
    prop = ET.Element(_tag('D', 'prop'))
    propstat.append(prop)
    clark_tag = (tag if ('{' in tag) else _tag(*tag.split(':', 1)))
    prop_tag = ET.Element(clark_tag)
    prop.append(prop_tag)
    status = ET.Element(_tag('D', 'status'))
    status.text = _response(status_number)
    propstat.append(status)
[ "def", "_add_propstat_to", "(", "element", ",", "tag", ",", "status_number", ")", ":", "propstat", "=", "ET", ".", "Element", "(", "_tag", "(", "'D'", ",", "'propstat'", ")", ")", "element", ".", "append", "(", "propstat", ")", "prop", "=", "ET", ".", "Element", "(", "_tag", "(", "'D'", ",", "'prop'", ")", ")", "propstat", ".", "append", "(", "prop", ")", "clark_tag", "=", "(", "tag", "if", "(", "'{'", "in", "tag", ")", "else", "_tag", "(", "*", "tag", ".", "split", "(", "':'", ",", "1", ")", ")", ")", "prop_tag", "=", "ET", ".", "Element", "(", "clark_tag", ")", "prop", ".", "append", "(", "prop_tag", ")", "status", "=", "ET", ".", "Element", "(", "_tag", "(", "'D'", ",", "'status'", ")", ")", "status", ".", "text", "=", "_response", "(", "status_number", ")", "propstat", ".", "append", "(", "status", ")" ]
add a propstat response structure to an element .
train
false
51,648
@control_command(args=[(u'queue', text_t)], signature=u'<queue>')
def cancel_consumer(state, queue, **_):
    state.consumer.call_soon(state.consumer.cancel_task_queue, queue)
    return ok(u'no longer consuming from {0}'.format(queue))
[ "@", "control_command", "(", "args", "=", "[", "(", "u'queue'", ",", "text_t", ")", "]", ",", "signature", "=", "u'<queue>'", ")", "def", "cancel_consumer", "(", "state", ",", "queue", ",", "**", "_", ")", ":", "state", ".", "consumer", ".", "call_soon", "(", "state", ".", "consumer", ".", "cancel_task_queue", ",", "queue", ")", "return", "ok", "(", "u'no longer consuming from {0}'", ".", "format", "(", "queue", ")", ")" ]
tell worker(s) to stop consuming from task queue by name .
train
false
51,649
def locked_function(origfunc):
    def wrapped(*args, **kwargs):
        cache_lock.acquire()
        try:
            return origfunc(*args, **kwargs)
        finally:
            cache_lock.release()
    return wrapped
[ "def", "locked_function", "(", "origfunc", ")", ":", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "cache_lock", ".", "acquire", "(", ")", "try", ":", "return", "origfunc", "(", "*", "args", ",", "**", "kwargs", ")", "finally", ":", "cache_lock", ".", "release", "(", ")", "return", "wrapped" ]
decorator to execute function under lock .
train
false
51,650
def print_threads(d):
    id = 1
    rel = {}
    for partner in d:
        messages = d[partner]
        count = len(messages)
        screen_name = ('@' + partner[0])
        name = partner[1]
        screen_name = color_func(c['MESSAGE']['partner'])(screen_name)
        name = cycle_color(name)
        thread_id = color_func(c['MESSAGE']['id'])(('thread_id:' + str(id)))
        line = ((((((((' ' * 2) + name) + ' ') + screen_name) + ' (') + str(count)) + ' message) ') + thread_id)
        printNicely(line)
        rel[id] = partner
        id += 1
    dg['thread'] = d
    return rel
[ "def", "print_threads", "(", "d", ")", ":", "id", "=", "1", "rel", "=", "{", "}", "for", "partner", "in", "d", ":", "messages", "=", "d", "[", "partner", "]", "count", "=", "len", "(", "messages", ")", "screen_name", "=", "(", "'@'", "+", "partner", "[", "0", "]", ")", "name", "=", "partner", "[", "1", "]", "screen_name", "=", "color_func", "(", "c", "[", "'MESSAGE'", "]", "[", "'partner'", "]", ")", "(", "screen_name", ")", "name", "=", "cycle_color", "(", "name", ")", "thread_id", "=", "color_func", "(", "c", "[", "'MESSAGE'", "]", "[", "'id'", "]", ")", "(", "(", "'thread_id:'", "+", "str", "(", "id", ")", ")", ")", "line", "=", "(", "(", "(", "(", "(", "(", "(", "(", "' '", "*", "2", ")", "+", "name", ")", "+", "' '", ")", "+", "screen_name", ")", "+", "' ('", ")", "+", "str", "(", "count", ")", ")", "+", "' message) '", ")", "+", "thread_id", ")", "printNicely", "(", "line", ")", "rel", "[", "id", "]", "=", "partner", "id", "+=", "1", "dg", "[", "'thread'", "]", "=", "d", "return", "rel" ]
print threads of messages .
train
false
51,651
def _convert_2to3(path):
    (base, ext) = os.path.splitext(path)
    new_path = ('%s.temp2to3%s' % (base, ext))
    copyfile(path, new_path)
    args = ['--doctests_only', '--no-diffs', '--write', '--nobackups', new_path]
    lib2to3main('lib2to3.fixes', args=args)
    return new_path
[ "def", "_convert_2to3", "(", "path", ")", ":", "(", "base", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "new_path", "=", "(", "'%s.temp2to3%s'", "%", "(", "base", ",", "ext", ")", ")", "copyfile", "(", "path", ",", "new_path", ")", "args", "=", "[", "'--doctests_only'", ",", "'--no-diffs'", ",", "'--write'", ",", "'--nobackups'", ",", "new_path", "]", "lib2to3main", "(", "'lib2to3.fixes'", ",", "args", "=", "args", ")", "return", "new_path" ]
convert the given file with lib2to3, writing the result to a temporary copy .
train
false
51,652
def clone_image(img):
    return QImage(img)
[ "def", "clone_image", "(", "img", ")", ":", "return", "QImage", "(", "img", ")" ]
returns a shallow copy of the image .
train
false
51,653
def get_deprecated_download_cell_limit():
    return ((DOWNLOAD_CELL_LIMIT.get() / 100) if (DOWNLOAD_CELL_LIMIT.get() > 0) else DOWNLOAD_CELL_LIMIT.get())
[ "def", "get_deprecated_download_cell_limit", "(", ")", ":", "return", "(", "(", "DOWNLOAD_CELL_LIMIT", ".", "get", "(", ")", "/", "100", ")", "if", "(", "DOWNLOAD_CELL_LIMIT", ".", "get", "(", ")", ">", "0", ")", "else", "DOWNLOAD_CELL_LIMIT", ".", "get", "(", ")", ")" ]
get the old default .
train
false
51,654
def enable_edxnotes_for_the_course(course, user_id):
    course.tabs.append(CourseTab.load('edxnotes'))
    modulestore().update_item(course, user_id)
[ "def", "enable_edxnotes_for_the_course", "(", "course", ",", "user_id", ")", ":", "course", ".", "tabs", ".", "append", "(", "CourseTab", ".", "load", "(", "'edxnotes'", ")", ")", "modulestore", "(", ")", ".", "update_item", "(", "course", ",", "user_id", ")" ]
enable edxnotes for the course .
train
false
51,655
def bind_unused_port(reuse_port=False):
    sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET, reuse_port=reuse_port)[0]
    port = sock.getsockname()[1]
    return (sock, port)
[ "def", "bind_unused_port", "(", "reuse_port", "=", "False", ")", ":", "sock", "=", "netutil", ".", "bind_sockets", "(", "None", ",", "'127.0.0.1'", ",", "family", "=", "socket", ".", "AF_INET", ",", "reuse_port", "=", "reuse_port", ")", "[", "0", "]", "port", "=", "sock", ".", "getsockname", "(", ")", "[", "1", "]", "return", "(", "sock", ",", "port", ")" ]
binds a server socket to an available port on localhost .
train
false
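Minimal usage sketch for the bind_unused_port record above. It assumes the function is in scope and that tornado's netutil is importable, which the snippet itself relies on:

import socket
from tornado import netutil  # module-level dependency of bind_unused_port above

sock, port = bind_unused_port()
print(port)                            # an OS-assigned free port on 127.0.0.1
assert isinstance(sock, socket.socket)
sock.close()                           # release the port when done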
51,656
def mini_interactive_loop(input_func): from IPython.core.inputsplitter import InputSplitter isp = InputSplitter() while isp.push_accepts_more(): indent = (' ' * isp.indent_spaces) prompt = ('>>> ' + indent) line = (indent + input_func(prompt)) isp.push(line) src = isp.source_reset() return src
[ "def", "mini_interactive_loop", "(", "input_func", ")", ":", "from", "IPython", ".", "core", ".", "inputsplitter", "import", "InputSplitter", "isp", "=", "InputSplitter", "(", ")", "while", "isp", ".", "push_accepts_more", "(", ")", ":", "indent", "=", "(", "' '", "*", "isp", ".", "indent_spaces", ")", "prompt", "=", "(", "'>>> '", "+", "indent", ")", "line", "=", "(", "indent", "+", "input_func", "(", "prompt", ")", ")", "isp", ".", "push", "(", "line", ")", "src", "=", "isp", ".", "source_reset", "(", ")", "return", "src" ]
minimal example of the logic of an interactive interpreter loop .
train
false
51,657
def gen_key(path): cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path) if (not os.path.isdir(os.path.dirname(path))): os.makedirs(os.path.dirname(path)) subprocess.call(cmd, shell=True)
[ "def", "gen_key", "(", "path", ")", ":", "cmd", "=", "'ssh-keygen -P \"\" -f {0} -t rsa -q'", ".", "format", "(", "path", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ")", "subprocess", ".", "call", "(", "cmd", ",", "shell", "=", "True", ")" ]
generate a key for use with salt-ssh .
train
true
51,658
def check_site_enabled(site): if site.endswith('.conf'): site_file = site else: site_file = '{0}.conf'.format(site) if os.path.islink('{0}/{1}'.format(SITE_ENABLED_DIR, site_file)): return True elif ((site == 'default') and os.path.islink('{0}/000-{1}'.format(SITE_ENABLED_DIR, site_file))): return True else: return False
[ "def", "check_site_enabled", "(", "site", ")", ":", "if", "site", ".", "endswith", "(", "'.conf'", ")", ":", "site_file", "=", "site", "else", ":", "site_file", "=", "'{0}.conf'", ".", "format", "(", "site", ")", "if", "os", ".", "path", ".", "islink", "(", "'{0}/{1}'", ".", "format", "(", "SITE_ENABLED_DIR", ",", "site_file", ")", ")", ":", "return", "True", "elif", "(", "(", "site", "==", "'default'", ")", "and", "os", ".", "path", ".", "islink", "(", "'{0}/000-{1}'", ".", "format", "(", "SITE_ENABLED_DIR", ",", "site_file", ")", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
checks to see if the specific site symlink is in /etc/apache2/sites-enabled .
train
true
51,659
@receiver(PROBLEM_WEIGHTED_SCORE_CHANGED) def enqueue_subsection_update(sender, **kwargs): _emit_problem_submitted_event(kwargs) result = recalculate_subsection_grade_v3.apply_async(kwargs=dict(user_id=kwargs['user_id'], anonymous_user_id=kwargs.get('anonymous_user_id'), course_id=kwargs['course_id'], usage_id=kwargs['usage_id'], only_if_higher=kwargs.get('only_if_higher'), expected_modified_time=to_timestamp(kwargs['modified']), score_deleted=kwargs.get('score_deleted', False), event_transaction_id=unicode(get_event_transaction_id()), event_transaction_type=unicode(get_event_transaction_type()), score_db_table=kwargs['score_db_table'])) log.info(u'Grades: Request async calculation of subsection grades with args: {}. Task [{}]'.format(', '.join(('{}:{}'.format(arg, kwargs[arg]) for arg in sorted(kwargs))), getattr(result, 'id', 'N/A')))
[ "@", "receiver", "(", "PROBLEM_WEIGHTED_SCORE_CHANGED", ")", "def", "enqueue_subsection_update", "(", "sender", ",", "**", "kwargs", ")", ":", "_emit_problem_submitted_event", "(", "kwargs", ")", "result", "=", "recalculate_subsection_grade_v3", ".", "apply_async", "(", "kwargs", "=", "dict", "(", "user_id", "=", "kwargs", "[", "'user_id'", "]", ",", "anonymous_user_id", "=", "kwargs", ".", "get", "(", "'anonymous_user_id'", ")", ",", "course_id", "=", "kwargs", "[", "'course_id'", "]", ",", "usage_id", "=", "kwargs", "[", "'usage_id'", "]", ",", "only_if_higher", "=", "kwargs", ".", "get", "(", "'only_if_higher'", ")", ",", "expected_modified_time", "=", "to_timestamp", "(", "kwargs", "[", "'modified'", "]", ")", ",", "score_deleted", "=", "kwargs", ".", "get", "(", "'score_deleted'", ",", "False", ")", ",", "event_transaction_id", "=", "unicode", "(", "get_event_transaction_id", "(", ")", ")", ",", "event_transaction_type", "=", "unicode", "(", "get_event_transaction_type", "(", ")", ")", ",", "score_db_table", "=", "kwargs", "[", "'score_db_table'", "]", ")", ")", "log", ".", "info", "(", "u'Grades: Request async calculation of subsection grades with args: {}. Task [{}]'", ".", "format", "(", "', '", ".", "join", "(", "(", "'{}:{}'", ".", "format", "(", "arg", ",", "kwargs", "[", "arg", "]", ")", "for", "arg", "in", "sorted", "(", "kwargs", ")", ")", ")", ",", "getattr", "(", "result", ",", "'id'", ",", "'N/A'", ")", ")", ")" ]
handles the problem_weighted_score_changed signal by enqueueing a subsection update operation to occur asynchronously .
train
false
51,660
def _squeeze_cat(results, combine, squeeze): if combine: results = np.concatenate(results) if (not squeeze): results = [results] elif (squeeze and (len(results) == 1)): results = results[0] return results
[ "def", "_squeeze_cat", "(", "results", ",", "combine", ",", "squeeze", ")", ":", "if", "combine", ":", "results", "=", "np", ".", "concatenate", "(", "results", ")", "if", "(", "not", "squeeze", ")", ":", "results", "=", "[", "results", "]", "elif", "(", "squeeze", "and", "(", "len", "(", "results", ")", "==", "1", ")", ")", ":", "results", "=", "results", "[", "0", "]", "return", "results" ]
squeeze and concatenate the results depending on values of combine and squeeze .
train
false
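To make the combine/squeeze interplay in _squeeze_cat concrete, a small illustration (assumes the function above is in scope; the snippet expects numpy imported as np):

import numpy as np

parts = [np.array([1, 2]), np.array([3])]
print(_squeeze_cat(parts, combine=True, squeeze=True))        # array([1, 2, 3])
print(_squeeze_cat(parts, combine=True, squeeze=False))       # [array([1, 2, 3])]
print(_squeeze_cat([parts[0]], combine=False, squeeze=True))  # array([1, 2])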
51,662
def S_ISCHR(mode): return (S_IFMT(mode) == S_IFCHR)
[ "def", "S_ISCHR", "(", "mode", ")", ":", "return", "(", "S_IFMT", "(", "mode", ")", "==", "S_IFCHR", ")" ]
return true if mode is from a character special device file .
train
false
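The same predicate ships in the standard library's stat module; a quick check against a character device (a Unix path is assumed):

import os
import stat

mode = os.stat('/dev/null').st_mode  # /dev/null is a character device on Unix
print(stat.S_ISCHR(mode))            # True
print(stat.S_ISREG(mode))            # False, not a regular file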
51,664
def dmp_discriminant(f, u, K): if (not u): return dup_discriminant(f, K) (d, v) = (dmp_degree(f, u), (u - 1)) if (d <= 0): return dmp_zero(v) else: s = ((-1) ** ((d * (d - 1)) // 2)) c = dmp_LC(f, K) r = dmp_resultant(f, dmp_diff(f, 1, u, K), u, K) c = dmp_mul_ground(c, K(s), v, K) return dmp_quo(r, c, v, K)
[ "def", "dmp_discriminant", "(", "f", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_discriminant", "(", "f", ",", "K", ")", "(", "d", ",", "v", ")", "=", "(", "dmp_degree", "(", "f", ",", "u", ")", ",", "(", "u", "-", "1", ")", ")", "if", "(", "d", "<=", "0", ")", ":", "return", "dmp_zero", "(", "v", ")", "else", ":", "s", "=", "(", "(", "-", "1", ")", "**", "(", "(", "d", "*", "(", "d", "-", "1", ")", ")", "//", "2", ")", ")", "c", "=", "dmp_LC", "(", "f", ",", "K", ")", "r", "=", "dmp_resultant", "(", "f", ",", "dmp_diff", "(", "f", ",", "1", ",", "u", ",", "K", ")", ",", "u", ",", "K", ")", "c", "=", "dmp_mul_ground", "(", "c", ",", "K", "(", "s", ")", ",", "v", ",", "K", ")", "return", "dmp_quo", "(", "r", ",", "c", ",", "v", ",", "K", ")" ]
computes discriminant of a polynomial in k[x] .
train
false
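For intuition, SymPy's public wrapper over this routine reproduces the familiar quadratic discriminant. The high-level API is used here as a sketch, since the low-level dmp_* call needs a domain and dense coefficient lists:

from sympy import symbols, discriminant

x, b, c = symbols('x b c')
print(discriminant(x**2 + b*x + c, x))  # b**2 - 4*c
print(discriminant(x**2 + 3*x + 2, x))  # 1, so two distinct rational roots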
51,665
def download_webfile(url, filename, overwrite=False): if (os.path.exists(filename) and (not overwrite)): return if ('.' in url): urlretrieve(url, filename) else: try: subprocess_call(['youtube-dl', url, '-o', filename]) except OSError as e: raise OSError((e.message + '\n A possible reason is that youtube-dl is not installed on your computer. Install it with "pip install youtube-dl"'))
[ "def", "download_webfile", "(", "url", ",", "filename", ",", "overwrite", "=", "False", ")", ":", "if", "(", "os", ".", "path", ".", "exists", "(", "filename", ")", "and", "(", "not", "overwrite", ")", ")", ":", "return", "if", "(", "'.'", "in", "url", ")", ":", "urlretrieve", "(", "url", ",", "filename", ")", "else", ":", "try", ":", "subprocess_call", "(", "[", "'youtube-dl'", ",", "url", ",", "'-o'", ",", "filename", "]", ")", "except", "OSError", "as", "e", ":", "raise", "OSError", "(", "(", "e", ".", "message", "+", "'\\n A possible reason is that youtube-dl is not installed on your computer. Install it with \"pip install youtube-dl\"'", ")", ")" ]
small utility to download the file at url under the name filename .
train
false
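Illustrative calls for download_webfile above. Both URLs are hypothetical; the first branch needs network access and the second needs youtube-dl on the PATH:

# direct file URL: contains a '.', so urlretrieve is used
download_webfile('https://example.com/clip.mp4', 'clip.mp4')

# bare resource id with no '.': falls through to youtube-dl
download_webfile('dQw4w9WgXcQ', 'video.mp4', overwrite=True)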
51,669
def mod_list(only_persist=False): mods = set() if only_persist: conf = _get_modules_conf() if os.path.exists(conf): try: with salt.utils.fopen(conf, 'r') as modules_file: for line in modules_file: line = line.strip() mod_name = _strip_module_name(line) if ((not line.startswith('#')) and mod_name): mods.add(mod_name) except IOError: log.error('kmod module could not open modules file at {0}'.format(conf)) else: for mod in lsmod(): mods.add(mod['module']) return sorted(list(mods))
[ "def", "mod_list", "(", "only_persist", "=", "False", ")", ":", "mods", "=", "set", "(", ")", "if", "only_persist", ":", "conf", "=", "_get_modules_conf", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "conf", ")", ":", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "conf", ",", "'r'", ")", "as", "modules_file", ":", "for", "line", "in", "modules_file", ":", "line", "=", "line", ".", "strip", "(", ")", "mod_name", "=", "_strip_module_name", "(", "line", ")", "if", "(", "(", "not", "line", ".", "startswith", "(", "'#'", ")", ")", "and", "mod_name", ")", ":", "mods", ".", "add", "(", "mod_name", ")", "except", "IOError", ":", "log", ".", "error", "(", "'kmod module could not open modules file at {0}'", ".", "format", "(", "conf", ")", ")", "else", ":", "for", "mod", "in", "lsmod", "(", ")", ":", "mods", ".", "add", "(", "mod", "[", "'module'", "]", ")", "return", "sorted", "(", "list", "(", "mods", ")", ")" ]
return a list of the loaded module names ; if only_persist is set , only return the names of loaded persistent modules .
train
true
51,670
def thread(): try: thread_id = int(g['stuff']) print_thread(g['message_threads'][thread_id], g['original_name'], g['full_name']) except Exception: debug_option() printNicely(red('No such thread.'))
[ "def", "thread", "(", ")", ":", "try", ":", "thread_id", "=", "int", "(", "g", "[", "'stuff'", "]", ")", "print_thread", "(", "g", "[", "'message_threads'", "]", "[", "thread_id", "]", ",", "g", "[", "'original_name'", "]", ",", "g", "[", "'full_name'", "]", ")", "except", "Exception", ":", "debug_option", "(", ")", "printNicely", "(", "red", "(", "'No such thread.'", ")", ")" ]
view a thread of messages .
train
false
51,673
def cell_count(inline_admin_form): count = 1 for fieldset in inline_admin_form: for line in fieldset: for field in line: count += 1 if inline_admin_form.formset.can_delete: count += 1 return count
[ "def", "cell_count", "(", "inline_admin_form", ")", ":", "count", "=", "1", "for", "fieldset", "in", "inline_admin_form", ":", "for", "line", "in", "fieldset", ":", "for", "field", "in", "line", ":", "count", "+=", "1", "if", "inline_admin_form", ".", "formset", ".", "can_delete", ":", "count", "+=", "1", "return", "count" ]
returns the number of cells used in a tabular inline .
train
false
51,674
def flatFormat(event): fieldValues = event['log_flattened'] s = [] keyFlattener = KeyFlattener() formatFields = aFormatter.parse(event['log_format']) for (literalText, fieldName, formatSpec, conversion) in formatFields: s.append(literalText) if (fieldName is not None): key = keyFlattener.flatKey(fieldName, formatSpec, (conversion or 's')) s.append(unicode(fieldValues[key])) return u''.join(s)
[ "def", "flatFormat", "(", "event", ")", ":", "fieldValues", "=", "event", "[", "'log_flattened'", "]", "s", "=", "[", "]", "keyFlattener", "=", "KeyFlattener", "(", ")", "formatFields", "=", "aFormatter", ".", "parse", "(", "event", "[", "'log_format'", "]", ")", "for", "(", "literalText", ",", "fieldName", ",", "formatSpec", ",", "conversion", ")", "in", "formatFields", ":", "s", ".", "append", "(", "literalText", ")", "if", "(", "fieldName", "is", "not", "None", ")", ":", "key", "=", "keyFlattener", ".", "flatKey", "(", "fieldName", ",", "formatSpec", ",", "(", "conversion", "or", "'s'", ")", ")", "s", ".", "append", "(", "unicode", "(", "fieldValues", "[", "key", "]", ")", ")", "return", "u''", ".", "join", "(", "s", ")" ]
format an event which has been flattened with flattenevent .
train
false
51,676
def strip_trailing_whitespace(content): return re.sub(u' +\n', u'\n', content)
[ "def", "strip_trailing_whitespace", "(", "content", ")", ":", "return", "re", ".", "sub", "(", "u' +\\n'", ",", "u'\\n'", ",", "content", ")" ]
strip trailing whitespace before newlines ; there seem to be some inconsistencies re : trailing whitespace .
train
false
51,677
def test_scenario_with_hash_within_single_quotes(): scenario = Scenario.from_string(INLINE_COMMENTS_IGNORED_WITHIN_SINGLE_QUOTES) (step1, step2) = scenario.steps expect(step1.sentence).to.equal(u'Given I am logged in on twitter') expect(step2.sentence).to.equal(u"When I search for the hashtag '#hammer'")
[ "def", "test_scenario_with_hash_within_single_quotes", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "INLINE_COMMENTS_IGNORED_WITHIN_SINGLE_QUOTES", ")", "(", "step1", ",", "step2", ")", "=", "scenario", ".", "steps", "expect", "(", "step1", ".", "sentence", ")", ".", "to", ".", "equal", "(", "u'Given I am logged in on twitter'", ")", "expect", "(", "step2", ".", "sentence", ")", ".", "to", ".", "equal", "(", "u\"When I search for the hashtag '#hammer'\"", ")" ]
scenarios have hashes within single quotes and yet don't consider them as comments .
train
false
51,678
def broadcast_channel(message, channel): try: socket = CLIENTS[CHANNELS.get(channel, [])[0]][1] except (IndexError, KeyError): raise NoSocket(('There are no clients on the channel: ' + channel)) socket.send_and_broadcast_channel(message, channel)
[ "def", "broadcast_channel", "(", "message", ",", "channel", ")", ":", "try", ":", "socket", "=", "CLIENTS", "[", "CHANNELS", ".", "get", "(", "channel", ",", "[", "]", ")", "[", "0", "]", "]", "[", "1", "]", "except", "(", "IndexError", ",", "KeyError", ")", ":", "raise", "NoSocket", "(", "(", "'There are no clients on the channel: '", "+", "channel", ")", ")", "socket", ".", "send_and_broadcast_channel", "(", "message", ",", "channel", ")" ]
find the first socket for the given channel .
train
true
51,679
def check_string(result, func, cargs): if (not result): raise GEOSException(('Error encountered checking string return value in GEOS C function "%s".' % func.__name__)) s = string_at(result) free(result) return s
[ "def", "check_string", "(", "result", ",", "func", ",", "cargs", ")", ":", "if", "(", "not", "result", ")", ":", "raise", "GEOSException", "(", "(", "'Error encountered checking string return value in GEOS C function \"%s\".'", "%", "func", ".", "__name__", ")", ")", "s", "=", "string_at", "(", "result", ")", "free", "(", "result", ")", "return", "s" ]
error checking for routines that return strings .
train
false
51,680
def test_roundtrip_commented_format(): infile = utils.copy_test_data(u'old-style-format-w-comments.fq') outfile = utils.get_temp_filename(u'test2.fq') in_dir = os.path.dirname(infile) (_, out, err) = utils.runscript(u'split-paired-reads.py', [infile], in_dir) utils.runscript(u'interleave-reads.py', [(infile + u'.1'), (infile + u'.2'), u'-o', outfile], in_dir) r = open(infile).read() r2 = open(outfile).read() assert (r == r2), (r, r2)
[ "def", "test_roundtrip_commented_format", "(", ")", ":", "infile", "=", "utils", ".", "copy_test_data", "(", "u'old-style-format-w-comments.fq'", ")", "outfile", "=", "utils", ".", "get_temp_filename", "(", "u'test2.fq'", ")", "in_dir", "=", "os", ".", "path", ".", "dirname", "(", "infile", ")", "(", "_", ",", "out", ",", "err", ")", "=", "utils", ".", "runscript", "(", "u'split-paired-reads.py'", ",", "[", "infile", "]", ",", "in_dir", ")", "utils", ".", "runscript", "(", "u'interleave-reads.py'", ",", "[", "(", "infile", "+", "u'.1'", ")", ",", "(", "infile", "+", "u'.2'", ")", ",", "u'-o'", ",", "outfile", "]", ",", "in_dir", ")", "r", "=", "open", "(", "infile", ")", ".", "read", "(", ")", "r2", "=", "open", "(", "outfile", ")", ".", "read", "(", ")", "assert", "(", "r", "==", "r2", ")", ",", "(", "r", ",", "r2", ")" ]
split/interleave roundtrip for old style format with comments .
train
false
51,683
def resolve_provider_class(class_): if isinstance(class_, str): path = '.'.join([__package__, 'providers', class_]) return (import_string(class_, True) or import_string(path)) else: return class_
[ "def", "resolve_provider_class", "(", "class_", ")", ":", "if", "isinstance", "(", "class_", ",", "str", ")", ":", "path", "=", "'.'", ".", "join", "(", "[", "__package__", ",", "'providers'", ",", "class_", "]", ")", "return", "(", "import_string", "(", "class_", ",", "True", ")", "or", "import_string", "(", "path", ")", ")", "else", ":", "return", "class_" ]
returns a provider class .
train
true
51,684
def _from_utc_timestamp(timestamp): return datetime.datetime.strptime(timestamp, UTC_TIMESTAMP_FORMAT)
[ "def", "_from_utc_timestamp", "(", "timestamp", ")", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "timestamp", ",", "UTC_TIMESTAMP_FORMAT", ")" ]
return datetime obj where date and time are pulled from timestamp string .
train
false
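A self-contained sketch of the round trip for _from_utc_timestamp, with a hypothetical value for UTC_TIMESTAMP_FORMAT (the real constant lives in the source module and may differ):

import datetime

UTC_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'  # hypothetical format string

def _from_utc_timestamp(timestamp):
    return datetime.datetime.strptime(timestamp, UTC_TIMESTAMP_FORMAT)

print(_from_utc_timestamp('2021-06-01T12:30:00'))  # 2021-06-01 12:30:00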
51,685
def batched_dot(a, b): (a, b) = (as_tensor_variable(a), as_tensor_variable(b)) if (a.ndim == 0): raise TypeError('a must have at least one (batch) axis') elif (b.ndim == 0): raise TypeError('b must have at least one (batch) axis') elif (a.ndim == 1): return (a.dimshuffle(*([0] + (['x'] * (b.ndim - 1)))) * b) elif (b.ndim == 1): return (a * b.dimshuffle(*([0] + (['x'] * (a.ndim - 1))))) elif ((a.ndim > 3) or (b.ndim > 3)): return batched_tensordot(a, b, [[(a.ndim - 1)], [numpy.maximum(1, (b.ndim - 2))]]) else: return theano.tensor.blas.BatchedDot()(a, b)
[ "def", "batched_dot", "(", "a", ",", "b", ")", ":", "(", "a", ",", "b", ")", "=", "(", "as_tensor_variable", "(", "a", ")", ",", "as_tensor_variable", "(", "b", ")", ")", "if", "(", "a", ".", "ndim", "==", "0", ")", ":", "raise", "TypeError", "(", "'a must have at least one (batch) axis'", ")", "elif", "(", "b", ".", "ndim", "==", "0", ")", ":", "raise", "TypeError", "(", "'b must have at least one (batch) axis'", ")", "elif", "(", "a", ".", "ndim", "==", "1", ")", ":", "return", "(", "a", ".", "dimshuffle", "(", "*", "(", "[", "0", "]", "+", "(", "[", "'x'", "]", "*", "(", "b", ".", "ndim", "-", "1", ")", ")", ")", ")", "*", "b", ")", "elif", "(", "b", ".", "ndim", "==", "1", ")", ":", "return", "(", "a", "*", "b", ".", "dimshuffle", "(", "*", "(", "[", "0", "]", "+", "(", "[", "'x'", "]", "*", "(", "a", ".", "ndim", "-", "1", ")", ")", ")", ")", ")", "elif", "(", "(", "a", ".", "ndim", ">", "3", ")", "or", "(", "b", ".", "ndim", ">", "3", ")", ")", ":", "return", "batched_tensordot", "(", "a", ",", "b", ",", "[", "[", "(", "a", ".", "ndim", "-", "1", ")", "]", ",", "[", "numpy", ".", "maximum", "(", "1", ",", "(", "b", ".", "ndim", "-", "2", ")", ")", "]", "]", ")", "else", ":", "return", "theano", ".", "tensor", ".", "blas", ".", "BatchedDot", "(", ")", "(", "a", ",", "b", ")" ]
compute the batched dot product of two variables : batched_dot(a, b)[i] = dot(a[i], b[i]) ; note that this batched_dot function does one of three things .
train
false
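The contract batched_dot(a, b)[i] = dot(a[i], b[i]) can be checked against a plain NumPy loop; this sketches the semantics only, not the Theano graph itself:

import numpy as np

a = np.random.rand(4, 2, 3)
b = np.random.rand(4, 3, 5)

# reference implementation: one dot product per batch entry
expected = np.array([np.dot(a[i], b[i]) for i in range(4)])
print(expected.shape)  # (4, 2, 5)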
51,686
def teardown_test_episode_file(): if os.path.exists(FILE_DIR): shutil.rmtree(FILE_DIR)
[ "def", "teardown_test_episode_file", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "FILE_DIR", ")", ":", "shutil", ".", "rmtree", "(", "FILE_DIR", ")" ]
remove the test episode .
train
false
51,687
def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): for i in range(tries): G = watts_strogatz_graph(n, k, p, seed) if nx.is_connected(G): return G raise nx.NetworkXError('Maximum number of tries exceeded')
[ "def", "connected_watts_strogatz_graph", "(", "n", ",", "k", ",", "p", ",", "tries", "=", "100", ",", "seed", "=", "None", ")", ":", "for", "i", "in", "range", "(", "tries", ")", ":", "G", "=", "watts_strogatz_graph", "(", "n", ",", "k", ",", "p", ",", "seed", ")", "if", "nx", ".", "is_connected", "(", "G", ")", ":", "return", "G", "raise", "nx", ".", "NetworkXError", "(", "'Maximum number of tries exceeded'", ")" ]
returns a connected watts–strogatz small-world graph .
train
false
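Usage via the public networkx API, which exposes this generator directly:

import networkx as nx

G = nx.connected_watts_strogatz_graph(30, k=4, p=0.1, tries=100, seed=42)
print(nx.is_connected(G))                        # True by construction, else NetworkXError was raised
print(G.number_of_nodes(), G.number_of_edges())  # 30 60 (rewiring preserves the edge count)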
51,689
def _nova_to_osvif_subnets(subnets): return objects.subnet.SubnetList(objects=[_nova_to_osvif_subnet(subnet) for subnet in subnets])
[ "def", "_nova_to_osvif_subnets", "(", "subnets", ")", ":", "return", "objects", ".", "subnet", ".", "SubnetList", "(", "objects", "=", "[", "_nova_to_osvif_subnet", "(", "subnet", ")", "for", "subnet", "in", "subnets", "]", ")" ]
convert nova subnet list into os_vif object .
train
false
51,690
def process_document_parser(http_resp, processes, hash_string, debug): pid = multiprocessing.current_process().pid processes[hash_string] = pid if debug: msg = '[mp_document_parser] PID %s is starting to parse %s' args = (pid, http_resp.get_url()) om.out.debug((msg % args)) try: document_parser = DocumentParser(http_resp) except Exception as e: if debug: msg = '[mp_document_parser] PID %s finished parsing %s with exception: "%s"' args = (pid, http_resp.get_url(), e) om.out.debug((msg % args)) raise else: if debug: msg = '[mp_document_parser] PID %s finished parsing %s without any exception' args = (pid, http_resp.get_url()) om.out.debug((msg % args)) return document_parser
[ "def", "process_document_parser", "(", "http_resp", ",", "processes", ",", "hash_string", ",", "debug", ")", ":", "pid", "=", "multiprocessing", ".", "current_process", "(", ")", ".", "pid", "processes", "[", "hash_string", "]", "=", "pid", "if", "debug", ":", "msg", "=", "'[mp_document_parser] PID %s is starting to parse %s'", "args", "=", "(", "pid", ",", "http_resp", ".", "get_url", "(", ")", ")", "om", ".", "out", ".", "debug", "(", "(", "msg", "%", "args", ")", ")", "try", ":", "document_parser", "=", "DocumentParser", "(", "http_resp", ")", "except", "Exception", "as", "e", ":", "if", "debug", ":", "msg", "=", "'[mp_document_parser] PID %s finished parsing %s with exception: \"%s\"'", "args", "=", "(", "pid", ",", "http_resp", ".", "get_url", "(", ")", ",", "e", ")", "om", ".", "out", ".", "debug", "(", "(", "msg", "%", "args", ")", ")", "raise", "else", ":", "if", "debug", ":", "msg", "=", "'[mp_document_parser] PID %s finished parsing %s without any exception'", "args", "=", "(", "pid", ",", "http_resp", ".", "get_url", "(", ")", ")", "om", ".", "out", ".", "debug", "(", "(", "msg", "%", "args", ")", ")", "return", "document_parser" ]
simple wrapper to get the current process id and store it in a shared object so we can kill the process if needed .
train
false
51,692
def upload_server_cert(cert_name, cert_body, private_key, cert_chain=None, path=None, region=None, key=None, keyid=None, profile=None): exists = get_server_certificate(cert_name, region, key, keyid, profile) if exists: return True conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: info = conn.upload_server_cert(cert_name, cert_body, private_key, cert_chain) log.info('Created certificate {0}.'.format(cert_name)) return info except boto.exception.BotoServerError as e: log.debug(e) msg = 'Failed to create certificate {0}.' log.error(msg.format(cert_name)) return False
[ "def", "upload_server_cert", "(", "cert_name", ",", "cert_body", ",", "private_key", ",", "cert_chain", "=", "None", ",", "path", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "exists", "=", "get_server_certificate", "(", "cert_name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "exists", ":", "return", "True", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "info", "=", "conn", ".", "upload_server_cert", "(", "cert_name", ",", "cert_body", ",", "private_key", ",", "cert_chain", ")", "log", ".", "info", "(", "'Created certificate {0}.'", ".", "format", "(", "cert_name", ")", ")", "return", "info", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "msg", "=", "'Failed to create certificate {0}.'", "log", ".", "error", "(", "msg", ".", "format", "(", "cert_name", ")", ")", "return", "False" ]
upload a certificate to amazon .
train
true
51,694
def reindex_course_and_check_access(course_key, user): if (not has_course_author_access(user, course_key)): raise PermissionDenied() return CoursewareSearchIndexer.do_course_reindex(modulestore(), course_key)
[ "def", "reindex_course_and_check_access", "(", "course_key", ",", "user", ")", ":", "if", "(", "not", "has_course_author_access", "(", "user", ",", "course_key", ")", ")", ":", "raise", "PermissionDenied", "(", ")", "return", "CoursewareSearchIndexer", ".", "do_course_reindex", "(", "modulestore", "(", ")", ",", "course_key", ")" ]
internal method used to restart indexing on a course .
train
false
51,695
def remote_postgres(client_ip, host, command): return remote_command(client_ip, ('psql', ((((((('postgres://' + POSTGRESQL_USERNAME) + ':') + POSTGRESQL_PASSWORD) + '@') + host) + ':') + str(POSTGRESQL_PORT)), '--command={}'.format(command)))
[ "def", "remote_postgres", "(", "client_ip", ",", "host", ",", "command", ")", ":", "return", "remote_command", "(", "client_ip", ",", "(", "'psql'", ",", "(", "(", "(", "(", "(", "(", "(", "'postgres://'", "+", "POSTGRESQL_USERNAME", ")", "+", "':'", ")", "+", "POSTGRESQL_PASSWORD", ")", "+", "'@'", ")", "+", "host", ")", "+", "':'", ")", "+", "str", "(", "POSTGRESQL_PORT", ")", ")", ",", "'--command={}'", ".", "format", "(", "command", ")", ")", ")" ]
run psql on client_ip .
train
false
51,696
def get_pkg_data_filename(data_name, package=None, show_progress=True, remote_timeout=None): data_name = os.path.normpath(data_name) if (remote_timeout is None): remote_timeout = conf.remote_timeout if data_name.startswith(u'hash/'): hashfn = _find_hash_fn(data_name[5:]) if (hashfn is None): return download_file((conf.dataurl + data_name), cache=True, show_progress=show_progress, timeout=remote_timeout) else: return hashfn else: datafn = _find_pkg_data_path(data_name, package=package) if os.path.isdir(datafn): raise IOError(u"Tried to access a data file that's actually a package data directory") elif os.path.isfile(datafn): return datafn else: return download_file((conf.dataurl + data_name), cache=True, show_progress=show_progress, timeout=remote_timeout)
[ "def", "get_pkg_data_filename", "(", "data_name", ",", "package", "=", "None", ",", "show_progress", "=", "True", ",", "remote_timeout", "=", "None", ")", ":", "data_name", "=", "os", ".", "path", ".", "normpath", "(", "data_name", ")", "if", "(", "remote_timeout", "is", "None", ")", ":", "remote_timeout", "=", "conf", ".", "remote_timeout", "if", "data_name", ".", "startswith", "(", "u'hash/'", ")", ":", "hashfn", "=", "_find_hash_fn", "(", "data_name", "[", "5", ":", "]", ")", "if", "(", "hashfn", "is", "None", ")", ":", "return", "download_file", "(", "(", "conf", ".", "dataurl", "+", "data_name", ")", ",", "cache", "=", "True", ",", "show_progress", "=", "show_progress", ",", "timeout", "=", "remote_timeout", ")", "else", ":", "return", "hashfn", "else", ":", "datafn", "=", "_find_pkg_data_path", "(", "data_name", ",", "package", "=", "package", ")", "if", "os", ".", "path", ".", "isdir", "(", "datafn", ")", ":", "raise", "IOError", "(", "u\"Tried to access a data file that's actually a package data directory\"", ")", "elif", "os", ".", "path", ".", "isfile", "(", "datafn", ")", ":", "return", "datafn", "else", ":", "return", "download_file", "(", "(", "conf", ".", "dataurl", "+", "data_name", ")", ",", "cache", "=", "True", ",", "show_progress", "=", "show_progress", ",", "timeout", "=", "remote_timeout", ")" ]
retrieves a data file from the standard locations for the package and provides a local filename for the data .
train
false
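Astropy's documentation exercises this helper against packaged test data; a sketch, assuming astropy is installed (the path below is the one used in astropy's own docs):

from astropy.utils.data import get_pkg_data_filename

fn = get_pkg_data_filename('data/3d_cd.hdr', package='astropy.wcs.tests')
print(fn)  # absolute local path to the packaged test header file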
51,697
def test_SAMPHubServer(): SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
[ "def", "test_SAMPHubServer", "(", ")", ":", "SAMPHubServer", "(", "web_profile", "=", "False", ",", "mode", "=", "'multiple'", ",", "pool_size", "=", "1", ")" ]
test that samphub can be instantiated .
train
false
51,698
def findObj(regex): allObjs = get_all_objects() objs = [] r = re.compile(regex) for i in allObjs: obj = allObjs[i] if r.search(typeStr(obj)): objs.append(obj) return objs
[ "def", "findObj", "(", "regex", ")", ":", "allObjs", "=", "get_all_objects", "(", ")", "objs", "=", "[", "]", "r", "=", "re", ".", "compile", "(", "regex", ")", "for", "i", "in", "allObjs", ":", "obj", "=", "allObjs", "[", "i", "]", "if", "r", ".", "search", "(", "typeStr", "(", "obj", ")", ")", ":", "objs", ".", "append", "(", "obj", ")", "return", "objs" ]
return a list of objects whose typestr matches regex .
train
false
51,699
def MathtextBackendBitmap(): return MathtextBackendBbox(MathtextBackendBitmapRender())
[ "def", "MathtextBackendBitmap", "(", ")", ":", "return", "MathtextBackendBbox", "(", "MathtextBackendBitmapRender", "(", ")", ")" ]
a backend to generate standalone mathtext images .
train
false
51,700
def semanage_fcontext_delete(module, result, target, ftype, do_reload, sestore=''): changed = False prepared_diff = '' try: sefcontext = seobject.fcontextRecords(sestore) sefcontext.set_reload(do_reload) exists = semanage_fcontext_exists(sefcontext, target, ftype) if exists: (orig_seuser, orig_serole, orig_setype, orig_serange) = exists if (not module.check_mode): sefcontext.delete(target, ftype) changed = True if module._diff: prepared_diff += '# Deletion to semanage file context mappings\n' prepared_diff += ('-%s %s %s:%s:%s:%s\n' % (target, ftype, exists[0], exists[1], exists[2], exists[3])) except Exception: e = get_exception() module.fail_json(msg=('%s: %s\n' % (e.__class__.__name__, to_native(e)))) if (module._diff and prepared_diff): result['diff'] = dict(prepared=prepared_diff) module.exit_json(changed=changed, **result)
[ "def", "semanage_fcontext_delete", "(", "module", ",", "result", ",", "target", ",", "ftype", ",", "do_reload", ",", "sestore", "=", "''", ")", ":", "changed", "=", "False", "prepared_diff", "=", "''", "try", ":", "sefcontext", "=", "seobject", ".", "fcontextRecords", "(", "sestore", ")", "sefcontext", ".", "set_reload", "(", "do_reload", ")", "exists", "=", "semanage_fcontext_exists", "(", "sefcontext", ",", "target", ",", "ftype", ")", "if", "exists", ":", "(", "orig_seuser", ",", "orig_serole", ",", "orig_setype", ",", "orig_serange", ")", "=", "exists", "if", "(", "not", "module", ".", "check_mode", ")", ":", "sefcontext", ".", "delete", "(", "target", ",", "ftype", ")", "changed", "=", "True", "if", "module", ".", "_diff", ":", "prepared_diff", "+=", "'# Deletion to semanage file context mappings\\n'", "prepared_diff", "+=", "(", "'-%s %s %s:%s:%s:%s\\n'", "%", "(", "target", ",", "ftype", ",", "exists", "[", "0", "]", ",", "exists", "[", "1", "]", ",", "exists", "[", "2", "]", ",", "exists", "[", "3", "]", ")", ")", "except", "Exception", ":", "e", "=", "get_exception", "(", ")", "module", ".", "fail_json", "(", "msg", "=", "(", "'%s: %s\\n'", "%", "(", "e", ".", "__class__", ".", "__name__", ",", "to_native", "(", "e", ")", ")", ")", ")", "if", "(", "module", ".", "_diff", "and", "prepared_diff", ")", ":", "result", "[", "'diff'", "]", "=", "dict", "(", "prepared", "=", "prepared_diff", ")", "module", ".", "exit_json", "(", "changed", "=", "changed", ",", "**", "result", ")" ]
delete selinux file context mapping definition from the policy .
train
false
51,703
def _gp_float(tok): try: return float(tok) except ValueError: return str(tok)
[ "def", "_gp_float", "(", "tok", ")", ":", "try", ":", "return", "float", "(", "tok", ")", "except", "ValueError", ":", "return", "str", "(", "tok", ")" ]
gets a float from a token .
train
false
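Behavior sketch for _gp_float, assuming the function above is in scope:

print(_gp_float('3.14'))  # 3.14  (parsed as float)
print(_gp_float('NA'))    # 'NA'  (left as a string on ValueError)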
51,704
def almost_same_datetime(dt1, dt2, allowed_delta=timedelta(minutes=1)): return (abs((dt1 - dt2)) < allowed_delta)
[ "def", "almost_same_datetime", "(", "dt1", ",", "dt2", ",", "allowed_delta", "=", "timedelta", "(", "minutes", "=", "1", ")", ")", ":", "return", "(", "abs", "(", "(", "dt1", "-", "dt2", ")", ")", "<", "allowed_delta", ")" ]
returns true if the two datetimes are within allowed_delta ( one minute by default ) of each other .
train
false
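A quick check of the default one-minute tolerance, assuming almost_same_datetime above is in scope:

from datetime import datetime, timedelta

t1 = datetime(2020, 1, 1, 12, 0, 0)
t2 = t1 + timedelta(seconds=30)
print(almost_same_datetime(t1, t2))                         # True, within the default minute
print(almost_same_datetime(t1, t2, timedelta(seconds=10)))  # False, 30s exceeds 10s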
51,706
def delete_image_location_from_backend(context, image_id, location): deleted = False if CONF.delayed_delete: deleted = schedule_delayed_delete_from_backend(context, image_id, location) if (not deleted): safe_delete_from_backend(context, image_id, location)
[ "def", "delete_image_location_from_backend", "(", "context", ",", "image_id", ",", "location", ")", ":", "deleted", "=", "False", "if", "CONF", ".", "delayed_delete", ":", "deleted", "=", "schedule_delayed_delete_from_backend", "(", "context", ",", "image_id", ",", "location", ")", "if", "(", "not", "deleted", ")", ":", "safe_delete_from_backend", "(", "context", ",", "image_id", ",", "location", ")" ]
given a location , delete the image data from the backend , via a scheduled delayed delete when configured , otherwise immediately .
train
false
51,708
def GetClientURNsForHostnames(hostnames, token=None): index = CreateClientIndex(token=token) keywords = set() for hostname in hostnames: if hostname.startswith('host:'): keywords.add(hostname) else: keywords.add(('host:%s' % hostname)) results = index.ReadClientPostingLists(keywords) result = {} for (keyword, hits) in results.iteritems(): result[keyword[len('host:'):]] = hits return result
[ "def", "GetClientURNsForHostnames", "(", "hostnames", ",", "token", "=", "None", ")", ":", "index", "=", "CreateClientIndex", "(", "token", "=", "token", ")", "keywords", "=", "set", "(", ")", "for", "hostname", "in", "hostnames", ":", "if", "hostname", ".", "startswith", "(", "'host:'", ")", ":", "keywords", ".", "add", "(", "hostname", ")", "else", ":", "keywords", ".", "add", "(", "(", "'host:%s'", "%", "hostname", ")", ")", "results", "=", "index", ".", "ReadClientPostingLists", "(", "keywords", ")", "result", "=", "{", "}", "for", "(", "keyword", ",", "hits", ")", "in", "results", ".", "iteritems", "(", ")", ":", "result", "[", "keyword", "[", "len", "(", "'host:'", ")", ":", "]", "]", "=", "hits", "return", "result" ]
gets all client_ids for a given list of hostnames or fqdns .
train
true
51,710
def poll_for_callable(func, *args, **kwargs): timeout = 5 if ('timeout' in kwargs): timeout = kwargs.pop('timeout') start = time() last_exception = None while ((time() - start) < timeout): try: func_args = [] for arg in args: if callable(arg): func_args.append(arg()) else: func_args.append(arg) func(*func_args) except AssertionError as e: last_exception = e sleep(0.1) else: return True raise (last_exception or AssertionError('No exception triggered yet'))
[ "def", "poll_for_callable", "(", "func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "timeout", "=", "5", "if", "(", "'timeout'", "in", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ")", "start", "=", "time", "(", ")", "last_exception", "=", "None", "while", "(", "(", "time", "(", ")", "-", "start", ")", "<", "timeout", ")", ":", "try", ":", "func_args", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "callable", "(", "arg", ")", ":", "func_args", ".", "append", "(", "arg", "(", ")", ")", "else", ":", "func_args", ".", "append", "(", "arg", ")", "func", "(", "*", "func_args", ")", "except", "AssertionError", "as", "e", ":", "last_exception", "=", "e", "sleep", "(", "0.1", ")", "else", ":", "return", "True", "raise", "(", "last_exception", "or", "AssertionError", "(", "'No exception triggered yet'", ")", ")" ]
retry calling func ( resolving any callable arguments on each attempt ) until it stops raising assertionerror or timeout seconds elapse .
train
false
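A self-contained sketch of the retry loop in poll_for_callable: the assertion fails for the first few attempts, and the poller keeps retrying every 0.1 s until it passes (assumes the function above is in scope, with time and sleep imported in its module):

import itertools

attempts = itertools.count()

def check():
    # fails for attempts 0, 1, 2; passes on the fourth call
    assert next(attempts) >= 3, 'not ready yet'

print(poll_for_callable(check, timeout=2))  # True, after a few retries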
51,711
def GetS3CompatibleFileList(root, prefix=None): def _ListFiles(dir): for obj in os.listdir(dir): objpath = os.path.join(dir, obj) if os.path.isfile(objpath): (yield os.path.relpath(objpath, root)) elif os.path.isdir(objpath): for f in _ListFiles(objpath): (yield f) filelist = [x for x in _ListFiles(root) if ((not prefix) or x.startswith(prefix))] return sorted(filelist)
[ "def", "GetS3CompatibleFileList", "(", "root", ",", "prefix", "=", "None", ")", ":", "def", "_ListFiles", "(", "dir", ")", ":", "for", "obj", "in", "os", ".", "listdir", "(", "dir", ")", ":", "objpath", "=", "os", ".", "path", ".", "join", "(", "dir", ",", "obj", ")", "if", "os", ".", "path", ".", "isfile", "(", "objpath", ")", ":", "(", "yield", "os", ".", "path", ".", "relpath", "(", "objpath", ",", "root", ")", ")", "elif", "os", ".", "path", ".", "isdir", "(", "objpath", ")", ":", "for", "f", "in", "_ListFiles", "(", "objpath", ")", ":", "(", "yield", "f", ")", "filelist", "=", "[", "x", "for", "x", "in", "_ListFiles", "(", "root", ")", "if", "(", "(", "not", "prefix", ")", "or", "x", ".", "startswith", "(", "prefix", ")", ")", "]", "return", "sorted", "(", "filelist", ")" ]
returns a list of filenames from the local object store which emulates the sorting order of keys returned from an aws s3 file store .
train
false
51,712
@frappe.whitelist() def update_doc(doc): doc = json.loads(doc) try: to_update = doc doctype = doc[u'doctype'] docname = doc[u'name'] doc = frappe.get_doc(doctype, docname) doc.update(to_update) doc.save() except: return {u'doc': doc, u'exc': frappe.utils.get_traceback()} return doc
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "update_doc", "(", "doc", ")", ":", "doc", "=", "json", ".", "loads", "(", "doc", ")", "try", ":", "to_update", "=", "doc", "doctype", "=", "doc", "[", "u'doctype'", "]", "docname", "=", "doc", "[", "u'name'", "]", "doc", "=", "frappe", ".", "get_doc", "(", "doctype", ",", "docname", ")", "doc", ".", "update", "(", "to_update", ")", "doc", ".", "save", "(", ")", "except", ":", "return", "{", "u'doc'", ":", "doc", ",", "u'exc'", ":", "frappe", ".", "utils", ".", "get_traceback", "(", ")", "}", "return", "doc" ]
updates the doc when card is edited .
train
false
51,713
def concrete(seq): if isinstance(seq, Iterator): seq = list(seq) if isinstance(seq, (tuple, list)): seq = list(map(concrete, seq)) return seq
[ "def", "concrete", "(", "seq", ")", ":", "if", "isinstance", "(", "seq", ",", "Iterator", ")", ":", "seq", "=", "list", "(", "seq", ")", "if", "isinstance", "(", "seq", ",", "(", "tuple", ",", "list", ")", ")", ":", "seq", "=", "list", "(", "map", "(", "concrete", ",", "seq", ")", ")", "return", "seq" ]
make nested iterators concrete lists .
train
false
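Behavior sketch for concrete; note the snippet relies on a module-level Iterator import, reproduced here as collections.abc in modern Python:

from collections.abc import Iterator  # required by concrete() above

nested = iter([1, (2, iter([3, 4]))])
print(concrete(nested))  # [1, [2, [3, 4]]]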
51,714
def sosfreqd(sos, worN=None, whole=False): (sos, n_sections) = _validate_sos(sos) if (n_sections == 0): raise ValueError('Cannot compute frequencies with no sections') h = 1.0 for row in sos: (w, rowh) = freqd(row[:3], row[3:], worN=worN, whole=whole) h *= rowh return (w, h)
[ "def", "sosfreqd", "(", "sos", ",", "worN", "=", "None", ",", "whole", "=", "False", ")", ":", "(", "sos", ",", "n_sections", ")", "=", "_validate_sos", "(", "sos", ")", "if", "(", "n_sections", "==", "0", ")", ":", "raise", "ValueError", "(", "'Cannot compute frequencies with no sections'", ")", "h", "=", "1.0", "for", "row", "in", "sos", ":", "(", "w", ",", "rowh", ")", "=", "freqd", "(", "row", "[", ":", "3", "]", ",", "row", "[", "3", ":", "]", ",", "worN", "=", "worN", ",", "whole", "=", "whole", ")", "h", "*=", "rowh", "return", "(", "w", ",", "h", ")" ]
compute the frequency response of a digital filter in sos format .
train
false
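This record mirrors scipy.signal's sosfreqz (the freqd helper it calls corresponds to scipy's freqz); a usage sketch with a Butterworth design:

import numpy as np
from scipy.signal import butter, sosfreqz

sos = butter(4, 0.2, output='sos')  # 4th-order lowpass as two second-order sections
w, h = sosfreqz(sos, worN=512)
print(w.shape, np.abs(h)[0])        # (512,) and gain ~1.0 at DC for a lowpass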