Dataset schema (column, dtype, observed range):

  id_within_dataset     int64    1 to 55.5k
  snippet               string   lengths 19 to 14.2k
  tokens                list     lengths 6 to 1.63k
  nl                    string   lengths 6 to 352
  split_within_dataset  string   1 class
  is_duplicated         bool     2 classes
12,964
def loadavg():
    p = ''
    if ((not sabnzbd.WIN32) and (not sabnzbd.DARWIN)):
        opt = cfg.show_sysload()
        if opt:
            try:
                p = ('%.2f | %.2f | %.2f' % os.getloadavg())
            except:
                pass
            if ((opt > 1) and _HAVE_STATM):
                p = ('%s | %s' % (p, memory_usage()))
    return p
[ "def", "loadavg", "(", ")", ":", "p", "=", "''", "if", "(", "(", "not", "sabnzbd", ".", "WIN32", ")", "and", "(", "not", "sabnzbd", ".", "DARWIN", ")", ")", ":", "opt", "=", "cfg", ".", "show_sysload", "(", ")", "if", "opt", ":", "try", ":", "p", "=", "(", "'%.2f | %.2f | %.2f'", "%", "os", ".", "getloadavg", "(", ")", ")", "except", ":", "pass", "if", "(", "(", "opt", ">", "1", ")", "and", "_HAVE_STATM", ")", ":", "p", "=", "(", "'%s | %s'", "%", "(", "p", ",", "memory_usage", "(", ")", ")", ")", "return", "p" ]
return the load averages for this minion .
train
false
12,966
def parse_commits(head, name):
    for commit in head.traverse():
        (yield {
            '_id': commit.hexsha,
            '_parent': name,
            'committed_date': datetime.fromtimestamp(commit.committed_date),
            'committer': {'name': commit.committer.name, 'email': commit.committer.email},
            'authored_date': datetime.fromtimestamp(commit.authored_date),
            'author': {'name': commit.author.name, 'email': commit.author.email},
            'description': commit.message,
            'parent_shas': [p.hexsha for p in commit.parents],
            'files': list(commit.stats.files),
            'stats': commit.stats.total,
        })
[ "def", "parse_commits", "(", "head", ",", "name", ")", ":", "for", "commit", "in", "head", ".", "traverse", "(", ")", ":", "(", "yield", "{", "'_id'", ":", "commit", ".", "hexsha", ",", "'_parent'", ":", "name", ",", "'committed_date'", ":", "datetime", ".", "fromtimestamp", "(", "commit", ".", "committed_date", ")", ",", "'committer'", ":", "{", "'name'", ":", "commit", ".", "committer", ".", "name", ",", "'email'", ":", "commit", ".", "committer", ".", "email", "}", ",", "'authored_date'", ":", "datetime", ".", "fromtimestamp", "(", "commit", ".", "authored_date", ")", ",", "'author'", ":", "{", "'name'", ":", "commit", ".", "author", ".", "name", ",", "'email'", ":", "commit", ".", "author", ".", "email", "}", ",", "'description'", ":", "commit", ".", "message", ",", "'parent_shas'", ":", "[", "p", ".", "hexsha", "for", "p", "in", "commit", ".", "parents", "]", ",", "'files'", ":", "list", "(", "commit", ".", "stats", ".", "files", ")", ",", "'stats'", ":", "commit", ".", "stats", ".", "total", "}", ")" ]
go through the git repository log and generate a document per commit containing all the metadata .
train
true
12,967
def chi2_pdf(self, x, df):
    Px = ((x ** ((df / 2.0) - 1)) * np.exp(((- x) / 2.0)))
    Px /= (special.gamma((df / 2.0)) * (2 ** (df / 2.0)))
    return Px
[ "def", "chi2_pdf", "(", "self", ",", "x", ",", "df", ")", ":", "Px", "=", "(", "(", "x", "**", "(", "(", "df", "/", "2.0", ")", "-", "1", ")", ")", "*", "np", ".", "exp", "(", "(", "(", "-", "x", ")", "/", "2.0", ")", ")", ")", "Px", "/=", "(", "special", ".", "gamma", "(", "(", "df", "/", "2.0", ")", ")", "*", "(", "2", "**", "(", "df", "/", "2.0", ")", ")", ")", "return", "Px" ]
pdf of chi-square distribution .
train
false
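For reference, the chi2_pdf snippet above matches the standard chi-square density (a textbook identity stated here for checking, not taken from the dataset):

    f(x; k) = \frac{x^{k/2 - 1} e^{-x/2}}{2^{k/2} \, \Gamma(k/2)}

where k is the df argument: the first line of the function computes the numerator and the second divides by the 2^{k/2} \Gamma(k/2) normalizer.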
12,968
@require_POST
@login_required
def watch_locale(request):
    locale = request.LANGUAGE_CODE
    if (request.POST.get('watch') == 'yes'):
        NewPostInLocaleEvent.notify(request.user, locale=locale)
        NewThreadInLocaleEvent.notify(request.user, locale=locale)
        statsd.incr('kbforums.watches.locale')
    else:
        NewPostInLocaleEvent.stop_notifying(request.user, locale=locale)
        NewThreadInLocaleEvent.stop_notifying(request.user, locale=locale)
    return HttpResponseRedirect((get_next_url(request) or reverse('home')))
[ "@", "require_POST", "@", "login_required", "def", "watch_locale", "(", "request", ")", ":", "locale", "=", "request", ".", "LANGUAGE_CODE", "if", "(", "request", ".", "POST", ".", "get", "(", "'watch'", ")", "==", "'yes'", ")", ":", "NewPostInLocaleEvent", ".", "notify", "(", "request", ".", "user", ",", "locale", "=", "locale", ")", "NewThreadInLocaleEvent", ".", "notify", "(", "request", ".", "user", ",", "locale", "=", "locale", ")", "statsd", ".", "incr", "(", "'kbforums.watches.locale'", ")", "else", ":", "NewPostInLocaleEvent", ".", "stop_notifying", "(", "request", ".", "user", ",", "locale", "=", "locale", ")", "NewThreadInLocaleEvent", ".", "stop_notifying", "(", "request", ".", "user", ",", "locale", "=", "locale", ")", "return", "HttpResponseRedirect", "(", "(", "get_next_url", "(", "request", ")", "or", "reverse", "(", "'home'", ")", ")", ")" ]
start watching a locale for revisions ready for review .
train
false
12,970
def skolem_function(univ_scope=None):
    skolem = VariableExpression(Variable((u'F%s' % _counter.get())))
    if univ_scope:
        for v in list(univ_scope):
            skolem = skolem(VariableExpression(v))
    return skolem
[ "def", "skolem_function", "(", "univ_scope", "=", "None", ")", ":", "skolem", "=", "VariableExpression", "(", "Variable", "(", "(", "u'F%s'", "%", "_counter", ".", "get", "(", ")", ")", ")", ")", "if", "univ_scope", ":", "for", "v", "in", "list", "(", "univ_scope", ")", ":", "skolem", "=", "skolem", "(", "VariableExpression", "(", "v", ")", ")", "return", "skolem" ]
return a skolem function over the variables in univ_scope param univ_scope .
train
false
12,971
def tablength(word, tabwidth):
    return (len(word.replace(u' DCTB ', u'')) + (word.count(u' DCTB ') * tabwidth))
[ "def", "tablength", "(", "word", ",", "tabwidth", ")", ":", "return", "(", "len", "(", "word", ".", "replace", "(", "u' DCTB '", ",", "u''", ")", ")", "+", "(", "word", ".", "count", "(", "u' DCTB '", ")", "*", "tabwidth", ")", ")" ]
return length of a word taking tabs into account .
train
false
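As a quick check of the tablength snippet, note that ' DCTB ' is this corpus's literal tab marker, so each occurrence counts as tabwidth characters (hypothetical example, not a dataset row):

    # 'a DCTB b' is 'a', one tab marker, 'b': 2 visible chars + one 4-wide tab.
    assert tablength(u'a DCTB b', 4) == 6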
12,972
def _TestUpdateEpisode(tester, user_cookie, request_dict):
    validator = tester.validator
    (user_id, device_id) = tester.GetIdsFromCookie(user_cookie)
    request_dict = deepcopy(request_dict)
    actual_dict = tester.SendRequest('update_episode', user_cookie, request_dict)
    op_dict = tester._DeriveNotificationOpDict(user_id, device_id, request_dict)
    episode_dict = deepcopy(request_dict)
    episode_dict['user_id'] = user_id
    episode_dict.pop('headers', None)
    episode_dict.pop('activity', None)
    episode = validator.ValidateUpdateDBObject(Episode, **episode_dict)
    activity_dict = {'name': 'update_episode',
                     'activity_id': request_dict['activity']['activity_id'],
                     'timestamp': request_dict['activity']['timestamp'],
                     'episode_id': episode.episode_id}
    invalidate = {'episodes': [{'episode_id': request_dict['episode_id'], 'get_attributes': True}]}
    validator.ValidateFollowerNotifications(episode.viewpoint_id, activity_dict, op_dict, invalidate)
    tester._CompareResponseDicts('update_episode', user_id, request_dict, {}, actual_dict)
    return actual_dict
[ "def", "_TestUpdateEpisode", "(", "tester", ",", "user_cookie", ",", "request_dict", ")", ":", "validator", "=", "tester", ".", "validator", "(", "user_id", ",", "device_id", ")", "=", "tester", ".", "GetIdsFromCookie", "(", "user_cookie", ")", "request_dict", "=", "deepcopy", "(", "request_dict", ")", "actual_dict", "=", "tester", ".", "SendRequest", "(", "'update_episode'", ",", "user_cookie", ",", "request_dict", ")", "op_dict", "=", "tester", ".", "_DeriveNotificationOpDict", "(", "user_id", ",", "device_id", ",", "request_dict", ")", "episode_dict", "=", "deepcopy", "(", "request_dict", ")", "episode_dict", "[", "'user_id'", "]", "=", "user_id", "episode_dict", ".", "pop", "(", "'headers'", ",", "None", ")", "episode_dict", ".", "pop", "(", "'activity'", ",", "None", ")", "episode", "=", "validator", ".", "ValidateUpdateDBObject", "(", "Episode", ",", "**", "episode_dict", ")", "activity_dict", "=", "{", "'name'", ":", "'update_episode'", ",", "'activity_id'", ":", "request_dict", "[", "'activity'", "]", "[", "'activity_id'", "]", ",", "'timestamp'", ":", "request_dict", "[", "'activity'", "]", "[", "'timestamp'", "]", ",", "'episode_id'", ":", "episode", ".", "episode_id", "}", "invalidate", "=", "{", "'episodes'", ":", "[", "{", "'episode_id'", ":", "request_dict", "[", "'episode_id'", "]", ",", "'get_attributes'", ":", "True", "}", "]", "}", "validator", ".", "ValidateFollowerNotifications", "(", "episode", ".", "viewpoint_id", ",", "activity_dict", ",", "op_dict", ",", "invalidate", ")", "tester", ".", "_CompareResponseDicts", "(", "'update_episode'", ",", "user_id", ",", "request_dict", ",", "{", "}", ",", "actual_dict", ")", "return", "actual_dict" ]
called by the servicetester in order to test update_episode service api call .
train
false
12,973
@Addon.on_change
def watch_status(old_attr=None, new_attr=None, instance=None, sender=None, **kwargs):
    if (old_attr is None):
        old_attr = {}
    if (new_attr is None):
        new_attr = {}
    new_status = new_attr.get('status')
    old_status = old_attr.get('status')
    latest_version = instance.find_latest_version(channel=amo.RELEASE_CHANNEL_LISTED)
    if ((new_status not in amo.VALID_ADDON_STATUSES) or (not new_status) or (not latest_version)):
        return
    if (old_status not in amo.UNREVIEWED_ADDON_STATUSES):
        latest_version.reset_nomination_time()
    elif latest_version.has_files:
        inherit_nomination(None, latest_version)
[ "@", "Addon", ".", "on_change", "def", "watch_status", "(", "old_attr", "=", "None", ",", "new_attr", "=", "None", ",", "instance", "=", "None", ",", "sender", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "old_attr", "is", "None", ")", ":", "old_attr", "=", "{", "}", "if", "(", "new_attr", "is", "None", ")", ":", "new_attr", "=", "{", "}", "new_status", "=", "new_attr", ".", "get", "(", "'status'", ")", "old_status", "=", "old_attr", ".", "get", "(", "'status'", ")", "latest_version", "=", "instance", ".", "find_latest_version", "(", "channel", "=", "amo", ".", "RELEASE_CHANNEL_LISTED", ")", "if", "(", "(", "new_status", "not", "in", "amo", ".", "VALID_ADDON_STATUSES", ")", "or", "(", "not", "new_status", ")", "or", "(", "not", "latest_version", ")", ")", ":", "return", "if", "(", "old_status", "not", "in", "amo", ".", "UNREVIEWED_ADDON_STATUSES", ")", ":", "latest_version", ".", "reset_nomination_time", "(", ")", "elif", "latest_version", ".", "has_files", ":", "inherit_nomination", "(", "None", ",", "latest_version", ")" ]
set nomination date when app is pending review .
train
false
12,974
def get_harddisk_sleep():
    ret = salt.utils.mac_utils.execute_return_result('systemsetup -getharddisksleep')
    return salt.utils.mac_utils.parse_return(ret)
[ "def", "get_harddisk_sleep", "(", ")", ":", "ret", "=", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_result", "(", "'systemsetup -getharddisksleep'", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "parse_return", "(", "ret", ")" ]
display the amount of idle time until the hard disk sleeps .
train
true
12,976
def base_concrete_model(abstract, model):
    if hasattr(model, u'objects'):
        return (model if model._meta.abstract else _base_concrete_model(abstract, model))
    return (_base_concrete_model(abstract, model.__class__) or model.__class__)
[ "def", "base_concrete_model", "(", "abstract", ",", "model", ")", ":", "if", "hasattr", "(", "model", ",", "u'objects'", ")", ":", "return", "(", "model", "if", "model", ".", "_meta", ".", "abstract", "else", "_base_concrete_model", "(", "abstract", ",", "model", ")", ")", "return", "(", "_base_concrete_model", "(", "abstract", ",", "model", ".", "__class__", ")", "or", "model", ".", "__class__", ")" ]
used in methods of abstract models to find the super-most concrete model in the inheritance chain that inherits from the given abstract model .
train
true
12,977
@unbox(IndexType)
def unbox_index(typ, obj, c):
    data = c.pyapi.object_getattr_string(obj, '_data')
    index = make_index(c.context, c.builder, typ)
    index.data = c.unbox(typ.as_array, data).value
    return NativeValue(index._getvalue())
[ "@", "unbox", "(", "IndexType", ")", "def", "unbox_index", "(", "typ", ",", "obj", ",", "c", ")", ":", "data", "=", "c", ".", "pyapi", ".", "object_getattr_string", "(", "obj", ",", "'_data'", ")", "index", "=", "make_index", "(", "c", ".", "context", ",", "c", ".", "builder", ",", "typ", ")", "index", ".", "data", "=", "c", ".", "unbox", "(", "typ", ".", "as_array", ",", "data", ")", ".", "value", "return", "NativeValue", "(", "index", ".", "_getvalue", "(", ")", ")" ]
convert an index object to a native structure .
train
false
12,978
def send_email_notification(mr_list):
    email_list = frappe.db.sql_list("select distinct r.parent\n DCTB DCTB from tabUserRole r, tabUser p\n DCTB DCTB where p.name = r.parent and p.enabled = 1 and p.docstatus < 2\n DCTB DCTB and r.role in ('Purchase Manager','Stock Manager')\n DCTB DCTB and p.name not in ('Administrator', 'All', 'Guest')")
    msg = frappe.render_template('templates/emails/reorder_item.html', {'mr_list': mr_list})
    frappe.sendmail(recipients=email_list, subject=_('Auto Material Requests Generated'), message=msg)
[ "def", "send_email_notification", "(", "mr_list", ")", ":", "email_list", "=", "frappe", ".", "db", ".", "sql_list", "(", "\"select distinct r.parent\\n DCTB DCTB from tabUserRole r, tabUser p\\n DCTB DCTB where p.name = r.parent and p.enabled = 1 and p.docstatus < 2\\n DCTB DCTB and r.role in ('Purchase Manager','Stock Manager')\\n DCTB DCTB and p.name not in ('Administrator', 'All', 'Guest')\"", ")", "msg", "=", "frappe", ".", "render_template", "(", "'templates/emails/reorder_item.html'", ",", "{", "'mr_list'", ":", "mr_list", "}", ")", "frappe", ".", "sendmail", "(", "recipients", "=", "email_list", ",", "subject", "=", "_", "(", "'Auto Material Requests Generated'", ")", ",", "message", "=", "msg", ")" ]
notify user about auto creation of indent .
train
false
12,979
def ssh_execute(dest, *cmd, **kwargs):
    ssh_cmd = ['ssh', '-o', 'BatchMode=yes']
    ssh_cmd.append(dest)
    ssh_cmd.extend(cmd)
    return execute(*ssh_cmd, **kwargs)
[ "def", "ssh_execute", "(", "dest", ",", "*", "cmd", ",", "**", "kwargs", ")", ":", "ssh_cmd", "=", "[", "'ssh'", ",", "'-o'", ",", "'BatchMode=yes'", "]", "ssh_cmd", ".", "append", "(", "dest", ")", "ssh_cmd", ".", "extend", "(", "cmd", ")", "return", "execute", "(", "*", "ssh_cmd", ",", "**", "kwargs", ")" ]
convenience wrapper to execute ssh command .
train
false
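An illustrative call of ssh_execute (hypothetical destination; execute() is the module's own command runner, assumed in scope):

    # Runs: ssh -o BatchMode=yes admin@host1 uptime
    # BatchMode=yes makes ssh fail fast instead of prompting for a password.
    out = ssh_execute('admin@host1', 'uptime')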
12,980
def OSX_NSURL_toLocalFile(url):
    if isinstance(url, QUrl):
        url = url.toString()
    if (not url.startswith('file:///.file/id=')):
        return ''
    from subprocess import Popen, PIPE, DEVNULL
    cmd = ['osascript', '-e', 'get POSIX path of POSIX file "{}"'.format(url)]
    with Popen(cmd, stdout=PIPE, stderr=DEVNULL) as p:
        return p.stdout.read().strip().decode()
[ "def", "OSX_NSURL_toLocalFile", "(", "url", ")", ":", "if", "isinstance", "(", "url", ",", "QUrl", ")", ":", "url", "=", "url", ".", "toString", "(", ")", "if", "(", "not", "url", ".", "startswith", "(", "'file:///.file/id='", ")", ")", ":", "return", "''", "from", "subprocess", "import", "Popen", ",", "PIPE", ",", "DEVNULL", "cmd", "=", "[", "'osascript'", ",", "'-e'", ",", "'get POSIX path of POSIX file \"{}\"'", ".", "format", "(", "url", ")", "]", "with", "Popen", "(", "cmd", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "DEVNULL", ")", "as", "p", ":", "return", "p", ".", "stdout", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "decode", "(", ")" ]
return os x nsurl file reference as local file path , or an empty string if it is not an nsurl .
train
false
12,981
def table_dictize(obj, context, **kw):
    result_dict = {}
    model = context['model']
    session = model.Session
    if isinstance(obj, RowProxy):
        fields = obj.keys()
    else:
        ModelClass = obj.__class__
        table = class_mapper(ModelClass).mapped_table
        fields = [field.name for field in table.c]
    for field in fields:
        name = field
        if (name in ('current', 'expired_timestamp', 'expired_id')):
            continue
        if (name == 'continuity_id'):
            continue
        value = getattr(obj, name)
        if (value is None):
            result_dict[name] = value
        elif isinstance(value, dict):
            result_dict[name] = value
        elif isinstance(value, int):
            result_dict[name] = value
        elif isinstance(value, long):
            result_dict[name] = value
        elif isinstance(value, datetime.datetime):
            result_dict[name] = value.isoformat()
        elif isinstance(value, list):
            result_dict[name] = value
        else:
            result_dict[name] = unicode(value)
    result_dict.update(kw)
    context['metadata_modified'] = max(result_dict.get('revision_timestamp', ''),
                                       context.get('metadata_modified', ''))
    return result_dict
[ "def", "table_dictize", "(", "obj", ",", "context", ",", "**", "kw", ")", ":", "result_dict", "=", "{", "}", "model", "=", "context", "[", "'model'", "]", "session", "=", "model", ".", "Session", "if", "isinstance", "(", "obj", ",", "RowProxy", ")", ":", "fields", "=", "obj", ".", "keys", "(", ")", "else", ":", "ModelClass", "=", "obj", ".", "__class__", "table", "=", "class_mapper", "(", "ModelClass", ")", ".", "mapped_table", "fields", "=", "[", "field", ".", "name", "for", "field", "in", "table", ".", "c", "]", "for", "field", "in", "fields", ":", "name", "=", "field", "if", "(", "name", "in", "(", "'current'", ",", "'expired_timestamp'", ",", "'expired_id'", ")", ")", ":", "continue", "if", "(", "name", "==", "'continuity_id'", ")", ":", "continue", "value", "=", "getattr", "(", "obj", ",", "name", ")", "if", "(", "value", "is", "None", ")", ":", "result_dict", "[", "name", "]", "=", "value", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "result_dict", "[", "name", "]", "=", "value", "elif", "isinstance", "(", "value", ",", "int", ")", ":", "result_dict", "[", "name", "]", "=", "value", "elif", "isinstance", "(", "value", ",", "long", ")", ":", "result_dict", "[", "name", "]", "=", "value", "elif", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "result_dict", "[", "name", "]", "=", "value", ".", "isoformat", "(", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "result_dict", "[", "name", "]", "=", "value", "else", ":", "result_dict", "[", "name", "]", "=", "unicode", "(", "value", ")", "result_dict", ".", "update", "(", "kw", ")", "context", "[", "'metadata_modified'", "]", "=", "max", "(", "result_dict", ".", "get", "(", "'revision_timestamp'", ",", "''", ")", ",", "context", ".", "get", "(", "'metadata_modified'", ",", "''", ")", ")", "return", "result_dict" ]
get any model object and represent it as a dict .
train
false
12,983
def _numpy_tensor_product(*product):
    if (not np):
        raise ImportError
    answer = product[0]
    for item in product[1:]:
        answer = np.kron(answer, item)
    return answer
[ "def", "_numpy_tensor_product", "(", "*", "product", ")", ":", "if", "(", "not", "np", ")", ":", "raise", "ImportError", "answer", "=", "product", "[", "0", "]", "for", "item", "in", "product", "[", "1", ":", "]", ":", "answer", "=", "np", ".", "kron", "(", "answer", ",", "item", ")", "return", "answer" ]
numpy version of tensor product of multiple arguments .
train
false
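A small sketch of the Kronecker-product helper above, assuming np is the module's usual numpy import (hypothetical values):

    import numpy as np

    I2 = np.eye(2)
    X = np.array([[0, 1], [1, 0]])
    # kron of a 2x2 identity with a 2x2 matrix gives a 4x4 block matrix
    assert _numpy_tensor_product(I2, X).shape == (4, 4)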
12,985
def _createFilesDir():
    if (not conf.rFile):
        return
    conf.filePath = (paths.SQLMAP_FILES_PATH % conf.hostname)
    if (not os.path.isdir(conf.filePath)):
        try:
            os.makedirs(conf.filePath, 493)
        except OSError as ex:
            tempDir = tempfile.mkdtemp(prefix='sqlmapfiles')
            warnMsg = 'unable to create files directory '
            warnMsg += ("'%s' (%s). " % (conf.filePath, getUnicode(ex)))
            warnMsg += ("Using temporary directory '%s' instead" % tempDir)
            logger.warn(warnMsg)
            conf.filePath = tempDir
[ "def", "_createFilesDir", "(", ")", ":", "if", "(", "not", "conf", ".", "rFile", ")", ":", "return", "conf", ".", "filePath", "=", "(", "paths", ".", "SQLMAP_FILES_PATH", "%", "conf", ".", "hostname", ")", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "conf", ".", "filePath", ")", ")", ":", "try", ":", "os", ".", "makedirs", "(", "conf", ".", "filePath", ",", "493", ")", "except", "OSError", "as", "ex", ":", "tempDir", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "'sqlmapfiles'", ")", "warnMsg", "=", "'unable to create files directory '", "warnMsg", "+=", "(", "\"'%s' (%s). \"", "%", "(", "conf", ".", "filePath", ",", "getUnicode", "(", "ex", ")", ")", ")", "warnMsg", "+=", "(", "\"Using temporary directory '%s' instead\"", "%", "tempDir", ")", "logger", ".", "warn", "(", "warnMsg", ")", "conf", ".", "filePath", "=", "tempDir" ]
create the file directory .
train
false
12,986
def libvlc_toggle_teletext(p_mi):
    f = (_Cfunctions.get('libvlc_toggle_teletext', None) or
         _Cfunction('libvlc_toggle_teletext', ((1,),), None, None, MediaPlayer))
    return f(p_mi)
[ "def", "libvlc_toggle_teletext", "(", "p_mi", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_toggle_teletext'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_toggle_teletext'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaPlayer", ")", ")", "return", "f", "(", "p_mi", ")" ]
toggle teletext transparent status on video output .
train
false
12,987
def goodlines(path):
    return list((line for line in path.getContent().splitlines()
                 if (line and (not line.strip().startswith('#')))))
[ "def", "goodlines", "(", "path", ")", ":", "return", "list", "(", "(", "line", "for", "line", "in", "path", ".", "getContent", "(", ")", ".", "splitlines", "(", ")", "if", "(", "line", "and", "(", "not", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ")", ")", ")", ")" ]
return a list of lines read from path excluding those that are blank or begin with # .
train
false
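The filtering rule in goodlines can be checked without a Twisted-style path object; this sketch applies the same predicate to a raw string (hypothetical data):

    content = '# comment\n\nkeep me\n   # indented comment\nalso kept'
    kept = [line for line in content.splitlines()
            if line and not line.strip().startswith('#')]
    assert kept == ['keep me', 'also kept']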
12,988
def get_differ(a, b, ignore_space=False, compat_version=DiffCompatVersion.DEFAULT):
    cls = None
    if (compat_version in DiffCompatVersion.MYERS_VERSIONS):
        from reviewboard.diffviewer.myersdiff import MyersDiffer
        cls = MyersDiffer
    elif (compat_version == DiffCompatVersion.SMDIFFER):
        from reviewboard.diffviewer.smdiff import SMDiffer
        cls = SMDiffer
    else:
        raise DiffCompatError((u'Invalid diff compatibility version (%s) passed to Differ' % compat_version))
    return cls(a, b, ignore_space, compat_version=compat_version)
[ "def", "get_differ", "(", "a", ",", "b", ",", "ignore_space", "=", "False", ",", "compat_version", "=", "DiffCompatVersion", ".", "DEFAULT", ")", ":", "cls", "=", "None", "if", "(", "compat_version", "in", "DiffCompatVersion", ".", "MYERS_VERSIONS", ")", ":", "from", "reviewboard", ".", "diffviewer", ".", "myersdiff", "import", "MyersDiffer", "cls", "=", "MyersDiffer", "elif", "(", "compat_version", "==", "DiffCompatVersion", ".", "SMDIFFER", ")", ":", "from", "reviewboard", ".", "diffviewer", ".", "smdiff", "import", "SMDiffer", "cls", "=", "SMDiffer", "else", ":", "raise", "DiffCompatError", "(", "(", "u'Invalid diff compatibility version (%s) passed to Differ'", "%", "compat_version", ")", ")", "return", "cls", "(", "a", ",", "b", ",", "ignore_space", ",", "compat_version", "=", "compat_version", ")" ]
returns a differ with the given settings .
train
false
12,989
def _fetch_option(cfg, ret_config, virtualname, attr_name):
    if isinstance(cfg, dict):
        c_cfg = cfg
    else:
        c_cfg = cfg('{0}'.format(virtualname), {})
    default_cfg_key = '{0}.{1}'.format(virtualname, attr_name)
    if (not ret_config):
        if isinstance(cfg, dict):
            return c_cfg.get(attr_name, cfg.get(default_cfg_key))
        else:
            return c_cfg.get(attr_name, cfg(default_cfg_key))
    ret_cfg = cfg('{0}.{1}'.format(ret_config, virtualname), {})
    override_default_cfg_key = '{0}.{1}.{2}'.format(ret_config, virtualname, attr_name)
    override_cfg_default = cfg(override_default_cfg_key)
    ret_override_cfg = ret_cfg.get(attr_name, override_cfg_default)
    if ret_override_cfg:
        return ret_override_cfg
    return c_cfg.get(attr_name, cfg(default_cfg_key))
[ "def", "_fetch_option", "(", "cfg", ",", "ret_config", ",", "virtualname", ",", "attr_name", ")", ":", "if", "isinstance", "(", "cfg", ",", "dict", ")", ":", "c_cfg", "=", "cfg", "else", ":", "c_cfg", "=", "cfg", "(", "'{0}'", ".", "format", "(", "virtualname", ")", ",", "{", "}", ")", "default_cfg_key", "=", "'{0}.{1}'", ".", "format", "(", "virtualname", ",", "attr_name", ")", "if", "(", "not", "ret_config", ")", ":", "if", "isinstance", "(", "cfg", ",", "dict", ")", ":", "return", "c_cfg", ".", "get", "(", "attr_name", ",", "cfg", ".", "get", "(", "default_cfg_key", ")", ")", "else", ":", "return", "c_cfg", ".", "get", "(", "attr_name", ",", "cfg", "(", "default_cfg_key", ")", ")", "ret_cfg", "=", "cfg", "(", "'{0}.{1}'", ".", "format", "(", "ret_config", ",", "virtualname", ")", ",", "{", "}", ")", "override_default_cfg_key", "=", "'{0}.{1}.{2}'", ".", "format", "(", "ret_config", ",", "virtualname", ",", "attr_name", ")", "override_cfg_default", "=", "cfg", "(", "override_default_cfg_key", ")", "ret_override_cfg", "=", "ret_cfg", ".", "get", "(", "attr_name", ",", "override_cfg_default", ")", "if", "ret_override_cfg", ":", "return", "ret_override_cfg", "return", "c_cfg", ".", "get", "(", "attr_name", ",", "cfg", "(", "default_cfg_key", ")", ")" ]
fetch a given option value from the config .
train
true
12,991
def _prepare_info(inverse_operator):
    info = deepcopy(inverse_operator['info'])
    info['sfreq'] = 1000.0
    info['projs'] = inverse_operator['projs']
    return info
[ "def", "_prepare_info", "(", "inverse_operator", ")", ":", "info", "=", "deepcopy", "(", "inverse_operator", "[", "'info'", "]", ")", "info", "[", "'sfreq'", "]", "=", "1000.0", "info", "[", "'projs'", "]", "=", "inverse_operator", "[", "'projs'", "]", "return", "info" ]
get a usable dict .
train
false
12,993
def on_revision_save(sender, instance, **kwargs):
    rev = instance
    year = rev.created.year
    creator = rev.creator
    if (not rev.is_approved):
        return
    if (rev.document.locale == settings.WIKI_DEFAULT_LANGUAGE):
        badge_template = WIKI_BADGES['kb-badge']
    else:
        badge_template = WIKI_BADGES['l10n-badge']
    from kitsune.wiki.tasks import maybe_award_badge
    maybe_award_badge.delay(badge_template, year, creator)
[ "def", "on_revision_save", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "rev", "=", "instance", "year", "=", "rev", ".", "created", ".", "year", "creator", "=", "rev", ".", "creator", "if", "(", "not", "rev", ".", "is_approved", ")", ":", "return", "if", "(", "rev", ".", "document", ".", "locale", "==", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ")", ":", "badge_template", "=", "WIKI_BADGES", "[", "'kb-badge'", "]", "else", ":", "badge_template", "=", "WIKI_BADGES", "[", "'l10n-badge'", "]", "from", "kitsune", ".", "wiki", ".", "tasks", "import", "maybe_award_badge", "maybe_award_badge", ".", "delay", "(", "badge_template", ",", "year", ",", "creator", ")" ]
handle the revision save signal .
train
false
12,994
def cwt(data, wavelet, widths):
    output = np.zeros([len(widths), len(data)])
    for (ind, width) in enumerate(widths):
        wavelet_data = wavelet(min((10 * width), len(data)), width)
        output[ind, :] = convolve(data, wavelet_data, mode='same')
    return output
[ "def", "cwt", "(", "data", ",", "wavelet", ",", "widths", ")", ":", "output", "=", "np", ".", "zeros", "(", "[", "len", "(", "widths", ")", ",", "len", "(", "data", ")", "]", ")", "for", "(", "ind", ",", "width", ")", "in", "enumerate", "(", "widths", ")", ":", "wavelet_data", "=", "wavelet", "(", "min", "(", "(", "10", "*", "width", ")", ",", "len", "(", "data", ")", ")", ",", "width", ")", "output", "[", "ind", ",", ":", "]", "=", "convolve", "(", "data", ",", "wavelet_data", ",", "mode", "=", "'same'", ")", "return", "output" ]
compute time freq decomposition with continuous wavelet transform .
train
false
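An illustrative call of the cwt snippet, assuming np and convolve come from the module's own numpy/scipy imports; scipy.signal.ricker serves as the mother wavelet since it has the (points, width) signature the snippet expects (hypothetical signal):

    import numpy as np
    from scipy.signal import ricker

    t = np.linspace(0, 1, 200)
    data = np.sin((2 * np.pi) * 7 * t)   # a 7 Hz tone over one second
    widths = np.arange(1, 31)            # 30 scales
    coeffs = cwt(data, ricker, widths)
    assert coeffs.shape == (30, 200)     # one row of coefficients per width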
12,996
def test_outer_scope():
    def outer_scope_test():
        class Referenced:
            pass
        class C:
            if Referenced:
                pass
        Assert(('Referenced' not in C.__dict__.keys()))
    outer_scope_test()
    for x in [None, 'abc', 3]:
        class foo(object, ):
            pass
        a = foo()
        try:
            a.__dict__ = x
            AssertUnreachable()
        except TypeError:
            pass
[ "def", "test_outer_scope", "(", ")", ":", "def", "outer_scope_test", "(", ")", ":", "class", "Referenced", ":", "pass", "class", "C", ":", "if", "Referenced", ":", "pass", "Assert", "(", "(", "'Referenced'", "not", "in", "C", ".", "__dict__", ".", "keys", "(", ")", ")", ")", "outer_scope_test", "(", ")", "for", "x", "in", "[", "None", ",", "'abc'", ",", "3", "]", ":", "class", "foo", "(", "object", ",", ")", ":", "pass", "a", "=", "foo", "(", ")", "try", ":", "a", ".", "__dict__", "=", "x", "AssertUnreachable", "(", ")", "except", "TypeError", ":", "pass" ]
do not automatically include outer scopes in closure scenarios .
train
false
12,997
@commands(u'agreed')
@example(u'.agreed Bowties are cool')
def meetingagreed(bot, trigger):
    if (not ismeetingrunning(trigger.sender)):
        bot.say(u"Can't do that, start meeting first")
        return
    if (not trigger.group(2)):
        bot.say(u'try .action someone will do something')
        return
    if (not ischair(trigger.nick, trigger.sender)):
        bot.say(u'Only meeting head or chairs can do that')
        return
    logplain((u'AGREED: ' + trigger.group(2)), trigger.sender)
    logHTML_listitem((u'<span style="font-weight: bold">Agreed: </span>' + trigger.group(2)), trigger.sender)
    bot.say((u'\x02AGREED\x0f: ' + trigger.group(2)))
[ "@", "commands", "(", "u'agreed'", ")", "@", "example", "(", "u'.agreed Bowties are cool'", ")", "def", "meetingagreed", "(", "bot", ",", "trigger", ")", ":", "if", "(", "not", "ismeetingrunning", "(", "trigger", ".", "sender", ")", ")", ":", "bot", ".", "say", "(", "u\"Can't do that, start meeting first\"", ")", "return", "if", "(", "not", "trigger", ".", "group", "(", "2", ")", ")", ":", "bot", ".", "say", "(", "u'try .action someone will do something'", ")", "return", "if", "(", "not", "ischair", "(", "trigger", ".", "nick", ",", "trigger", ".", "sender", ")", ")", ":", "bot", ".", "say", "(", "u'Only meeting head or chairs can do that'", ")", "return", "logplain", "(", "(", "u'AGREED: '", "+", "trigger", ".", "group", "(", "2", ")", ")", ",", "trigger", ".", "sender", ")", "logHTML_listitem", "(", "(", "u'<span style=\"font-weight: bold\">Agreed: </span>'", "+", "trigger", ".", "group", "(", "2", ")", ")", ",", "trigger", ".", "sender", ")", "bot", ".", "say", "(", "(", "u'\\x02AGREED\\x0f: '", "+", "trigger", ".", "group", "(", "2", ")", ")", ")" ]
log an agreement in the meeting log .
train
false
12,998
def besselFilter(data, cutoff, order=1, dt=None, btype='low', bidir=True):
    try:
        import scipy.signal
    except ImportError:
        raise Exception('besselFilter() requires the package scipy.signal.')
    if (dt is None):
        try:
            tvals = data.xvals('Time')
            dt = ((tvals[(-1)] - tvals[0]) / (len(tvals) - 1))
        except:
            dt = 1.0
    (b, a) = scipy.signal.bessel(order, (cutoff * dt), btype=btype)
    return applyFilter(data, b, a, bidir=bidir)
[ "def", "besselFilter", "(", "data", ",", "cutoff", ",", "order", "=", "1", ",", "dt", "=", "None", ",", "btype", "=", "'low'", ",", "bidir", "=", "True", ")", ":", "try", ":", "import", "scipy", ".", "signal", "except", "ImportError", ":", "raise", "Exception", "(", "'besselFilter() requires the package scipy.signal.'", ")", "if", "(", "dt", "is", "None", ")", ":", "try", ":", "tvals", "=", "data", ".", "xvals", "(", "'Time'", ")", "dt", "=", "(", "(", "tvals", "[", "(", "-", "1", ")", "]", "-", "tvals", "[", "0", "]", ")", "/", "(", "len", "(", "tvals", ")", "-", "1", ")", ")", "except", ":", "dt", "=", "1.0", "(", "b", ",", "a", ")", "=", "scipy", ".", "signal", ".", "bessel", "(", "order", ",", "(", "cutoff", "*", "dt", ")", ",", "btype", "=", "btype", ")", "return", "applyFilter", "(", "data", ",", "b", ",", "a", ",", "bidir", "=", "bidir", ")" ]
return data passed through bessel filter .
train
false
12,999
def ePutSS(Handle, pIOType, pChannel, Value, x1):
    if (os.name == 'nt'):
        staticLib = ctypes.windll.LoadLibrary('labjackud')
        pv = ctypes.c_double(Value)
        ec = staticLib.ePutSS(Handle, pIOType, pChannel, pv, x1)
        if (ec != 0):
            raise LabJackException(ec)
    else:
        raise LabJackException(0, 'Function only supported for Windows')
[ "def", "ePutSS", "(", "Handle", ",", "pIOType", ",", "pChannel", ",", "Value", ",", "x1", ")", ":", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "staticLib", "=", "ctypes", ".", "windll", ".", "LoadLibrary", "(", "'labjackud'", ")", "pv", "=", "ctypes", ".", "c_double", "(", "Value", ")", "ec", "=", "staticLib", ".", "ePutSS", "(", "Handle", ",", "pIOType", ",", "pChannel", ",", "pv", ",", "x1", ")", "if", "(", "ec", "!=", "0", ")", ":", "raise", "LabJackException", "(", "ec", ")", "else", ":", "raise", "LabJackException", "(", "0", ",", "'Function only supported for Windows'", ")" ]
put one value to the labjack device . eput is equivalent to an addrequest followed by a goone .
train
false
13,003
def parse_key_id(id):
    return parse_object_id('keys', id)
[ "def", "parse_key_id", "(", "id", ")", ":", "return", "parse_object_id", "(", "'keys'", ",", "id", ")" ]
validate the key_id and break it into segments . :arg key_id: the key_id as supplied by the user .
train
false
13,004
def uniqueify(items):
    return list(set(items))
[ "def", "uniqueify", "(", "items", ")", ":", "return", "list", "(", "set", "(", "items", ")", ")" ]
return a list of the unique items in the given iterable .
train
false
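One usage caveat for uniqueify: the round trip through set() discards input order, so only membership is guaranteed (hypothetical example):

    result = uniqueify([3, 1, 2, 3, 1])
    assert sorted(result) == [1, 2, 3]   # order of result itself is arbitrary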
13,005
def test_prompt_should_ask_and_keep_repo_dir(mocker, tmpdir):
    mock_read_user = mocker.patch('cookiecutter.vcs.read_user_yes_no', return_value=False, autospec=True)
    repo_dir = tmpdir.mkdir('repo')
    with pytest.raises(SystemExit):
        vcs.prompt_and_delete_repo(str(repo_dir))
    assert mock_read_user.called
    assert repo_dir.exists()
[ "def", "test_prompt_should_ask_and_keep_repo_dir", "(", "mocker", ",", "tmpdir", ")", ":", "mock_read_user", "=", "mocker", ".", "patch", "(", "'cookiecutter.vcs.read_user_yes_no'", ",", "return_value", "=", "False", ",", "autospec", "=", "True", ")", "repo_dir", "=", "tmpdir", ".", "mkdir", "(", "'repo'", ")", "with", "pytest", ".", "raises", "(", "SystemExit", ")", ":", "vcs", ".", "prompt_and_delete_repo", "(", "str", "(", "repo_dir", ")", ")", "assert", "mock_read_user", ".", "called", "assert", "repo_dir", ".", "exists", "(", ")" ]
in prompt_and_delete_repo() , ask the user and keep the repo dir when deletion is declined .
train
false
13,006
@snippet
def dataset_patch(client, to_delete):
    DATASET_NAME = ('dataset_patch_%d' % (_millis(),))
    dataset = client.dataset(DATASET_NAME)
    dataset.description = ORIGINAL_DESCRIPTION
    dataset.create()
    to_delete.append(dataset)
    ONE_DAY_MS = (((24 * 60) * 60) * 1000)
    assert (dataset.description == ORIGINAL_DESCRIPTION)
    dataset.patch(description=PATCHED_DESCRIPTION, default_table_expiration_ms=ONE_DAY_MS)
    assert (dataset.description == PATCHED_DESCRIPTION)
    assert (dataset.default_table_expiration_ms == ONE_DAY_MS)
[ "@", "snippet", "def", "dataset_patch", "(", "client", ",", "to_delete", ")", ":", "DATASET_NAME", "=", "(", "'dataset_patch_%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "dataset", "=", "client", ".", "dataset", "(", "DATASET_NAME", ")", "dataset", ".", "description", "=", "ORIGINAL_DESCRIPTION", "dataset", ".", "create", "(", ")", "to_delete", ".", "append", "(", "dataset", ")", "ONE_DAY_MS", "=", "(", "(", "(", "24", "*", "60", ")", "*", "60", ")", "*", "1000", ")", "assert", "(", "dataset", ".", "description", "==", "ORIGINAL_DESCRIPTION", ")", "dataset", ".", "patch", "(", "description", "=", "PATCHED_DESCRIPTION", ",", "default_table_expiration_ms", "=", "ONE_DAY_MS", ")", "assert", "(", "dataset", ".", "description", "==", "PATCHED_DESCRIPTION", ")", "assert", "(", "dataset", ".", "default_table_expiration_ms", "==", "ONE_DAY_MS", ")" ]
patch a dataset's metadata .
train
false
13,009
def get_host_iqn(session, cluster=None):
    host_mor = vm_util.get_host_ref(session, cluster)
    hbas_ret = session._call_method(vim_util, 'get_dynamic_property', host_mor,
                                    'HostSystem', 'config.storageDevice.hostBusAdapter')
    if (hbas_ret is None):
        return
    host_hbas = hbas_ret.HostHostBusAdapter
    if (not host_hbas):
        return
    for hba in host_hbas:
        if (hba.__class__.__name__ == 'HostInternetScsiHba'):
            return hba.iScsiName
[ "def", "get_host_iqn", "(", "session", ",", "cluster", "=", "None", ")", ":", "host_mor", "=", "vm_util", ".", "get_host_ref", "(", "session", ",", "cluster", ")", "hbas_ret", "=", "session", ".", "_call_method", "(", "vim_util", ",", "'get_dynamic_property'", ",", "host_mor", ",", "'HostSystem'", ",", "'config.storageDevice.hostBusAdapter'", ")", "if", "(", "hbas_ret", "is", "None", ")", ":", "return", "host_hbas", "=", "hbas_ret", ".", "HostHostBusAdapter", "if", "(", "not", "host_hbas", ")", ":", "return", "for", "hba", "in", "host_hbas", ":", "if", "(", "hba", ".", "__class__", ".", "__name__", "==", "'HostInternetScsiHba'", ")", ":", "return", "hba", ".", "iScsiName" ]
return the host iscsi iqn .
train
false
13,012
def model_from_protobuf(pb, _entity_class=datastore.Entity):
    entity = _entity_class.FromPb(pb, default_kind=Expando.kind())
    return class_for_kind(entity.kind()).from_entity(entity)
[ "def", "model_from_protobuf", "(", "pb", ",", "_entity_class", "=", "datastore", ".", "Entity", ")", ":", "entity", "=", "_entity_class", ".", "FromPb", "(", "pb", ",", "default_kind", "=", "Expando", ".", "kind", "(", ")", ")", "return", "class_for_kind", "(", "entity", ".", "kind", "(", ")", ")", ".", "from_entity", "(", "entity", ")" ]
decodes a model instance from a protocol buffer .
train
false
13,013
def drop_block_ss_columns(store):
    for c in ['created', 'destroyed']:
        try:
            store.sql(('ALTER TABLE block DROP COLUMN block_ss_' + c))
        except Exception:
            store.rollback()
[ "def", "drop_block_ss_columns", "(", "store", ")", ":", "for", "c", "in", "[", "'created'", ",", "'destroyed'", "]", ":", "try", ":", "store", ".", "sql", "(", "(", "'ALTER TABLE block DROP COLUMN block_ss_'", "+", "c", ")", ")", "except", "Exception", ":", "store", ".", "rollback", "(", ")" ]
drop columns that may have been added in error .
train
false
13,014
def find_network_with_bridge(session, bridge):
    expr = ('field "name__label" = "%s" or field "bridge" = "%s"' % (bridge, bridge))
    networks = session.call_xenapi('network.get_all_records_where', expr)
    if (len(networks) == 1):
        return networks.keys()[0]
    elif (len(networks) > 1):
        raise Exception((_('Found non-unique network for bridge %s') % bridge))
    else:
        raise Exception((_('Found no network for bridge %s') % bridge))
[ "def", "find_network_with_bridge", "(", "session", ",", "bridge", ")", ":", "expr", "=", "(", "'field \"name__label\" = \"%s\" or field \"bridge\" = \"%s\"'", "%", "(", "bridge", ",", "bridge", ")", ")", "networks", "=", "session", ".", "call_xenapi", "(", "'network.get_all_records_where'", ",", "expr", ")", "if", "(", "len", "(", "networks", ")", "==", "1", ")", ":", "return", "networks", ".", "keys", "(", ")", "[", "0", "]", "elif", "(", "len", "(", "networks", ")", ">", "1", ")", ":", "raise", "Exception", "(", "(", "_", "(", "'Found non-unique network for bridge %s'", ")", "%", "bridge", ")", ")", "else", ":", "raise", "Exception", "(", "(", "_", "(", "'Found no network for bridge %s'", ")", "%", "bridge", ")", ")" ]
return the network on which the bridge is attached .
train
false
13,017
@cache_permission
def can_upload_screenshot(user, project):
    return check_permission(user, project, 'trans.upload_screenshot')
[ "@", "cache_permission", "def", "can_upload_screenshot", "(", "user", ",", "project", ")", ":", "return", "check_permission", "(", "user", ",", "project", ",", "'trans.upload_screenshot'", ")" ]
checks whether user can upload screenshot for given project .
train
false
13,018
def _make_guesses(surf_or_rad, r0, grid, exclude, mindist, n_jobs):
    if isinstance(surf_or_rad, dict):
        surf = surf_or_rad
        logger.info(('Guess surface (%s) is in %s coordinates'
                     % (_bem_explain_surface(surf['id']), _coord_frame_name(surf['coord_frame']))))
    else:
        radius = surf_or_rad[0]
        logger.info(('Making a spherical guess space with radius %7.1f mm...' % (1000 * radius)))
        surf = _get_ico_surface(3)
        _normalize_vectors(surf['rr'])
        surf['rr'] *= radius
        surf['rr'] += r0
    logger.info(('Filtering (grid = %6.f mm)...' % (1000 * grid)))
    src = _make_volume_source_space(surf, grid, exclude, (1000 * mindist),
                                    do_neighbors=False, n_jobs=n_jobs)
    src = dict(rr=src['rr'][src['vertno']], nn=src['nn'][src['vertno']],
               nuse=src['nuse'], coord_frame=src['coord_frame'],
               vertno=np.arange(src['nuse']))
    return SourceSpaces([src])
[ "def", "_make_guesses", "(", "surf_or_rad", ",", "r0", ",", "grid", ",", "exclude", ",", "mindist", ",", "n_jobs", ")", ":", "if", "isinstance", "(", "surf_or_rad", ",", "dict", ")", ":", "surf", "=", "surf_or_rad", "logger", ".", "info", "(", "(", "'Guess surface (%s) is in %s coordinates'", "%", "(", "_bem_explain_surface", "(", "surf", "[", "'id'", "]", ")", ",", "_coord_frame_name", "(", "surf", "[", "'coord_frame'", "]", ")", ")", ")", ")", "else", ":", "radius", "=", "surf_or_rad", "[", "0", "]", "logger", ".", "info", "(", "(", "'Making a spherical guess space with radius %7.1f mm...'", "%", "(", "1000", "*", "radius", ")", ")", ")", "surf", "=", "_get_ico_surface", "(", "3", ")", "_normalize_vectors", "(", "surf", "[", "'rr'", "]", ")", "surf", "[", "'rr'", "]", "*=", "radius", "surf", "[", "'rr'", "]", "+=", "r0", "logger", ".", "info", "(", "(", "'Filtering (grid = %6.f mm)...'", "%", "(", "1000", "*", "grid", ")", ")", ")", "src", "=", "_make_volume_source_space", "(", "surf", ",", "grid", ",", "exclude", ",", "(", "1000", "*", "mindist", ")", ",", "do_neighbors", "=", "False", ",", "n_jobs", "=", "n_jobs", ")", "src", "=", "dict", "(", "rr", "=", "src", "[", "'rr'", "]", "[", "src", "[", "'vertno'", "]", "]", ",", "nn", "=", "src", "[", "'nn'", "]", "[", "src", "[", "'vertno'", "]", "]", ",", "nuse", "=", "src", "[", "'nuse'", "]", ",", "coord_frame", "=", "src", "[", "'coord_frame'", "]", ",", "vertno", "=", "np", ".", "arange", "(", "src", "[", "'nuse'", "]", ")", ")", "return", "SourceSpaces", "(", "[", "src", "]", ")" ]
make a guess space inside a sphere or bem surface .
train
false
13,019
def rc4(key, string):
    string.encode('utf8')
    key.encode('utf8')
    def convert_key(s):
        return [ord(c) for c in s]
    key = convert_key(key)
    keystream = RC4(key)
    r = ''
    for c in string:
        if (sys.version_info[0] == 3):
            r += bytes([(ord(c) ^ next(keystream))])
        else:
            r += chr((ord(c) ^ next(keystream)))
    return base64.b64encode(r).replace('\n', '').decode('ascii')
[ "def", "rc4", "(", "key", ",", "string", ")", ":", "string", ".", "encode", "(", "'utf8'", ")", "key", ".", "encode", "(", "'utf8'", ")", "def", "convert_key", "(", "s", ")", ":", "return", "[", "ord", "(", "c", ")", "for", "c", "in", "s", "]", "key", "=", "convert_key", "(", "key", ")", "keystream", "=", "RC4", "(", "key", ")", "r", "=", "''", "for", "c", "in", "string", ":", "if", "(", "sys", ".", "version_info", "[", "0", "]", "==", "3", ")", ":", "r", "+=", "bytes", "(", "[", "(", "ord", "(", "c", ")", "^", "next", "(", "keystream", ")", ")", "]", ")", "else", ":", "r", "+=", "chr", "(", "(", "ord", "(", "c", ")", "^", "next", "(", "keystream", ")", ")", ")", "return", "base64", ".", "b64encode", "(", "r", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", ".", "decode", "(", "'ascii'", ")" ]
rc4-encrypt a string with the given key and return the result base64-encoded .
train
false
13,020
def get_current_registry(context=None):
    return manager.get()['registry']
[ "def", "get_current_registry", "(", "context", "=", "None", ")", ":", "return", "manager", ".", "get", "(", ")", "[", "'registry'", "]" ]
return the currently active application registry or the global application registry if no request is currently active .
train
false
13,021
def bind_af_aware(sock, addr):
    if (HAS_UNIX_SOCKETS and (sock.family == socket.AF_UNIX)):
        unlink(addr)
    sock.bind(addr)
[ "def", "bind_af_aware", "(", "sock", ",", "addr", ")", ":", "if", "(", "HAS_UNIX_SOCKETS", "and", "(", "sock", ".", "family", "==", "socket", ".", "AF_UNIX", ")", ")", ":", "unlink", "(", "addr", ")", "sock", ".", "bind", "(", "addr", ")" ]
helper function to bind a socket according to its family .
train
false
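A hedged usage sketch of bind_af_aware; HAS_UNIX_SOCKETS and unlink are the module's own globals, assumed in scope. For an AF_INET socket the helper just binds, while for AF_UNIX it first unlinks a stale socket file:

    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    bind_af_aware(s, ('127.0.0.1', 0))   # port 0: let the OS pick a free port
    print(s.getsockname())               # e.g. ('127.0.0.1', 54321)
    s.close()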
13,022
def normalize_html(input):
    return kuma.wiki.content.parse(unicode(input)).filter(WhitespaceRemovalFilter).serialize(alphabetical_attributes=True)
[ "def", "normalize_html", "(", "input", ")", ":", "return", "kuma", ".", "wiki", ".", "content", ".", "parse", "(", "unicode", "(", "input", ")", ")", ".", "filter", "(", "WhitespaceRemovalFilter", ")", ".", "serialize", "(", "alphabetical_attributes", "=", "True", ")" ]
normalize html5 input .
train
false
13,023
def get_matching_files(all_fastq, all_mapping, read_indicator, barcode_indicator, mapping_indicator):
    read_files = []
    barcode_files = []
    mapping_files = {}
    matching_files = {}
    for curr_file in all_mapping:
        try:
            curr_mapping = curr_file.split(mapping_indicator)
            mapping_files[(curr_mapping[0] + splitext(curr_mapping[1])[0])] = curr_file
        except IndexError:
            raise IndexError(('Found file with a mapping file extension that does not contain the mapping file indicators (see mapping_indicator): %s' % curr_file))
    for curr_file in all_fastq:
        curr_file_string_read = curr_file.split(read_indicator)
        curr_file_string_bc = curr_file.split(barcode_indicator)
        if (len(curr_file_string_read) == 2):
            read_files.append(curr_file_string_read)
        elif (len(curr_file_string_bc) == 2):
            barcode_files.append(curr_file_string_bc)
        else:
            raise ValueError(((('Invalid filename found for splitting on input ' +
                                ('for file %s, ' % curr_file)) +
                               'check input read indicator ') +
                              'and barcode indicator parameters.'))
    for curr_read in read_files:
        for curr_bc in barcode_files:
            if (curr_read == curr_bc):
                curr_read_sans_ext = (curr_read[0] + curr_read[1].split('.f')[0])
                try:
                    matching_files[read_indicator.join(curr_read)] = (barcode_indicator.join(curr_bc), mapping_files[curr_read_sans_ext])
                except KeyError:
                    raise KeyError(('Found read file with no matching mapping file: %s' % read_indicator.join(curr_read)))
    return matching_files
[ "def", "get_matching_files", "(", "all_fastq", ",", "all_mapping", ",", "read_indicator", ",", "barcode_indicator", ",", "mapping_indicator", ")", ":", "read_files", "=", "[", "]", "barcode_files", "=", "[", "]", "mapping_files", "=", "{", "}", "matching_files", "=", "{", "}", "for", "curr_file", "in", "all_mapping", ":", "try", ":", "curr_mapping", "=", "curr_file", ".", "split", "(", "mapping_indicator", ")", "mapping_files", "[", "(", "curr_mapping", "[", "0", "]", "+", "splitext", "(", "curr_mapping", "[", "1", "]", ")", "[", "0", "]", ")", "]", "=", "curr_file", "except", "IndexError", ":", "raise", "IndexError", "(", "(", "'Found file with a mapping file extension that does not contain the mapping file indicators (see mapping_indicator): %s'", "%", "curr_file", ")", ")", "for", "curr_file", "in", "all_fastq", ":", "curr_file_string_read", "=", "curr_file", ".", "split", "(", "read_indicator", ")", "curr_file_string_bc", "=", "curr_file", ".", "split", "(", "barcode_indicator", ")", "if", "(", "len", "(", "curr_file_string_read", ")", "==", "2", ")", ":", "read_files", ".", "append", "(", "curr_file_string_read", ")", "elif", "(", "len", "(", "curr_file_string_bc", ")", "==", "2", ")", ":", "barcode_files", ".", "append", "(", "curr_file_string_bc", ")", "else", ":", "raise", "ValueError", "(", "(", "(", "(", "'Invalid filename found for splitting on input '", "+", "(", "'for file %s, '", "%", "curr_file", ")", ")", "+", "'check input read indicator '", ")", "+", "'and barcode indicator parameters.'", ")", ")", "for", "curr_read", "in", "read_files", ":", "for", "curr_bc", "in", "barcode_files", ":", "if", "(", "curr_read", "==", "curr_bc", ")", ":", "curr_read_sans_ext", "=", "(", "curr_read", "[", "0", "]", "+", "curr_read", "[", "1", "]", ".", "split", "(", "'.f'", ")", "[", "0", "]", ")", "try", ":", "matching_files", "[", "read_indicator", ".", "join", "(", "curr_read", ")", "]", "=", "(", "barcode_indicator", ".", "join", "(", "curr_bc", ")", ",", "mapping_files", "[", "curr_read_sans_ext", "]", ")", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "'Found read file with no matching mapping file: %s'", "%", "read_indicator", ".", "join", "(", "curr_read", ")", ")", ")", "return", "matching_files" ]
get all file names in a directory .
train
false
13,024
def send_mail_jinja(subject, template, context, *args, **kwargs):
    autoescape_orig = env.autoescape
    env.autoescape = False
    template = env.get_template(template)
    msg = send_mail(subject, template.render(context), *args, **kwargs)
    env.autoescape = autoescape_orig
    return msg
[ "def", "send_mail_jinja", "(", "subject", ",", "template", ",", "context", ",", "*", "args", ",", "**", "kwargs", ")", ":", "autoescape_orig", "=", "env", ".", "autoescape", "env", ".", "autoescape", "=", "False", "template", "=", "env", ".", "get_template", "(", "template", ")", "msg", "=", "send_mail", "(", "subject", ",", "template", ".", "render", "(", "context", ")", ",", "*", "args", ",", "**", "kwargs", ")", "env", ".", "autoescape", "=", "autoescape_orig", "return", "msg" ]
sends mail using a jinja template with autoescaping turned off .
train
false
13,025
def dict_from_corpus(corpus):
    num_terms = (1 + get_max_id(corpus))
    id2word = FakeDict(num_terms)
    return id2word
[ "def", "dict_from_corpus", "(", "corpus", ")", ":", "num_terms", "=", "(", "1", "+", "get_max_id", "(", "corpus", ")", ")", "id2word", "=", "FakeDict", "(", "num_terms", ")", "return", "id2word" ]
scan corpus for all word ids that appear in it .
train
false
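A minimal sketch of the expected input, assuming the gensim-style helpers get_max_id and FakeDict used above are in scope; a corpus is an iterable of bag-of-words documents, each a list of (word_id, count) pairs:

    corpus = [[(0, 1), (3, 2)],   # doc 0 uses word ids 0 and 3
              [(2, 1)]]           # doc 1 uses word id 2
    id2word = dict_from_corpus(corpus)   # fake mapping covering ids 0..3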
13,026
def getNewRepository():
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
13,028
def file_data(value):
    def wrapper(func):
        setattr(func, FILE_ATTR, value)
        return func
    return wrapper
[ "def", "file_data", "(", "value", ")", ":", "def", "wrapper", "(", "func", ")", ":", "setattr", "(", "func", ",", "FILE_ATTR", ",", "value", ")", "return", "func", "return", "wrapper" ]
get the data for name .
train
false
13,030
def create_xml_path(path):
    try:
        with salt.utils.fopen(path, 'r') as fp_:
            return create_xml_str(fp_.read())
    except (OSError, IOError):
        return False
[ "def", "create_xml_path", "(", "path", ")", ":", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "path", ",", "'r'", ")", "as", "fp_", ":", "return", "create_xml_str", "(", "fp_", ".", "read", "(", ")", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "False" ]
start a domain based on the xml-file path passed to the function .
train
false
13,031
def is_super_call(expr):
    return (isinstance(expr, astroid.CallFunc) and
            isinstance(expr.func, astroid.Name) and
            (expr.func.name == 'super'))
[ "def", "is_super_call", "(", "expr", ")", ":", "return", "(", "isinstance", "(", "expr", ",", "astroid", ".", "CallFunc", ")", "and", "isinstance", "(", "expr", ".", "func", ",", "astroid", ".", "Name", ")", "and", "(", "expr", ".", "func", ".", "name", "==", "'super'", ")", ")" ]
return true if expression node is a function call and if function name is super .
train
false
13,032
def get_subnet_association(subnets, region=None, key=None, keyid=None, profile=None):
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        subnets = conn.get_all_subnets(subnet_ids=subnets)
    except BotoServerError as e:
        return {'error': salt.utils.boto.get_error(e)}
    vpc_ids = set()
    for subnet in subnets:
        log.debug('examining subnet id: {0} for vpc_id'.format(subnet.id))
        if (subnet in subnets):
            log.debug('subnet id: {0} is associated with vpc id: {1}'.format(subnet.id, subnet.vpc_id))
            vpc_ids.add(subnet.vpc_id)
    if (not vpc_ids):
        return {'vpc_id': None}
    elif (len(vpc_ids) == 1):
        return {'vpc_id': vpc_ids.pop()}
    else:
        return {'vpc_ids': list(vpc_ids)}
[ "def", "get_subnet_association", "(", "subnets", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "subnets", "=", "conn", ".", "get_all_subnets", "(", "subnet_ids", "=", "subnets", ")", "except", "BotoServerError", "as", "e", ":", "return", "{", "'error'", ":", "salt", ".", "utils", ".", "boto", ".", "get_error", "(", "e", ")", "}", "vpc_ids", "=", "set", "(", ")", "for", "subnet", "in", "subnets", ":", "log", ".", "debug", "(", "'examining subnet id: {0} for vpc_id'", ".", "format", "(", "subnet", ".", "id", ")", ")", "if", "(", "subnet", "in", "subnets", ")", ":", "log", ".", "debug", "(", "'subnet id: {0} is associated with vpc id: {1}'", ".", "format", "(", "subnet", ".", "id", ",", "subnet", ".", "vpc_id", ")", ")", "vpc_ids", ".", "add", "(", "subnet", ".", "vpc_id", ")", "if", "(", "not", "vpc_ids", ")", ":", "return", "{", "'vpc_id'", ":", "None", "}", "elif", "(", "len", "(", "vpc_ids", ")", "==", "1", ")", ":", "return", "{", "'vpc_id'", ":", "vpc_ids", ".", "pop", "(", ")", "}", "else", ":", "return", "{", "'vpc_ids'", ":", "list", "(", "vpc_ids", ")", "}" ]
given a subnet or list of subnets .
train
false
13,033
@pytest.fixture
def member2():
    from django.contrib.auth import get_user_model
    return get_user_model().objects.get(username='member2')
[ "@", "pytest", ".", "fixture", "def", "member2", "(", ")", ":", "from", "django", ".", "contrib", ".", "auth", "import", "get_user_model", "return", "get_user_model", "(", ")", ".", "objects", ".", "get", "(", "username", "=", "'member2'", ")" ]
require a member2 user .
train
false
13,034
def get_topic_similarity(topic_1, topic_2):
    if ((topic_1 in RECOMMENDATION_CATEGORIES) and (topic_2 in RECOMMENDATION_CATEGORIES)):
        topic_similarities = get_topic_similarities_dict()
        return topic_similarities[topic_1][topic_2]
    elif (topic_1 == topic_2):
        return feconf.SAME_TOPIC_SIMILARITY
    else:
        return feconf.DEFAULT_TOPIC_SIMILARITY
[ "def", "get_topic_similarity", "(", "topic_1", ",", "topic_2", ")", ":", "if", "(", "(", "topic_1", "in", "RECOMMENDATION_CATEGORIES", ")", "and", "(", "topic_2", "in", "RECOMMENDATION_CATEGORIES", ")", ")", ":", "topic_similarities", "=", "get_topic_similarities_dict", "(", ")", "return", "topic_similarities", "[", "topic_1", "]", "[", "topic_2", "]", "elif", "(", "topic_1", "==", "topic_2", ")", ":", "return", "feconf", ".", "SAME_TOPIC_SIMILARITY", "else", ":", "return", "feconf", ".", "DEFAULT_TOPIC_SIMILARITY" ]
gets the similarity between two topics .
train
false
13,035
def twoline(file_name, title, line1, line1_label, line2, line2_label, x_labels):
    line_chart = pygal.Line(include_x_axis=True)
    line_chart.title = title
    line_chart.x_labels = x_labels
    line_chart.add(line1_label, line1)
    line_chart.add(line2_label, line2)
    line_chart.render_to_file(file_name)
    return True
[ "def", "twoline", "(", "file_name", ",", "title", ",", "line1", ",", "line1_label", ",", "line2", ",", "line2_label", ",", "x_labels", ")", ":", "line_chart", "=", "pygal", ".", "Line", "(", "include_x_axis", "=", "True", ")", "line_chart", ".", "title", "=", "title", "line_chart", ".", "x_labels", "=", "x_labels", "line_chart", ".", "add", "(", "line1_label", ",", "line1", ")", "line_chart", ".", "add", "(", "line2_label", ",", "line2", ")", "line_chart", ".", "render_to_file", "(", "file_name", ")", "return", "True" ]
line1 is a list of data points ; line2 is a list of data points ; x_labels are labels that correspond to the data points in line1 and line2 .
train
false
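An illustrative call of twoline (hypothetical file name and data); pygal renders both series into a single SVG:

    twoline('temps.svg', 'Daily highs vs. lows',
            [8, 11, 3], 'High', [2, 4, 1], 'Low',
            ['Mon', 'Tue', 'Wed'])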
13,036
def get_hex_color(layer_type):
    COLORS = ['#4A88B3', '#98C1DE', '#6CA2C8', '#3173A2', '#17649B',
              '#FFBB60', '#FFDAA9', '#FFC981', '#FCAC41', '#F29416',
              '#C54AAA', '#E698D4', '#D56CBE', '#B72F99', '#B0108D',
              '#75DF54', '#B3F1A0', '#91E875', '#5DD637', '#3FCD12']
    hashed = (int(hash(layer_type)) % 5)
    if ('conv' in layer_type.lower()):
        return COLORS[:5][hashed]
    if (layer_type in lasagne.layers.pool.__all__):
        return COLORS[5:10][hashed]
    if (layer_type in lasagne.layers.recurrent.__all__):
        return COLORS[10:15][hashed]
    else:
        return COLORS[15:20][hashed]
[ "def", "get_hex_color", "(", "layer_type", ")", ":", "COLORS", "=", "[", "'#4A88B3'", ",", "'#98C1DE'", ",", "'#6CA2C8'", ",", "'#3173A2'", ",", "'#17649B'", ",", "'#FFBB60'", ",", "'#FFDAA9'", ",", "'#FFC981'", ",", "'#FCAC41'", ",", "'#F29416'", ",", "'#C54AAA'", ",", "'#E698D4'", ",", "'#D56CBE'", ",", "'#B72F99'", ",", "'#B0108D'", ",", "'#75DF54'", ",", "'#B3F1A0'", ",", "'#91E875'", ",", "'#5DD637'", ",", "'#3FCD12'", "]", "hashed", "=", "(", "int", "(", "hash", "(", "layer_type", ")", ")", "%", "5", ")", "if", "(", "'conv'", "in", "layer_type", ".", "lower", "(", ")", ")", ":", "return", "COLORS", "[", ":", "5", "]", "[", "hashed", "]", "if", "(", "layer_type", "in", "lasagne", ".", "layers", ".", "pool", ".", "__all__", ")", ":", "return", "COLORS", "[", "5", ":", "10", "]", "[", "hashed", "]", "if", "(", "layer_type", "in", "lasagne", ".", "layers", ".", "recurrent", ".", "__all__", ")", ":", "return", "COLORS", "[", "10", ":", "15", "]", "[", "hashed", "]", "else", ":", "return", "COLORS", "[", "15", ":", "20", "]", "[", "hashed", "]" ]
determines the hex color for a layer .
train
true
13,037
def hidden_param(parser, xml_parent, data):
    base_param(parser, xml_parent, data, True,
               'com.wangyin.parameter.WHideParameterDefinition')
[ "def", "hidden_param", "(", "parser", ",", "xml_parent", ",", "data", ")", ":", "base_param", "(", "parser", ",", "xml_parent", ",", "data", ",", "True", ",", "'com.wangyin.parameter.WHideParameterDefinition'", ")" ]
yaml: hidden allows you to use parameters hidden from the 'build with parameters' page .
train
false
13,038
def post_annotation(annotation, api_key): base_url = 'https://api.circonus.com/v2' anootate_post_endpoint = '/annotation' resp = requests.post((base_url + anootate_post_endpoint), headers=build_headers(api_key), data=json.dumps(annotation)) resp.raise_for_status() return resp
[ "def", "post_annotation", "(", "annotation", ",", "api_key", ")", ":", "base_url", "=", "'https://api.circonus.com/v2'", "anootate_post_endpoint", "=", "'/annotation'", "resp", "=", "requests", ".", "post", "(", "(", "base_url", "+", "anootate_post_endpoint", ")", ",", "headers", "=", "build_headers", "(", "api_key", ")", ",", "data", "=", "json", ".", "dumps", "(", "annotation", ")", ")", "resp", ".", "raise_for_status", "(", ")", "return", "resp" ]
takes annotation dict and api_key string .
train
false
13,039
def unload_module(modname): for (m, pm) in mpstate.modules: if (m.name == modname): if hasattr(m, 'unload'): m.unload() mpstate.modules.remove((m, pm)) print ('Unloaded module %s' % modname) return True print ('Unable to find module %s' % modname) return False
[ "def", "unload_module", "(", "modname", ")", ":", "for", "(", "m", ",", "pm", ")", "in", "mpstate", ".", "modules", ":", "if", "(", "m", ".", "name", "==", "modname", ")", ":", "if", "hasattr", "(", "m", ",", "'unload'", ")", ":", "m", ".", "unload", "(", ")", "mpstate", ".", "modules", ".", "remove", "(", "(", "m", ",", "pm", ")", ")", "print", "(", "'Unloaded module %s'", "%", "modname", ")", "return", "True", "print", "(", "'Unable to find module %s'", "%", "modname", ")", "return", "False" ]
removes a module .
train
true
13,041
def re_validate_invoice(obj_invoice): if obj_invoice.is_valid: return HttpResponseBadRequest(_('This invoice is already active.')) obj_invoice.is_valid = True obj_invoice.save() message = _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id) return JsonResponse({'message': message})
[ "def", "re_validate_invoice", "(", "obj_invoice", ")", ":", "if", "obj_invoice", ".", "is_valid", ":", "return", "HttpResponseBadRequest", "(", "_", "(", "'This invoice is already active.'", ")", ")", "obj_invoice", ".", "is_valid", "=", "True", "obj_invoice", ".", "save", "(", ")", "message", "=", "_", "(", "'The registration codes for invoice {0} have been re-activated.'", ")", ".", "format", "(", "obj_invoice", ".", "id", ")", "return", "JsonResponse", "(", "{", "'message'", ":", "message", "}", ")" ]
this method re-validates the sale against the invoice number .
train
false
13,042
@cronjobs.register def email_daily_ratings(): dt = (datetime.datetime.today() - datetime.timedelta(1)) yesterday = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0) today = (yesterday + datetime.timedelta(1)) pretty_date = ('%04d-%02d-%02d' % (dt.year, dt.month, dt.day)) yesterday_reviews = Review.objects.filter(created__gte=yesterday, created__lt=today) apps = set((review.addon for review in yesterday_reviews)) for app in apps: author_emails = app.authors.values_list('email', flat=True) subject = ('Firefox Marketplace reviews for %s on %s' % (app.name, pretty_date)) context = {'reviews': yesterday_reviews.filter(addon=app).order_by('-created'), 'base_url': settings.SITE_URL, 'pretty_date': pretty_date} send_mail_jinja(subject, 'ratings/emails/daily_digest.html', context, recipient_list=author_emails, perm_setting='app_new_review', async=True)
[ "@", "cronjobs", ".", "register", "def", "email_daily_ratings", "(", ")", ":", "dt", "=", "(", "datetime", ".", "datetime", ".", "today", "(", ")", "-", "datetime", ".", "timedelta", "(", "1", ")", ")", "yesterday", "=", "datetime", ".", "datetime", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "0", ",", "0", ",", "0", ")", "today", "=", "(", "yesterday", "+", "datetime", ".", "timedelta", "(", "1", ")", ")", "pretty_date", "=", "(", "'%04d-%02d-%02d'", "%", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ")", ")", "yesterday_reviews", "=", "Review", ".", "objects", ".", "filter", "(", "created__gte", "=", "yesterday", ",", "created__lt", "=", "today", ")", "apps", "=", "set", "(", "(", "review", ".", "addon", "for", "review", "in", "yesterday_reviews", ")", ")", "for", "app", "in", "apps", ":", "author_emails", "=", "app", ".", "authors", ".", "values_list", "(", "'email'", ",", "flat", "=", "True", ")", "subject", "=", "(", "'Firefox Marketplace reviews for %s on %s'", "%", "(", "app", ".", "name", ",", "pretty_date", ")", ")", "context", "=", "{", "'reviews'", ":", "yesterday_reviews", ".", "filter", "(", "addon", "=", "app", ")", ".", "order_by", "(", "'-created'", ")", ",", "'base_url'", ":", "settings", ".", "SITE_URL", ",", "'pretty_date'", ":", "pretty_date", "}", "send_mail_jinja", "(", "subject", ",", "'ratings/emails/daily_digest.html'", ",", "context", ",", "recipient_list", "=", "author_emails", ",", "perm_setting", "=", "'app_new_review'", ",", "async", "=", "True", ")" ]
sends the daily email digest for yesterday's ratings .
train
false
13,045
def _build_subs_mat(obs_freq_mat, exp_freq_mat): if (obs_freq_mat.ab_list != exp_freq_mat.ab_list): raise ValueError('Alphabet mismatch in passed matrices') subs_mat = SubstitutionMatrix(obs_freq_mat) for i in obs_freq_mat: subs_mat[i] = (obs_freq_mat[i] / exp_freq_mat[i]) return subs_mat
[ "def", "_build_subs_mat", "(", "obs_freq_mat", ",", "exp_freq_mat", ")", ":", "if", "(", "obs_freq_mat", ".", "ab_list", "!=", "exp_freq_mat", ".", "ab_list", ")", ":", "raise", "ValueError", "(", "'Alphabet mismatch in passed matrices'", ")", "subs_mat", "=", "SubstitutionMatrix", "(", "obs_freq_mat", ")", "for", "i", "in", "obs_freq_mat", ":", "subs_mat", "[", "i", "]", "=", "(", "obs_freq_mat", "[", "i", "]", "/", "exp_freq_mat", "[", "i", "]", ")", "return", "subs_mat" ]
build the substitution matrix .
train
false
13,046
@contextlib.contextmanager def hide_attrs(obj, *attrs): obj_vals = (obj.__dict__ if (obj is not None) else {}) saved_vals = {} for name in attrs: saved_vals[name] = obj_vals.pop(name, _marker) try: (yield) finally: for name in attrs: saved_val = saved_vals[name] if (saved_val is not _marker): obj_vals[name] = saved_val elif (name in obj_vals): del obj_vals[name]
[ "@", "contextlib", ".", "contextmanager", "def", "hide_attrs", "(", "obj", ",", "*", "attrs", ")", ":", "obj_vals", "=", "(", "obj", ".", "__dict__", "if", "(", "obj", "is", "not", "None", ")", "else", "{", "}", ")", "saved_vals", "=", "{", "}", "for", "name", "in", "attrs", ":", "saved_vals", "[", "name", "]", "=", "obj_vals", ".", "pop", "(", "name", ",", "_marker", ")", "try", ":", "(", "yield", ")", "finally", ":", "for", "name", "in", "attrs", ":", "saved_val", "=", "saved_vals", "[", "name", "]", "if", "(", "saved_val", "is", "not", "_marker", ")", ":", "obj_vals", "[", "name", "]", "=", "saved_val", "elif", "(", "name", "in", "obj_vals", ")", ":", "del", "obj_vals", "[", "name", "]" ]
temporarily delete object attrs and restore afterward .
train
false
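A small usage sketch for the hide_attrs context manager in the record above: it temporarily removes instance attributes and puts them back on exit. It assumes the module-level _marker sentinel that the implementation references is defined alongside it.

class Config:
    pass

cfg = Config()
cfg.debug = True

with hide_attrs(cfg, 'debug'):
    assert not hasattr(cfg, 'debug')   # attribute is hidden inside the block
assert cfg.debug is True               # and restored afterwards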
13,047
def ceil_div(x, y): return (- ((- x) // y))
[ "def", "ceil_div", "(", "x", ",", "y", ")", ":", "return", "(", "-", "(", "(", "-", "x", ")", "//", "y", ")", ")" ]
return ceil without performing any floating-point operations .
train
false
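The ceil_div record above uses the -(-x // y) identity, which works because Python's floor division rounds toward negative infinity; a few worked checks with illustrative values:

assert ceil_div(7, 2) == 4     # 7 / 2 = 3.5 rounds up to 4
assert ceil_div(6, 2) == 3     # exact divisions are unchanged
assert ceil_div(-7, 2) == -3   # ceil(-3.5) is -3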
13,048
def distribute_keys(): local(('ssh-copy-id -i ~/.ssh/sahana_release.pub %s@%s' % (env.user, env.host)))
[ "def", "distribute_keys", "(", ")", ":", "local", "(", "(", "'ssh-copy-id -i ~/.ssh/sahana_release.pub %s@%s'", "%", "(", "env", ".", "user", ",", "env", ".", "host", ")", ")", ")" ]
distribute keys to servers - requires preceding with test or prod .
train
false
13,051
@must_be_logged_in def personal_access_token_register(auth, **kwargs): token_list_url = api_v2_url('tokens/') return {'token_list_url': token_list_url, 'token_detail_url': '', 'scope_options': get_available_scopes()}
[ "@", "must_be_logged_in", "def", "personal_access_token_register", "(", "auth", ",", "**", "kwargs", ")", ":", "token_list_url", "=", "api_v2_url", "(", "'tokens/'", ")", "return", "{", "'token_list_url'", ":", "token_list_url", ",", "'token_detail_url'", ":", "''", ",", "'scope_options'", ":", "get_available_scopes", "(", ")", "}" ]
register a personal access token: blank form view .
train
false
13,052
def add_feature(feature, package=None, source=None, limit_access=False, enable_parent=False, image=None, restart=False): cmd = ['DISM', '/Quiet', ('/Image:{0}'.format(image) if image else '/Online'), '/Enable-Feature', '/FeatureName:{0}'.format(feature)] if package: cmd.append('/PackageName:{0}'.format(package)) if source: cmd.append('/Source:{0}'.format(source)) if limit_access: cmd.append('/LimitAccess') if enable_parent: cmd.append('/All') if (not restart): cmd.append('/NoRestart') return __salt__['cmd.run_all'](cmd)
[ "def", "add_feature", "(", "feature", ",", "package", "=", "None", ",", "source", "=", "None", ",", "limit_access", "=", "False", ",", "enable_parent", "=", "False", ",", "image", "=", "None", ",", "restart", "=", "False", ")", ":", "cmd", "=", "[", "'DISM'", ",", "'/Quiet'", ",", "(", "'/Image:{0}'", ".", "format", "(", "image", ")", "if", "image", "else", "'/Online'", ")", ",", "'/Enable-Feature'", ",", "'/FeatureName:{0}'", ".", "format", "(", "feature", ")", "]", "if", "package", ":", "cmd", ".", "append", "(", "'/PackageName:{0}'", ".", "format", "(", "package", ")", ")", "if", "source", ":", "cmd", ".", "append", "(", "'/Source:{0}'", ".", "format", "(", "source", ")", ")", "if", "limit_access", ":", "cmd", ".", "append", "(", "'/LimitAccess'", ")", "if", "enable_parent", ":", "cmd", ".", "append", "(", "'/All'", ")", "if", "(", "not", "restart", ")", ":", "cmd", ".", "append", "(", "'/NoRestart'", ")", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")" ]
install a feature using dism args: feature : the feature to install package : the parent package for the feature .
train
true
13,055
def _IsSourceExtension(s): return (s in GetNonHeaderExtensions())
[ "def", "_IsSourceExtension", "(", "s", ")", ":", "return", "(", "s", "in", "GetNonHeaderExtensions", "(", ")", ")" ]
file extension matches a source file extension .
train
false
13,056
def test_getslice(): class l(list, ): def __len__(self): raise Exception() x = l() AreEqual(x.__getslice__((-1), (-200)), []) class mylist(list, ): def __getslice__(self, i, j): return (i, j) class mylong(long, ): pass class myint(int, ): pass for listType in (list, mylist): for input in [0, 1, False, True, myint(0), myint(1), mylong(0), mylong(1), (-1), myint((-1)), mylong((-1))]: for x in listType(range(5))[input:input]: AreEqual(type(x), int)
[ "def", "test_getslice", "(", ")", ":", "class", "l", "(", "list", ",", ")", ":", "def", "__len__", "(", "self", ")", ":", "raise", "Exception", "(", ")", "x", "=", "l", "(", ")", "AreEqual", "(", "x", ".", "__getslice__", "(", "(", "-", "1", ")", ",", "(", "-", "200", ")", ")", ",", "[", "]", ")", "class", "mylist", "(", "list", ",", ")", ":", "def", "__getslice__", "(", "self", ",", "i", ",", "j", ")", ":", "return", "(", "i", ",", "j", ")", "class", "mylong", "(", "long", ",", ")", ":", "pass", "class", "myint", "(", "int", ",", ")", ":", "pass", "for", "listType", "in", "(", "list", ",", "mylist", ")", ":", "for", "input", "in", "[", "0", ",", "1", ",", "False", ",", "True", ",", "myint", "(", "0", ")", ",", "myint", "(", "1", ")", ",", "mylong", "(", "0", ")", ",", "mylong", "(", "1", ")", ",", "(", "-", "1", ")", ",", "myint", "(", "(", "-", "1", ")", ")", ",", "mylong", "(", "(", "-", "1", ")", ")", "]", ":", "for", "x", "in", "listType", "(", "range", "(", "5", ")", ")", "[", "input", ":", "input", "]", ":", "AreEqual", "(", "type", "(", "x", ")", ",", "int", ")" ]
an overridden __len__ doesn't get called when doing __getslice__ .
train
false
13,057
def plot_colored_sinusoidal_lines(ax): L = (2 * np.pi) x = np.linspace(0, L) nb_colors = len(plt.rcParams['axes.prop_cycle']) shift = np.linspace(0, L, nb_colors, endpoint=False) for s in shift: ax.plot(x, np.sin((x + s)), '-') ax.set_xlim([x[0], x[(-1)]]) return ax
[ "def", "plot_colored_sinusoidal_lines", "(", "ax", ")", ":", "L", "=", "(", "2", "*", "np", ".", "pi", ")", "x", "=", "np", ".", "linspace", "(", "0", ",", "L", ")", "nb_colors", "=", "len", "(", "plt", ".", "rcParams", "[", "'axes.prop_cycle'", "]", ")", "shift", "=", "np", ".", "linspace", "(", "0", ",", "L", ",", "nb_colors", ",", "endpoint", "=", "False", ")", "for", "s", "in", "shift", ":", "ax", ".", "plot", "(", "x", ",", "np", ".", "sin", "(", "(", "x", "+", "s", ")", ")", ",", "'-'", ")", "ax", ".", "set_xlim", "(", "[", "x", "[", "0", "]", ",", "x", "[", "(", "-", "1", ")", "]", "]", ")", "return", "ax" ]
plot sinusoidal lines with colors following the style color cycle .
train
false
13,058
def getSettingsPath(subName=''): global globalTemporarySettingsPath return getJoinedPath(globalTemporarySettingsPath, subName)
[ "def", "getSettingsPath", "(", "subName", "=", "''", ")", ":", "global", "globalTemporarySettingsPath", "return", "getJoinedPath", "(", "globalTemporarySettingsPath", ",", "subName", ")" ]
get the settings directory path .
train
false
13,059
def save_method_args(method): args_and_kwargs = collections.namedtuple(u'args_and_kwargs', u'args kwargs') @functools.wraps(method) def wrapper(self, *args, **kwargs): attr_name = (u'_saved_' + method.__name__) attr = args_and_kwargs(args, kwargs) setattr(self, attr_name, attr) return method(self, *args, **kwargs) return wrapper
[ "def", "save_method_args", "(", "method", ")", ":", "args_and_kwargs", "=", "collections", ".", "namedtuple", "(", "u'args_and_kwargs'", ",", "u'args kwargs'", ")", "@", "functools", ".", "wraps", "(", "method", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "attr_name", "=", "(", "u'_saved_'", "+", "method", ".", "__name__", ")", "attr", "=", "args_and_kwargs", "(", "args", ",", "kwargs", ")", "setattr", "(", "self", ",", "attr_name", ",", "attr", ")", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
wrap a method such that when it is called .
train
false
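A usage sketch for the save_method_args decorator in the record above; the class, method, and argument values are hypothetical.

class Request:
    @save_method_args
    def configure(self, url, timeout=30):
        self.url = url
        self.timeout = timeout

r = Request()
r.configure('https://example.com', timeout=5)
# The wrapper stores the call arguments on the instance as _saved_<method name>.
assert r._saved_configure.args == ('https://example.com',)
assert r._saved_configure.kwargs == {'timeout': 5}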
13,060
def parse_date_from_string(datetime_str): datetime_obj = datetime.datetime.strptime(datetime_str, feconf.DASHBOARD_STATS_DATETIME_STRING_FORMAT) return {'year': datetime_obj.year, 'month': datetime_obj.month, 'day': datetime_obj.day}
[ "def", "parse_date_from_string", "(", "datetime_str", ")", ":", "datetime_obj", "=", "datetime", ".", "datetime", ".", "strptime", "(", "datetime_str", ",", "feconf", ".", "DASHBOARD_STATS_DATETIME_STRING_FORMAT", ")", "return", "{", "'year'", ":", "datetime_obj", ".", "year", ",", "'month'", ":", "datetime_obj", ".", "month", ",", "'day'", ":", "datetime_obj", ".", "day", "}" ]
parses the given string .
train
false
13,061
def pack_nibbles(nibbles): if (nibbles[(-1):] == [NIBBLE_TERMINATOR]): flags = 2 nibbles = nibbles[:(-1)] else: flags = 0 oddlen = (len(nibbles) % 2) flags |= oddlen if oddlen: nibbles = ([flags] + nibbles) else: nibbles = ([flags, 0] + nibbles) o = '' for i in range(0, len(nibbles), 2): o += ascii_chr(((16 * nibbles[i]) + nibbles[(i + 1)])) return o
[ "def", "pack_nibbles", "(", "nibbles", ")", ":", "if", "(", "nibbles", "[", "(", "-", "1", ")", ":", "]", "==", "[", "NIBBLE_TERMINATOR", "]", ")", ":", "flags", "=", "2", "nibbles", "=", "nibbles", "[", ":", "(", "-", "1", ")", "]", "else", ":", "flags", "=", "0", "oddlen", "=", "(", "len", "(", "nibbles", ")", "%", "2", ")", "flags", "|=", "oddlen", "if", "oddlen", ":", "nibbles", "=", "(", "[", "flags", "]", "+", "nibbles", ")", "else", ":", "nibbles", "=", "(", "[", "flags", ",", "0", "]", "+", "nibbles", ")", "o", "=", "''", "for", "i", "in", "range", "(", "0", ",", "len", "(", "nibbles", ")", ",", "2", ")", ":", "o", "+=", "ascii_chr", "(", "(", "(", "16", "*", "nibbles", "[", "i", "]", ")", "+", "nibbles", "[", "(", "i", "+", "1", ")", "]", ")", ")", "return", "o" ]
pack nibbles to binary .
train
true
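A worked example of the hex-prefix packing in the pack_nibbles record above, assuming the usual trie conventions that NIBBLE_TERMINATOR is 16 and ascii_chr returns a one-character (Python 2 style) byte string; both names come from elsewhere in that module.

# Odd-length key [1, 2, 3], no terminator: flags = 1, stream becomes [1, 1, 2, 3],
# which packs to the two bytes 0x11 and 0x23.
assert pack_nibbles([1, 2, 3]) == '\x11\x23'
# Even-length key [1, 2]: flags = 0 plus a padding nibble, stream [0, 0, 1, 2],
# packing to 0x00 and 0x12.
assert pack_nibbles([1, 2]) == '\x00\x12'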
13,063
def sine(w, A=1, phi=0, offset=0): from math import sin def f(i): return ((A * sin(((w * i) + phi))) + offset) return partial(_force, sequence=_advance(f))
[ "def", "sine", "(", "w", ",", "A", "=", "1", ",", "phi", "=", "0", ",", "offset", "=", "0", ")", ":", "from", "math", "import", "sin", "def", "f", "(", "i", ")", ":", "return", "(", "(", "A", "*", "sin", "(", "(", "(", "w", "*", "i", ")", "+", "phi", ")", ")", ")", "+", "offset", ")", "return", "partial", "(", "_force", ",", "sequence", "=", "_advance", "(", "f", ")", ")" ]
return a driver function that can advance a sequence of sine values .
train
true
13,064
def upper_tri_to_full(n): entries = ((n * (n + 1)) // 2) val_arr = [] row_arr = [] col_arr = [] count = 0 for i in range(n): for j in range(i, n): col_arr.append(count) row_arr.append(((j * n) + i)) val_arr.append(1.0) if (i != j): col_arr.append(count) row_arr.append(((i * n) + j)) val_arr.append(1.0) count += 1 return sp.coo_matrix((val_arr, (row_arr, col_arr)), ((n * n), entries)).tocsc()
[ "def", "upper_tri_to_full", "(", "n", ")", ":", "entries", "=", "(", "(", "n", "*", "(", "n", "+", "1", ")", ")", "//", "2", ")", "val_arr", "=", "[", "]", "row_arr", "=", "[", "]", "col_arr", "=", "[", "]", "count", "=", "0", "for", "i", "in", "range", "(", "n", ")", ":", "for", "j", "in", "range", "(", "i", ",", "n", ")", ":", "col_arr", ".", "append", "(", "count", ")", "row_arr", ".", "append", "(", "(", "(", "j", "*", "n", ")", "+", "i", ")", ")", "val_arr", ".", "append", "(", "1.0", ")", "if", "(", "i", "!=", "j", ")", ":", "col_arr", ".", "append", "(", "count", ")", "row_arr", ".", "append", "(", "(", "(", "i", "*", "n", ")", "+", "j", ")", ")", "val_arr", ".", "append", "(", "1.0", ")", "count", "+=", "1", "return", "sp", ".", "coo_matrix", "(", "(", "val_arr", ",", "(", "row_arr", ",", "col_arr", ")", ")", ",", "(", "(", "n", "*", "n", ")", ",", "entries", ")", ")", ".", "tocsc", "(", ")" ]
returns a coefficient matrix to create a symmetric matrix .
train
false
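A quick check of the coefficient matrix from the upper_tri_to_full record above for n = 2: the three packed upper-triangular entries are scattered into all four entries of the full symmetric matrix. The numbers are illustrative.

import numpy as np

A = upper_tri_to_full(2)            # sparse CSC matrix of shape (4, 3)
packed = np.array([1.0, 2.0, 3.0])  # [x00, x01, x11]
full = (A @ packed).reshape(2, 2)
# The single off-diagonal value x01 lands in both symmetric positions.
assert np.allclose(full, [[1.0, 2.0], [2.0, 3.0]])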
13,068
def generate_repository_dependencies_key_for_repository(toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td): tool_shed = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) return ('%s%s%s%s%s%s%s%s%s%s%s' % (tool_shed, STRSEP, str(repository_name), STRSEP, str(repository_owner), STRSEP, str(changeset_revision), STRSEP, str(prior_installation_required), STRSEP, str(only_if_compiling_contained_td)))
[ "def", "generate_repository_dependencies_key_for_repository", "(", "toolshed_base_url", ",", "repository_name", ",", "repository_owner", ",", "changeset_revision", ",", "prior_installation_required", ",", "only_if_compiling_contained_td", ")", ":", "tool_shed", "=", "common_util", ".", "remove_protocol_from_tool_shed_url", "(", "toolshed_base_url", ")", "return", "(", "'%s%s%s%s%s%s%s%s%s%s%s'", "%", "(", "tool_shed", ",", "STRSEP", ",", "str", "(", "repository_name", ")", ",", "STRSEP", ",", "str", "(", "repository_owner", ")", ",", "STRSEP", ",", "str", "(", "changeset_revision", ")", ",", "STRSEP", ",", "str", "(", "prior_installation_required", ")", ",", "STRSEP", ",", "str", "(", "only_if_compiling_contained_td", ")", ")", ")" ]
assumes the tool shed is the current tool shed since repository dependencies across tool sheds are not yet supported .
train
false
13,069
def GetInvisibleSpecialPropertyNames(): invisible_names = [] for (name, value) in _SPECIAL_PROPERTY_MAP.items(): (is_visible, _, _) = value if (not is_visible): invisible_names.append(name) return invisible_names
[ "def", "GetInvisibleSpecialPropertyNames", "(", ")", ":", "invisible_names", "=", "[", "]", "for", "(", "name", ",", "value", ")", "in", "_SPECIAL_PROPERTY_MAP", ".", "items", "(", ")", ":", "(", "is_visible", ",", "_", ",", "_", ")", "=", "value", "if", "(", "not", "is_visible", ")", ":", "invisible_names", ".", "append", "(", "name", ")", "return", "invisible_names" ]
gets the names of all non user-visible special properties .
train
false
13,071
def remove_local_modules_from_sys(testdir): for (modname, mod) in list(sys.modules.items()): if (mod is None): continue if (not hasattr(mod, '__file__')): continue modfile = mod.__file__ if ((not osp.isabs(modfile)) or modfile.startswith(testdir)): del sys.modules[modname]
[ "def", "remove_local_modules_from_sys", "(", "testdir", ")", ":", "for", "(", "modname", ",", "mod", ")", "in", "list", "(", "sys", ".", "modules", ".", "items", "(", ")", ")", ":", "if", "(", "mod", "is", "None", ")", ":", "continue", "if", "(", "not", "hasattr", "(", "mod", ",", "'__file__'", ")", ")", ":", "continue", "modfile", "=", "mod", ".", "__file__", "if", "(", "(", "not", "osp", ".", "isabs", "(", "modfile", ")", ")", "or", "modfile", ".", "startswith", "(", "testdir", ")", ")", ":", "del", "sys", ".", "modules", "[", "modname", "]" ]
remove all modules from cache that come from testdir this is used to avoid strange side-effects when using the testall() mode of pytest .
train
false
13,073
def write_cookie(cls, COOKIE_NAME, COOKIE_VALUE, path, expires=7200): time_expire = (datetime.now() + timedelta(seconds=expires)) time_expire = time_expire.strftime('%a, %d-%b-%Y %H:%M:%S GMT') cls.response.headers.add_header('Set-Cookie', (((((((COOKIE_NAME + '=') + COOKIE_VALUE) + '; expires=') + str(time_expire)) + '; path=') + path) + '; HttpOnly')) return
[ "def", "write_cookie", "(", "cls", ",", "COOKIE_NAME", ",", "COOKIE_VALUE", ",", "path", ",", "expires", "=", "7200", ")", ":", "time_expire", "=", "(", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "expires", ")", ")", "time_expire", "=", "time_expire", ".", "strftime", "(", "'%a, %d-%b-%Y %H:%M:%S GMT'", ")", "cls", ".", "response", ".", "headers", ".", "add_header", "(", "'Set-Cookie'", ",", "(", "(", "(", "(", "(", "(", "(", "COOKIE_NAME", "+", "'='", ")", "+", "COOKIE_VALUE", ")", "+", "'; expires='", ")", "+", "str", "(", "time_expire", ")", ")", "+", "'; path='", ")", "+", "path", ")", "+", "'; HttpOnly'", ")", ")", "return" ]
write a cookie @path = could be a cls .
train
false
13,076
def _try_until_true(func, max_tries=5, sleep_time=0.5): for _ in xrange(0, max_tries): if func(): return True else: time.sleep(sleep_time) return False
[ "def", "_try_until_true", "(", "func", ",", "max_tries", "=", "5", ",", "sleep_time", "=", "0.5", ")", ":", "for", "_", "in", "xrange", "(", "0", ",", "max_tries", ")", ":", "if", "func", "(", ")", ":", "return", "True", "else", ":", "time", ".", "sleep", "(", "sleep_time", ")", "return", "False" ]
calls func up to max_tries times until it returns true .
train
false
13,077
def _expiry_range_batched(session, upper_bound_func, batch_size): LOG.debug('Token expiration batch size: %d', batch_size) query = session.query(TokenModel.expires) query = query.filter((TokenModel.expires < upper_bound_func())) query = query.order_by(TokenModel.expires) query = query.offset((batch_size - 1)) query = query.limit(1) while True: try: next_expiration = query.one()[0] except sql.NotFound: break (yield next_expiration) (yield upper_bound_func())
[ "def", "_expiry_range_batched", "(", "session", ",", "upper_bound_func", ",", "batch_size", ")", ":", "LOG", ".", "debug", "(", "'Token expiration batch size: %d'", ",", "batch_size", ")", "query", "=", "session", ".", "query", "(", "TokenModel", ".", "expires", ")", "query", "=", "query", ".", "filter", "(", "(", "TokenModel", ".", "expires", "<", "upper_bound_func", "(", ")", ")", ")", "query", "=", "query", ".", "order_by", "(", "TokenModel", ".", "expires", ")", "query", "=", "query", ".", "offset", "(", "(", "batch_size", "-", "1", ")", ")", "query", "=", "query", ".", "limit", "(", "1", ")", "while", "True", ":", "try", ":", "next_expiration", "=", "query", ".", "one", "(", ")", "[", "0", "]", "except", "sql", ".", "NotFound", ":", "break", "(", "yield", "next_expiration", ")", "(", "yield", "upper_bound_func", "(", ")", ")" ]
return the stop point of the next batch for expiration .
train
false
13,078
@decorators.memoize def _check_pkgin(): ppath = salt.utils.which('pkgin') if (ppath is None): try: localbase = __salt__['cmd.run']('pkg_info -Q LOCALBASE pkgin', output_loglevel='trace') if (localbase is not None): ppath = '{0}/bin/pkgin'.format(localbase) if (not os.path.exists(ppath)): return None except CommandExecutionError: return None return ppath
[ "@", "decorators", ".", "memoize", "def", "_check_pkgin", "(", ")", ":", "ppath", "=", "salt", ".", "utils", ".", "which", "(", "'pkgin'", ")", "if", "(", "ppath", "is", "None", ")", ":", "try", ":", "localbase", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'pkg_info -Q LOCALBASE pkgin'", ",", "output_loglevel", "=", "'trace'", ")", "if", "(", "localbase", "is", "not", "None", ")", ":", "ppath", "=", "'{0}/bin/pkgin'", ".", "format", "(", "localbase", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "ppath", ")", ")", ":", "return", "None", "except", "CommandExecutionError", ":", "return", "None", "return", "ppath" ]
looks to see if pkgin is present on the system .
train
true
13,079
def getIsInFilledRegionByPaths(loops, paths): for path in paths: if (len(path) > 0): if getIsInFilledRegion(loops, path[0]): return True return False
[ "def", "getIsInFilledRegionByPaths", "(", "loops", ",", "paths", ")", ":", "for", "path", "in", "paths", ":", "if", "(", "len", "(", "path", ")", ">", "0", ")", ":", "if", "getIsInFilledRegion", "(", "loops", ",", "path", "[", "0", "]", ")", ":", "return", "True", "return", "False" ]
determine if the point of any path is in the filled region of the loops .
train
false
13,080
@handle_response_format @treeio_login_required def weblink_view(request, weblink_id, response_format='html'): link = get_object_or_404(WebLink, pk=weblink_id) if (not request.user.profile.has_permission(link)): return user_denied(request, message="You don't have access to this Web Link") context = _get_default_context(request) context.update({'link': link}) return render_to_response('documents/weblink_view', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "weblink_view", "(", "request", ",", "weblink_id", ",", "response_format", "=", "'html'", ")", ":", "link", "=", "get_object_or_404", "(", "WebLink", ",", "pk", "=", "weblink_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "link", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Web Link\"", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'link'", ":", "link", "}", ")", "return", "render_to_response", "(", "'documents/weblink_view'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
weblink page .
train
false
13,081
def get_ha1_file_htdigest(filename): def get_ha1(realm, username): result = None f = open(filename, 'r') for line in f: (u, r, ha1) = line.rstrip().split(':') if ((u == username) and (r == realm)): result = ha1 break f.close() return result return get_ha1
[ "def", "get_ha1_file_htdigest", "(", "filename", ")", ":", "def", "get_ha1", "(", "realm", ",", "username", ")", ":", "result", "=", "None", "f", "=", "open", "(", "filename", ",", "'r'", ")", "for", "line", "in", "f", ":", "(", "u", ",", "r", ",", "ha1", ")", "=", "line", ".", "rstrip", "(", ")", ".", "split", "(", "':'", ")", "if", "(", "(", "u", "==", "username", ")", "and", "(", "r", "==", "realm", ")", ")", ":", "result", "=", "ha1", "break", "f", ".", "close", "(", ")", "return", "result", "return", "get_ha1" ]
returns a get_ha1 function which obtains a ha1 password hash from a flat file with lines of the same format as that produced by the apache htdigest utility .
train
false
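A usage sketch for the flat-file lookup in the record above; the user, realm, and hash are invented, and in practice the third field is the MD5 digest written by Apache's htdigest utility.

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.htdigest', delete=False) as fh:
    fh.write('alice:localhost:0123456789abcdef0123456789abcdef\n')
    path = fh.name

get_ha1 = get_ha1_file_htdigest(path)
assert get_ha1('localhost', 'alice') == '0123456789abcdef0123456789abcdef'
assert get_ha1('localhost', 'bob') is None  # unknown users fall through to None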
13,082
def back_transcribe(rna): if isinstance(rna, Seq): return rna.back_transcribe() elif isinstance(rna, MutableSeq): return rna.toseq().back_transcribe() else: return rna.replace('U', 'T').replace('u', 't')
[ "def", "back_transcribe", "(", "rna", ")", ":", "if", "isinstance", "(", "rna", ",", "Seq", ")", ":", "return", "rna", ".", "back_transcribe", "(", ")", "elif", "isinstance", "(", "rna", ",", "MutableSeq", ")", ":", "return", "rna", ".", "toseq", "(", ")", ".", "back_transcribe", "(", ")", "else", ":", "return", "rna", ".", "replace", "(", "'U'", ",", "'T'", ")", ".", "replace", "(", "'u'", ",", "'t'", ")" ]
back-transcribes an rna sequence into dna .
train
false
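For the plain-string branch of back_transcribe above the behaviour is just a U-to-T substitution; Seq and MutableSeq inputs are delegated to Biopython's own back_transcribe methods.

assert back_transcribe('AUGGCCAUU') == 'ATGGCCATT'
assert back_transcribe('auggcc') == 'atggcc'  # lowercase u is handled too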
13,084
def get_video_dims(fname): try: import pyffmpeg except ImportError: raise ImportError('This function requires pyffmpeg <http://code.google.com/p/pyffmpeg/>') mp = pyffmpeg.FFMpegReader() try: mp.open(fname) tracks = mp.get_tracks() for track in tracks: if isinstance(track, pyffmpeg.VideoTrack): break else: raise ValueError('no video track found') return ((track.duration(),) + track.get_orig_size()) finally: mp.close()
[ "def", "get_video_dims", "(", "fname", ")", ":", "try", ":", "import", "pyffmpeg", "except", "ImportError", ":", "raise", "ImportError", "(", "'This function requires pyffmpeg <http://code.google.com/p/pyffmpeg/>'", ")", "mp", "=", "pyffmpeg", ".", "FFMpegReader", "(", ")", "try", ":", "mp", ".", "open", "(", "fname", ")", "tracks", "=", "mp", ".", "get_tracks", "(", ")", "for", "track", "in", "tracks", ":", "if", "isinstance", "(", "track", ",", "pyffmpeg", ".", "VideoTrack", ")", ":", "break", "else", ":", "raise", "ValueError", "(", "'no video track found'", ")", "return", "(", "(", "track", ".", "duration", "(", ")", ",", ")", "+", "track", ".", "get_orig_size", "(", ")", ")", "finally", ":", "mp", ".", "close", "(", ")" ]
pull out the frame length .
train
false
13,085
def is_driver_compatible(context, driver, interface, host_agent_types): is_interface_compatible = driver.is_interface_compatible(interface) if (not driver.agent_type): return is_interface_compatible return (is_interface_compatible and (driver.agent_type in host_agent_types))
[ "def", "is_driver_compatible", "(", "context", ",", "driver", ",", "interface", ",", "host_agent_types", ")", ":", "is_interface_compatible", "=", "driver", ".", "is_interface_compatible", "(", "interface", ")", "if", "(", "not", "driver", ".", "agent_type", ")", ":", "return", "is_interface_compatible", "return", "(", "is_interface_compatible", "and", "(", "driver", ".", "agent_type", "in", "host_agent_types", ")", ")" ]
true if the driver is compatible with interface and host_agent_types .
train
false
13,086
def stream_with_context(generator_or_function): try: gen = iter(generator_or_function) except TypeError: def decorator(*args, **kwargs): gen = generator_or_function() return stream_with_context(gen) return update_wrapper(decorator, generator_or_function) def generator(): ctx = _request_ctx_stack.top if (ctx is None): raise RuntimeError('Attempted to stream with context but there was no context in the first place to keep around.') with ctx: (yield None) try: for item in gen: (yield item) finally: if hasattr(gen, 'close'): gen.close() wrapped_g = generator() next(wrapped_g) return wrapped_g
[ "def", "stream_with_context", "(", "generator_or_function", ")", ":", "try", ":", "gen", "=", "iter", "(", "generator_or_function", ")", "except", "TypeError", ":", "def", "decorator", "(", "*", "args", ",", "**", "kwargs", ")", ":", "gen", "=", "generator_or_function", "(", ")", "return", "stream_with_context", "(", "gen", ")", "return", "update_wrapper", "(", "decorator", ",", "generator_or_function", ")", "def", "generator", "(", ")", ":", "ctx", "=", "_request_ctx_stack", ".", "top", "if", "(", "ctx", "is", "None", ")", ":", "raise", "RuntimeError", "(", "'Attempted to stream with context but there was no context in the first place to keep around.'", ")", "with", "ctx", ":", "(", "yield", "None", ")", "try", ":", "for", "item", "in", "gen", ":", "(", "yield", "item", ")", "finally", ":", "if", "hasattr", "(", "gen", ",", "'close'", ")", ":", "gen", ".", "close", "(", ")", "wrapped_g", "=", "generator", "(", ")", "next", "(", "wrapped_g", ")", "return", "wrapped_g" ]
request contexts disappear when the response is started on the server .
train
true
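Typical Flask usage of stream_with_context, following the pattern from Flask's documentation; the route and generator below are illustrative.

from flask import Flask, Response, request, stream_with_context

app = Flask(__name__)

@app.route('/stream')
def streamed_response():
    def generate():
        # Without stream_with_context the request object would be gone
        # by the time the generator runs.
        yield 'Hello '
        yield request.args.get('name', 'World')
        yield '!'
    return Response(stream_with_context(generate()))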
13,090
def test_proxy_get(): client = Client() client.login(username='test', password='test') (httpd, finish) = run_test_server() try: finish_conf = proxy.conf.WHITELIST.set_for_testing('127\\.0\\.0\\.1:\\d*') try: response_get = client.get(('/proxy/127.0.0.1/%s/' % httpd.server_port), dict(foo='bar')) finally: finish_conf() assert_true(('Hello there' in response_get.content)) assert_true(('You requested: /?foo=bar.' in response_get.content)) assert_true((('/proxy/127.0.0.1/%s/foo.jpg' % httpd.server_port) in response_get.content)) assert_true((('/proxy/127.0.0.1/%s/baz?with=parameter' % httpd.server_port) in response_get.content)) finally: finish()
[ "def", "test_proxy_get", "(", ")", ":", "client", "=", "Client", "(", ")", "client", ".", "login", "(", "username", "=", "'test'", ",", "password", "=", "'test'", ")", "(", "httpd", ",", "finish", ")", "=", "run_test_server", "(", ")", "try", ":", "finish_conf", "=", "proxy", ".", "conf", ".", "WHITELIST", ".", "set_for_testing", "(", "'127\\\\.0\\\\.0\\\\.1:\\\\d*'", ")", "try", ":", "response_get", "=", "client", ".", "get", "(", "(", "'/proxy/127.0.0.1/%s/'", "%", "httpd", ".", "server_port", ")", ",", "dict", "(", "foo", "=", "'bar'", ")", ")", "finally", ":", "finish_conf", "(", ")", "assert_true", "(", "(", "'Hello there'", "in", "response_get", ".", "content", ")", ")", "assert_true", "(", "(", "'You requested: /?foo=bar.'", "in", "response_get", ".", "content", ")", ")", "assert_true", "(", "(", "(", "'/proxy/127.0.0.1/%s/foo.jpg'", "%", "httpd", ".", "server_port", ")", "in", "response_get", ".", "content", ")", ")", "assert_true", "(", "(", "(", "'/proxy/127.0.0.1/%s/baz?with=parameter'", "%", "httpd", ".", "server_port", ")", "in", "response_get", ".", "content", ")", ")", "finally", ":", "finish", "(", ")" ]
proxying test .
train
false
13,091
def l2_gram_schmidt(list, lim): r = [] for a in list: if (r == []): v = a else: v = (a - l2_projection(a, r, lim)) v_norm = l2_norm(v, lim) if (v_norm == 0): raise ValueError('The sequence is not linearly independent.') r.append((v / v_norm)) return r
[ "def", "l2_gram_schmidt", "(", "list", ",", "lim", ")", ":", "r", "=", "[", "]", "for", "a", "in", "list", ":", "if", "(", "r", "==", "[", "]", ")", ":", "v", "=", "a", "else", ":", "v", "=", "(", "a", "-", "l2_projection", "(", "a", ",", "r", ",", "lim", ")", ")", "v_norm", "=", "l2_norm", "(", "v", ",", "lim", ")", "if", "(", "v_norm", "==", "0", ")", ":", "raise", "ValueError", "(", "'The sequence is not linearly independent.'", ")", "r", ".", "append", "(", "(", "v", "/", "v_norm", ")", ")", "return", "r" ]
orthonormalizes the "list" of functions using the gram-schmidt process .
train
false
13,094
def p_expression_group(t): t[0] = t[2]
[ "def", "p_expression_group", "(", "t", ")", ":", "t", "[", "0", "]", "=", "t", "[", "2", "]" ]
expression : lparen expression rparen .
train
false
13,095
def restart_django_servers(): sh(cmd('touch', 'lms/urls.py', 'cms/urls.py'))
[ "def", "restart_django_servers", "(", ")", ":", "sh", "(", "cmd", "(", "'touch'", ",", "'lms/urls.py'", ",", "'cms/urls.py'", ")", ")" ]
restart the django server .
train
false
13,096
def get_default_algorithms(): default_algorithms = {'none': NoneAlgorithm(), 'HS256': HMACAlgorithm(HMACAlgorithm.SHA256), 'HS384': HMACAlgorithm(HMACAlgorithm.SHA384), 'HS512': HMACAlgorithm(HMACAlgorithm.SHA512)} if has_crypto: default_algorithms.update({'RS256': RSAAlgorithm(RSAAlgorithm.SHA256), 'RS384': RSAAlgorithm(RSAAlgorithm.SHA384), 'RS512': RSAAlgorithm(RSAAlgorithm.SHA512), 'ES256': ECAlgorithm(ECAlgorithm.SHA256), 'ES384': ECAlgorithm(ECAlgorithm.SHA384), 'ES512': ECAlgorithm(ECAlgorithm.SHA512), 'PS256': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256), 'PS384': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384), 'PS512': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512)}) return default_algorithms
[ "def", "get_default_algorithms", "(", ")", ":", "default_algorithms", "=", "{", "'none'", ":", "NoneAlgorithm", "(", ")", ",", "'HS256'", ":", "HMACAlgorithm", "(", "HMACAlgorithm", ".", "SHA256", ")", ",", "'HS384'", ":", "HMACAlgorithm", "(", "HMACAlgorithm", ".", "SHA384", ")", ",", "'HS512'", ":", "HMACAlgorithm", "(", "HMACAlgorithm", ".", "SHA512", ")", "}", "if", "has_crypto", ":", "default_algorithms", ".", "update", "(", "{", "'RS256'", ":", "RSAAlgorithm", "(", "RSAAlgorithm", ".", "SHA256", ")", ",", "'RS384'", ":", "RSAAlgorithm", "(", "RSAAlgorithm", ".", "SHA384", ")", ",", "'RS512'", ":", "RSAAlgorithm", "(", "RSAAlgorithm", ".", "SHA512", ")", ",", "'ES256'", ":", "ECAlgorithm", "(", "ECAlgorithm", ".", "SHA256", ")", ",", "'ES384'", ":", "ECAlgorithm", "(", "ECAlgorithm", ".", "SHA384", ")", ",", "'ES512'", ":", "ECAlgorithm", "(", "ECAlgorithm", ".", "SHA512", ")", ",", "'PS256'", ":", "RSAPSSAlgorithm", "(", "RSAPSSAlgorithm", ".", "SHA256", ")", ",", "'PS384'", ":", "RSAPSSAlgorithm", "(", "RSAPSSAlgorithm", ".", "SHA384", ")", ",", "'PS512'", ":", "RSAPSSAlgorithm", "(", "RSAPSSAlgorithm", ".", "SHA512", ")", "}", ")", "return", "default_algorithms" ]
returns the algorithms that are implemented by the library .
train
true
13,098
def ReverseCloseExpression(clean_lines, linenum, pos): line = clean_lines.elided[linenum] endchar = line[pos] if (endchar not in ')}]>'): return (line, 0, (-1)) if (endchar == ')'): startchar = '(' if (endchar == ']'): startchar = '[' if (endchar == '}'): startchar = '{' if (endchar == '>'): startchar = '<' (start_pos, num_open) = FindStartOfExpressionInLine(line, pos, 0, startchar, endchar) if (start_pos > (-1)): return (line, linenum, start_pos) while (linenum > 0): linenum -= 1 line = clean_lines.elided[linenum] (start_pos, num_open) = FindStartOfExpressionInLine(line, (len(line) - 1), num_open, startchar, endchar) if (start_pos > (-1)): return (line, linenum, start_pos) return (line, 0, (-1))
[ "def", "ReverseCloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "endchar", "=", "line", "[", "pos", "]", "if", "(", "endchar", "not", "in", "')}]>'", ")", ":", "return", "(", "line", ",", "0", ",", "(", "-", "1", ")", ")", "if", "(", "endchar", "==", "')'", ")", ":", "startchar", "=", "'('", "if", "(", "endchar", "==", "']'", ")", ":", "startchar", "=", "'['", "if", "(", "endchar", "==", "'}'", ")", ":", "startchar", "=", "'{'", "if", "(", "endchar", "==", "'>'", ")", ":", "startchar", "=", "'<'", "(", "start_pos", ",", "num_open", ")", "=", "FindStartOfExpressionInLine", "(", "line", ",", "pos", ",", "0", ",", "startchar", ",", "endchar", ")", "if", "(", "start_pos", ">", "(", "-", "1", ")", ")", ":", "return", "(", "line", ",", "linenum", ",", "start_pos", ")", "while", "(", "linenum", ">", "0", ")", ":", "linenum", "-=", "1", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "(", "start_pos", ",", "num_open", ")", "=", "FindStartOfExpressionInLine", "(", "line", ",", "(", "len", "(", "line", ")", "-", "1", ")", ",", "num_open", ",", "startchar", ",", "endchar", ")", "if", "(", "start_pos", ">", "(", "-", "1", ")", ")", ":", "return", "(", "line", ",", "linenum", ",", "start_pos", ")", "return", "(", "line", ",", "0", ",", "(", "-", "1", ")", ")" ]
if input points to ) or } or ] or > .
train
false
13,099
def image_save_for_web(image, fp=None, format=None): opt = dict(format=(image.format or format)) if (image.format == 'PNG'): opt.update(optimize=True) alpha = False if ((image.mode in ('RGBA', 'LA')) or ((image.mode == 'P') and ('transparency' in image.info))): alpha = image.convert('RGBA').split()[(-1)] if (image.mode != 'P'): image = image.convert('RGBA').convert('P', palette=Image.WEB, colors=256) if alpha: image.putalpha(alpha) elif (image.format == 'JPEG'): opt.update(optimize=True, quality=80) if fp: image.save(fp, **opt) else: img = StringIO.StringIO() image.save(img, **opt) return img.getvalue()
[ "def", "image_save_for_web", "(", "image", ",", "fp", "=", "None", ",", "format", "=", "None", ")", ":", "opt", "=", "dict", "(", "format", "=", "(", "image", ".", "format", "or", "format", ")", ")", "if", "(", "image", ".", "format", "==", "'PNG'", ")", ":", "opt", ".", "update", "(", "optimize", "=", "True", ")", "alpha", "=", "False", "if", "(", "(", "image", ".", "mode", "in", "(", "'RGBA'", ",", "'LA'", ")", ")", "or", "(", "(", "image", ".", "mode", "==", "'P'", ")", "and", "(", "'transparency'", "in", "image", ".", "info", ")", ")", ")", ":", "alpha", "=", "image", ".", "convert", "(", "'RGBA'", ")", ".", "split", "(", ")", "[", "(", "-", "1", ")", "]", "if", "(", "image", ".", "mode", "!=", "'P'", ")", ":", "image", "=", "image", ".", "convert", "(", "'RGBA'", ")", ".", "convert", "(", "'P'", ",", "palette", "=", "Image", ".", "WEB", ",", "colors", "=", "256", ")", "if", "alpha", ":", "image", ".", "putalpha", "(", "alpha", ")", "elif", "(", "image", ".", "format", "==", "'JPEG'", ")", ":", "opt", ".", "update", "(", "optimize", "=", "True", ",", "quality", "=", "80", ")", "if", "fp", ":", "image", ".", "save", "(", "fp", ",", "**", "opt", ")", "else", ":", "img", "=", "StringIO", ".", "StringIO", "(", ")", "image", ".", "save", "(", "img", ",", "**", "opt", ")", "return", "img", ".", "getvalue", "(", ")" ]
save image optimized for web usage .
train
false
13,100
def _strtobool(val): val = val.lower() if (val in ('y', 'yes', 't', 'true', 'on', '1')): return 1 elif (val in ('n', 'no', 'f', 'false', 'off', '0')): return 0 else: raise ValueError(('invalid truth value %r' % (val,)))
[ "def", "_strtobool", "(", "val", ")", ":", "val", "=", "val", ".", "lower", "(", ")", "if", "(", "val", "in", "(", "'y'", ",", "'yes'", ",", "'t'", ",", "'true'", ",", "'on'", ",", "'1'", ")", ")", ":", "return", "1", "elif", "(", "val", "in", "(", "'n'", ",", "'no'", ",", "'f'", ",", "'false'", ",", "'off'", ",", "'0'", ")", ")", ":", "return", "0", "else", ":", "raise", "ValueError", "(", "(", "'invalid truth value %r'", "%", "(", "val", ",", ")", ")", ")" ]
convert a string representation of truth to true (1) or false (0) .
train
true
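A few worked cases for _strtobool above (it mirrors distutils.util.strtobool): the return values are the integers 1 and 0 rather than booleans, and unrecognised strings raise ValueError.

assert _strtobool('Yes') == 1
assert _strtobool('off') == 0
try:
    _strtobool('maybe')
except ValueError as exc:
    print(exc)  # invalid truth value 'maybe'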
13,101
def get_all_styles(): for name in STYLE_MAP: (yield name) for (name, _) in find_plugin_styles(): (yield name)
[ "def", "get_all_styles", "(", ")", ":", "for", "name", "in", "STYLE_MAP", ":", "(", "yield", "name", ")", "for", "(", "name", ",", "_", ")", "in", "find_plugin_styles", "(", ")", ":", "(", "yield", "name", ")" ]
return an generator for all styles by name .
train
false
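The generator in the record above backs Pygments' public style listing; a typical call looks like this, where the exact names returned depend on the installed Pygments version and any style plugins.

from pygments.styles import get_all_styles

styles = sorted(get_all_styles())
print(styles[:3])  # e.g. ['abap', 'algol', 'algol_nu']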
13,103
def _get_collection_summary_dicts_from_models(collection_summary_models): collection_summaries = [get_collection_summary_from_model(collection_summary_model) for collection_summary_model in collection_summary_models] result = {} for collection_summary in collection_summaries: result[collection_summary.id] = collection_summary return result
[ "def", "_get_collection_summary_dicts_from_models", "(", "collection_summary_models", ")", ":", "collection_summaries", "=", "[", "get_collection_summary_from_model", "(", "collection_summary_model", ")", "for", "collection_summary_model", "in", "collection_summary_models", "]", "result", "=", "{", "}", "for", "collection_summary", "in", "collection_summaries", ":", "result", "[", "collection_summary", ".", "id", "]", "=", "collection_summary", "return", "result" ]
given an iterable of collectionsummarymodel instances .
train
false
13,106
def bohman(M, sym=True): if _len_guards(M): return np.ones(M) (M, needs_trunc) = _extend(M, sym) fac = np.abs(np.linspace((-1), 1, M)[1:(-1)]) w = (((1 - fac) * np.cos((np.pi * fac))) + ((1.0 / np.pi) * np.sin((np.pi * fac)))) w = np.r_[(0, w, 0)] return _truncate(w, needs_trunc)
[ "def", "bohman", "(", "M", ",", "sym", "=", "True", ")", ":", "if", "_len_guards", "(", "M", ")", ":", "return", "np", ".", "ones", "(", "M", ")", "(", "M", ",", "needs_trunc", ")", "=", "_extend", "(", "M", ",", "sym", ")", "fac", "=", "np", ".", "abs", "(", "np", ".", "linspace", "(", "(", "-", "1", ")", ",", "1", ",", "M", ")", "[", "1", ":", "(", "-", "1", ")", "]", ")", "w", "=", "(", "(", "(", "1", "-", "fac", ")", "*", "np", ".", "cos", "(", "(", "np", ".", "pi", "*", "fac", ")", ")", ")", "+", "(", "(", "1.0", "/", "np", ".", "pi", ")", "*", "np", ".", "sin", "(", "(", "np", ".", "pi", "*", "fac", ")", ")", ")", ")", "w", "=", "np", ".", "r_", "[", "(", "0", ",", "w", ",", "0", ")", "]", "return", "_truncate", "(", "w", ",", "needs_trunc", ")" ]
return a bohman window .
train
false
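A short NumPy sketch of the symmetric case of the Bohman window above, evaluating w(x) = (1 - |x|) cos(pi |x|) + sin(pi |x|) / pi on [-1, 1] with the end points pinned to zero; recent SciPy exposes the reference implementation as scipy.signal.windows.bohman.

import numpy as np

def bohman_reference(M):
    x = np.abs(np.linspace(-1, 1, M))
    w = (1 - x) * np.cos(np.pi * x) + np.sin(np.pi * x) / np.pi
    w[0] = w[-1] = 0.0  # the window tapers to zero at both ends
    return w

print(bohman_reference(7))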
13,108
def format_filename(filename, shorten=False): if shorten: filename = os.path.basename(filename) return filename_to_ui(filename)
[ "def", "format_filename", "(", "filename", ",", "shorten", "=", "False", ")", ":", "if", "shorten", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "return", "filename_to_ui", "(", "filename", ")" ]
formats a filename for user display .
train
true